diff --git a/.azure-pipelines/test-module-upgrade.yml b/.azure-pipelines/test-module-upgrade.yml new file mode 100644 index 00000000000..b491241b6ba --- /dev/null +++ b/.azure-pipelines/test-module-upgrade.yml @@ -0,0 +1,50 @@ +pool: + name: Hosted VS2017 + demands: npm + +steps: +- task: NodeTool@0 + displayName: 'Use Node 10.16.0' + inputs: + versionSpec: 10.16.0 + +- task: Npm@1 + displayName: 'Install autorest@beta' + inputs: + command: custom + verbose: false + customCommand: 'install -g "@autorest/autorest"' + +- task: Npm@1 + displayName: 'Install @microsoft/rush' + inputs: + command: custom + verbose: false + customCommand: 'install -g @microsoft/rush@5.12.0' + +- task: CmdLine@2 + displayName: 'Rush sync-versions' + inputs: + script: 'rush sync-versions' + +- task: CmdLine@2 + displayName: 'Rush Update' + inputs: + script: 'rush update' + +- task: CmdLine@2 + displayName: 'Rush Rebuild' + inputs: + script: 'rush rebuild' + +- pwsh: | + ./AutoRestUpgradeTest.ps1 -AllowList + workingDirectory: 'tests-upgrade' + displayName: 'Verify Upgrad' + +- task: PublishPipelineArtifact@0 + displayName: 'Save artifacts' + inputs: + artifactName: CompareResult + targetPath: tests-upgrade\CompareResult + condition: succeededOrFailed() diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index b46ee1762ec..197ca8b039a 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -2,8 +2,9 @@ dependencies: '@autorest/autorest': 3.0.6187 '@azure-tools/async-io': 3.0.252 '@azure-tools/autorest-extension-base': 3.1.266 - '@azure-tools/codegen': 2.1.240 + '@azure-tools/codegen': 2.5.290 '@azure-tools/codegen-csharp': 3.0.262 + '@azure-tools/codemodel': 4.13.348 '@azure-tools/codemodel-v3': 3.1.264 '@azure-tools/linq': 3.1.261 '@azure-tools/tasks': 3.0.252 @@ -16,6 +17,7 @@ dependencies: '@typescript-eslint/eslint-plugin': 2.0.0_b7c2671679a3bd4e1f52a3d3da16606f '@typescript-eslint/parser': 2.0.0_eslint@6.2.2 eslint: 6.2.2 + js-yaml: 3.13.1 mocha: 5.2.0 mocha-typescript: 1.1.17 source-map-support: 0.5.13 @@ -55,15 +57,6 @@ packages: dev: false resolution: integrity: sha512-hNEmbvlEY87AmmVoDRyYEZDG4iHwfYx6G9B7M8n0JvdwyOHn/gb4qUb1ULaPmbFWxMnVFzW070ab61/1VpYVog== - /@azure-tools/codegen/2.1.240: - dependencies: - '@azure-tools/async-io': 3.0.252 - '@azure-tools/linq': 3.1.261 - js-yaml: 3.13.1 - semver: 5.7.1 - dev: false - resolution: - integrity: sha512-w33tuViWhjGCmTmfQqfH+D1Qmg2CJ/Z1F9QOQjomhPWs9DRLyFeil+uRpnEtn90OSUFN0/R/90fiAbrkWvfpGw== /@azure-tools/codegen/2.5.290: dependencies: '@azure-tools/async-io': 3.0.252 @@ -81,6 +74,14 @@ packages: dev: false resolution: integrity: sha512-Zh/as7gux7ifD3J7JUHwRW/e7DrG7rSJBpIyysGZEHbhErxsiPeLTZsS5pDpzwXD3CsEA2hDxrNSDFpVNXNqog== + /@azure-tools/codemodel/4.13.348: + dependencies: + '@azure-tools/autorest-extension-base': 3.1.266 + '@azure-tools/codegen': 2.5.290 + '@azure-tools/linq': 3.1.261 + dev: false + resolution: + integrity: sha512-/5tjloDiPLNdqBdNN1PHudqMQlgCb+8o6L/8Pd3nYHc20cnbg3y3c23N5xpsGqmwwhJhCRrrW2XKrJeP55i09Q== /@azure-tools/linq/3.1.261: dev: false engines: @@ -994,7 +995,7 @@ packages: imurmurhash: 0.1.4 inquirer: 6.5.2 is-glob: 4.0.1 - js-yaml: 3.14.0 + js-yaml: 3.13.1 json-stable-stringify-without-jsonify: 1.0.1 levn: 0.3.0 lodash: 4.17.20 @@ -1579,14 +1580,6 @@ packages: hasBin: true resolution: integrity: sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== - /js-yaml/3.14.0: - dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - dev: false - 
hasBin: true - resolution: - integrity: sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A== /jsbn/0.1.1: dev: false resolution: @@ -3106,8 +3099,9 @@ packages: '@autorest/autorest': 3.0.6187 '@azure-tools/async-io': 3.0.252 '@azure-tools/autorest-extension-base': 3.1.266 - '@azure-tools/codegen': 2.1.240 + '@azure-tools/codegen': 2.5.290 '@azure-tools/codegen-csharp': 3.0.262 + '@azure-tools/codemodel': 4.13.348 '@azure-tools/codemodel-v3': 3.1.264 '@azure-tools/linq': 3.1.261 '@azure-tools/tasks': 3.0.252 @@ -3119,6 +3113,7 @@ packages: '@typescript-eslint/eslint-plugin': 2.0.0_b7c2671679a3bd4e1f52a3d3da16606f '@typescript-eslint/parser': 2.0.0_eslint@6.2.2 eslint: 6.2.2 + js-yaml: 3.13.1 mocha: 5.2.0 mocha-typescript: 1.1.17 source-map-support: 0.5.13 @@ -3127,15 +3122,16 @@ packages: dev: false name: '@rush-temp/powershell' resolution: - integrity: sha512-5R44rBmf2Dr0D+pThmzmAX4T8s7O55nqKq2LiSvlhkff1upIbUcrogOihhsGkSr+1XRm7vubVX76LJ9G6HH7HQ== + integrity: sha512-p6zFKQmmQo1rntIeAg/ELKH+i12IV+ZcTM9J3luYZeV7zc1mZ3A+DReEuxshx+3oR0WyOCw9jUgTm0hGHJQXWA== tarball: 'file:projects/powershell.tgz' version: 0.0.0 specifiers: '@autorest/autorest': ~3.0.6118 '@azure-tools/async-io': ~3.0.0 '@azure-tools/autorest-extension-base': ~3.1.0 - '@azure-tools/codegen': ~2.1.0 + '@azure-tools/codegen': ^2.5.276 '@azure-tools/codegen-csharp': ~3.0.0 + '@azure-tools/codemodel': ~4.13.342 '@azure-tools/codemodel-v3': ~3.1.0 '@azure-tools/linq': ~3.1.0 '@azure-tools/tasks': ~3.0.0 @@ -3148,6 +3144,7 @@ specifiers: '@typescript-eslint/eslint-plugin': ~2.0.0 '@typescript-eslint/parser': ~2.0.0 eslint: ~6.2.2 + js-yaml: 3.13.1 mocha: 5.2.0 mocha-typescript: 1.1.17 source-map-support: 0.5.13 diff --git a/docs/development.md b/docs/development.md index 7a6ff9a1fcc..b28094db19a 100644 --- a/docs/development.md +++ b/docs/development.md @@ -14,11 +14,9 @@ Use of this project requires the following: ## Cloning this repository -Make sure that you clone this repository with `--recurse` - there is a submodule with common code that we pull from the `https://github.com/azure/perks` project. - ``` powershell # clone recursively -git clone https://github.com/azure/autorest.powershell --recurse +git clone https://github.com/azure/autorest.powershell # one-time cd autorest.powershell diff --git a/powershell/autorest-configuration.md b/powershell/autorest-configuration.md index 5b6f048abcc..d6b1cf3530a 100644 --- a/powershell/autorest-configuration.md +++ b/powershell/autorest-configuration.md @@ -4,11 +4,21 @@ - Please don't edit this section unless you're re-configuring how the powershell extension plugs in to AutoRest AutoRest needs the below config to pick this up as a plug-in - see https://github.com/Azure/autorest/blob/master/docs/developer/architecture/AutoRest-extension.md +> modelerfour configuration +``` yaml +modelerfour: + emit-yaml-tags: false + lenient-model-deduplication: true + additional-checks: false + always-create-content-type-parameter: false + always-seal-x-ms-enums: true +``` + > if the modeler is loaded already, use that one, otherwise grab it. 
-``` yaml !isLoaded('@autorest/remodeler') +``` yaml !isLoaded('@autorest/modelerfour') use-extension: - "@autorest/remodeler" : "~2.1.0" + "@autorest/modelerfour": "4.15.414" # will use highest 2.0.x ``` @@ -52,6 +62,7 @@ module-folder: $(current-folder)/generated cmdlet-folder: $(module-folder)/cmdlets model-cmdlet-folder: $(module-folder)/model-cmdlets custom-cmdlet-folder: $(current-folder)/custom +utils-cmdlet-folder: $(current-folder)/utils internal-cmdlet-folder: $(current-folder)/internal test-folder: $(current-folder)/test runtime-folder: $(module-folder)/runtime @@ -93,62 +104,51 @@ declare-directive: # Pipeline Configuration ``` yaml pipeline: -# --- extension remodeler --- - - # "Shake the tree", and normalize the model - remodeler: - input: openapi-document/multi-api/identity # the plugin where we get inputs from - - # allow developer to do transformations on the code model. - remodeler/new-transform: - input: remodeler - - # Make some interpretations about what some things in the model mean - tweakcodemodel: - input: remodeler/new-transform +# --- extension powershell based on modelerfour - # Specific things for Azure - tweakcodemodelazure: - input: tweakcodemodel + # Fix the code model gap between m3 and m4 + tweakm4codemodel: + input: modelerfour/identity -# --- extension powershell --- + tweakcodemodel-v2: + input: tweakm4codemodel + # input: clicommon/identity + + tweakcodemodelazure-v2: + input: tweakcodemodel-v2 - # creates high-level commands - create-commands: - input: tweakcodemodelazure # brings the code-model-v3 with it. + create-commands-v2: + input: tweakcodemodelazure-v2 + + create-virtual-properties-v2: + input: create-commands-v2 - create-virtual-properties: - input: create-commands - - # Choose names for everything in c# - csnamer: - input: create-virtual-properties # and the generated c# files + csnamer-v2: + input: create-virtual-properties-v2 - # ensures that names/descriptions are properly set for powershell - psnamer: - input: csnamer + psnamer-v2: + input: csnamer-v2 - modifiers: - input: psnamer + modifiers-v2: + input: psnamer-v2 - add-azure-completers: - input: modifiers + add-azure-completers-v2: + input: modifiers-v2 - # creates powershell cmdlets for high-level commands. (leverages llc# code) - powershell: - input: add-azure-completers # and the generated c# files + llcsharp-v2: + input: modifiers-v2 + + powershell-v2: + input: add-azure-completers-v2 # --- extension llcsharp --- # generates c# files for http-operations - llcsharp: - input: modifiers - llcsharp/text-transform: - input: llcsharp + input: llcsharp-v2 scope: scope-here powershell/text-transform: - input: powershell + input: powershell-v2 scope: scope-here llcsharp/emitter: @@ -174,10 +174,10 @@ scope-here: # Specific Settings for cm emitting - selects the file types and format that cmv2-emitter will spit out. 
code-model-emitter-settings: - input-artifact: code-model-v3 + input-artifact: code-model-v4 is-object: true output-uri-expr: | - "code-model-v3" + "code-model-v4" # testing: ask for the files we need output-artifact: @@ -409,4 +409,17 @@ verb-mapping: Write: Write ``` - +``` yaml +cli: + reason: 'Keep same as modelerfour' + naming: + default: + parameter: 'camel' + property: 'camel' + operation: 'pascal' + operationGroup: 'pascal' + choice: 'pascal' + choiceValue: 'pascal' + constant: 'pascal' + type: 'pascal' +``` diff --git a/powershell/cmdlets/class.ts b/powershell/cmdlets/class.ts index d27c3254b7c..5ede09c12ba 100644 --- a/powershell/cmdlets/class.ts +++ b/powershell/cmdlets/class.ts @@ -3,7 +3,10 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +import { Schema as NewSchema, SchemaType, ArraySchema, SchemaResponse, HttpParameter, ObjectSchema, DictionarySchema, ChoiceSchema, SealedChoiceSchema } from '@azure-tools/codemodel'; import { command, getAllProperties, JsonType, http, getAllPublicVirtualProperties, getVirtualPropertyFromPropertyName, ParameterLocation, getAllVirtualProperties, VirtualParameter, VirtualProperty } from '@azure-tools/codemodel-v3'; +import { CommandOperation, VirtualParameter as NewVirtualParameter } from '../utils/command-operation'; +import { getAllProperties as NewGetAllProperties, getAllPublicVirtualProperties as NewGetAllPublicVirtualProperties, getVirtualPropertyFromPropertyName as NewGetVirtualPropertyFromPropertyName, VirtualProperty as NewVirtualProperty } from '../utils/schema'; import { escapeString, docComment, serialize, pascalCase, DeepPartial } from '@azure-tools/codegen'; import { items, values, Dictionary, length } from '@azure-tools/linq'; import { @@ -15,6 +18,7 @@ import { Alias, ArgumentCompleterAttribute, AsyncCommandRuntime, AsyncJob, Cmdle import { State } from '../internal/state'; import { Channel } from '@azure-tools/autorest-extension-base'; import { IParameter } from '@azure-tools/codemodel-v3/dist/code-model/components'; +import { IParameter as NewIParameter } from '../utils/components'; import { Variable, Local, ParameterModifier } from '@azure-tools/codegen-csharp'; import { getVirtualPropertyName } from '../llcsharp/model/model-class'; const PropertiesRequiringNew = new Set(['Host', 'Events']); @@ -152,6 +156,17 @@ export function addCompleterInfo(targetProperty: Property, parameter: VirtualPar } } +export function NewAddCompleterInfo(targetProperty: Property, parameter: NewVirtualParameter) { + if (parameter.completerInfo && parameter.completerInfo.script) { + targetProperty.add(new Attribute(ClientRuntime.CompleterInfoAttribute, { + parameters: [ + new LiteralExpression(`\nName = ${new StringExpression(parameter.completerInfo.name || '').value}`), + new LiteralExpression(`\nDescription =${new StringExpression(parameter.completerInfo.description || '').value}`), + new LiteralExpression(`\nScript = ${new StringExpression(parameter.completerInfo.script).value}`) + ] + })); + } +} export function addDefaultInfo(targetProperty: Property, parameter: any) { if (parameter.defaultInfo && parameter.defaultInfo.script) { @@ -216,6 +231,60 @@ export function addInfoAttribute(targetProperty: Property, pType: TypeDeclaratio })); +} + + +export function NewAddInfoAttribute(targetProperty: Property, pType: TypeDeclaration, isRequired: boolean, isReadOnly: boolean, description: string, serializedName: 
string) { + + let pt = pType; + while (pt.elementType) { + switch (pt.elementType.schema.type) { + case JsonType.Object: + if (pt.elementType.schema.language.csharp.interfaceImplementation) { + pt = { + declaration: pt.elementType.schema.language.csharp.interfaceImplementation.declaration, + schema: pt.elementType.schema, + }; + } else { + // arg! it's not done yet. Hope it's not polymorphic itself. + pt = { + declaration: `${pt.elementType.schema.language.csharp.namespace}.${pt.elementType.schema.language.csharp.interfaceName}`, + schema: pt.elementType.schema, + }; + } + break; + + case JsonType.Array: + pt = pt.elementType; + break; + + default: + pt = pt.elementType; + break; + } + } + const ptypes = new Array(); + if (pt.schema && pt.schema && pt.schema.language.csharp.byReference) { + ptypes.push(`typeof(${pt.schema.language.csharp.namespace}.${pt.schema.language.csharp.interfaceName}_Reference)`); + // do we need polymorphic types for by-resource ? Don't think so. + } else { + ptypes.push(`typeof(${pt.declaration})`); + if (pt.schema && pt.schema.language.csharp.classImplementation && pt.schema.language.csharp.classImplementation.discriminators) { + ptypes.push(...[...pt.schema.language.csharp.classImplementation.discriminators.values()].map(each => `typeof(${each.modelInterface.fullName})`)); + } + } + + targetProperty.add(new Attribute(ClientRuntime.InfoAttribute, { + parameters: [ + new LiteralExpression(`\nRequired = ${isRequired}`), + new LiteralExpression(`\nReadOnly = ${isReadOnly}`), + new LiteralExpression(`\nDescription = ${new StringExpression(description).value}`), + new LiteralExpression(`\nSerializedName = ${new StringExpression(serializedName).value}`), + new LiteralExpression(`\nPossibleTypes = new [] { ${ptypes.join(',').replace(/\?/g, '').replace(/undefined\./g, '')} }`), + ] + })); + + } export class CmdletClass extends Class { @@ -231,14 +300,14 @@ export class CmdletClass extends Class { private bodyParameter?: Variable; private bodyParameterInfo?: { type: TypeDeclaration; valueType: TypeDeclaration }; private apProp?: Property; - private operation: command.CommandOperation; + private operation: CommandOperation; private debugMode?: boolean; private variantName: string; private isViaIdentity: boolean; private hasStreamOutput: boolean; private outFileParameter?: Property; - constructor(namespace: Namespace, operation: command.CommandOperation, state: State, objectInitializer?: DeepPartial) { + constructor(namespace: Namespace, operation: CommandOperation, state: State, objectInitializer?: DeepPartial) { // generate the 'variant' part of the name const noun = `${state.project.prefix}${operation.details.csharp.subjectPrefix}${operation.details.csharp.subject}`; const variantName = `${noun}${operation.details.csharp.name ? `_${operation.details.csharp.name}` : ''}`; @@ -281,7 +350,7 @@ export class CmdletClass extends Class { })); // construct the class - this.addClassAttributes(this.operation, this.variantName); + this.NewAddClassAttributes(this.operation, this.variantName); if (this.hasStreamOutput) { this.outFileParameter = this.add(new Property('OutFile', System.String, { attributes: [], description: 'Path to write output file to.' 
})); this.outFileParameter.add(new Attribute(ParameterAttribute, { parameters: ['Mandatory = true', 'HelpMessage = "Path to write output file to"'] })); @@ -289,7 +358,7 @@ export class CmdletClass extends Class { this.outFileParameter.add(new Attribute(CategoryAttribute, { parameters: [`${ParameterCategory}.Body`] })); } - this.addPowershellParameters(this.operation); + this.NewAddPowershellParameters(this.operation); // implement IEventListener this.implementIEventListener(); @@ -298,13 +367,13 @@ export class CmdletClass extends Class { this.implementConstructors(); // processRecord - this.implementProcessRecord(this.operation); + this.NewImplementProcessRecord(this.operation); - this.implementProcessRecordAsync(this.operation); + this.NewImplementProcessRecordAsync(this.operation); this.debugMode = await this.state.getValue('debug', false); // json serialization - this.implementSerialization(this.operation); + this.NewImplementSerialization(this.operation); for (const prop of this.properties) { if (prop.name === 'Host') { @@ -320,12 +389,19 @@ export class CmdletClass extends Class { let ops = ''; for (const httpOperation of values(this.operation.callGraph)) { - ops = `${ops}\n[OpenAPI] ${httpOperation.operationId}=>${httpOperation.method.toUpperCase()}:"${httpOperation.path}"`; + const request = httpOperation.requests?.[0]; + if (!request) { + continue; + } + const httpMethod = request.protocol.http?.method ?? ''; + const httpPath = request.protocol.http?.path ?? ''; + ops = `${ops}\n[OpenAPI] ${httpOperation.language.default.name}=>${httpMethod.toUpperCase()}:"${httpPath}"`; if (this.debugMode) { - const m = (httpOperation.extensions && httpOperation.extensions['x-ms-metadata']) || (httpOperation.pathExtensions ? httpOperation.pathExtensions['x-ms-metadata'] : undefined); - if (m) { - ops = `${ops}\n [METADATA]\n${serialize(m)}`; - } + // x-ms-metadata seems no longer exists + // const m = (httpOperation.extensions && httpOperation.extensions['x-ms-metadata']) || (httpOperation.pathExtensions ? httpOperation.pathExtensions['x-ms-metadata'] : undefined); + // if (m) { + // ops = `${ops}\n [METADATA]\n${serialize(m)}`; + // } ops = `${ops}\n [DETAILS]`; ops = `${ops}\n verb: ${this.operation.details.csharp.verb}`; @@ -356,7 +432,7 @@ export class CmdletClass extends Class { this.add(new Property('Pipeline', ClientRuntime.HttpPipeline, { getAccess: Access.Private, setAccess: Access.Private, description: `The instance of the that the remote call will use.` })); // client API property (gs01: fill this in correctly) - const clientAPI = new ClassType(this.state.model.details.csharp.namespace, this.state.model.details.csharp.name); + const clientAPI = new ClassType(this.state.model.language.csharp?.namespace, this.state.model.language.csharp?.name || ''); this.add(new LambdaProperty('Client', clientAPI, new LiteralExpression(`${this.state.project.serviceNamespace.moduleClass.declaration}.Instance.ClientAPI`), { description: 'The reference to the client API class.' })); this.add(new Method('StopProcessing', dotnet.Void, { access: Access.Protected, override: Modifier.Override, description: 'Interrupts currently running code within the command.' 
})).add(function* () { @@ -409,20 +485,24 @@ export class CmdletClass extends Class { } } - private isWritableCmdlet(operation: command.CommandOperation): boolean { - switch (operation.callGraph[0].method.toLowerCase()) { - case 'put': - case 'post': - case 'delete': - case 'patch': - return true; + + private NewIsWritableCmdlet(operation: CommandOperation): boolean { + if (operation.callGraph[0].requests) { + switch (operation.callGraph[0].requests[0]?.protocol.http?.method.toLowerCase()) { + case 'put': + case 'post': + case 'delete': + case 'patch': + return true; + } } return false; } - private implementProcessRecord(operation: command.CommandOperation) { + + private NewImplementProcessRecord(operation: CommandOperation) { const $this = this; - const writable = this.isWritableCmdlet(operation); + const writable = this.NewIsWritableCmdlet(operation); this.add(new Method('ProcessRecord', undefined, { access: Access.Protected, override: Modifier.Override, description: 'Performs execution of the command.' })).add(function* () { yield $this.eventListener.syncSignal(Events.CmdletProcessRecordStart); @@ -477,7 +557,7 @@ export class CmdletClass extends Class { } : normal; if (writable) { - yield If(`ShouldProcess($"Call remote '${operation.callGraph[0].details.csharp.name}' operation")`, work); + yield If(`ShouldProcess($"Call remote '${operation.callGraph[0].language.csharp?.name}' operation")`, work); } else { yield work; } @@ -505,7 +585,8 @@ export class CmdletClass extends Class { } - private implementProcessRecordAsync(operation: command.CommandOperation) { + + private NewImplementProcessRecordAsync(operation: CommandOperation) { const $this = this; const PAR = this.add(new Method('ProcessRecordAsync', System.Threading.Tasks.Task(), { access: Access.Protected, async: Modifier.Async, @@ -549,22 +630,22 @@ export class CmdletClass extends Class { const operationParameters = values(apiCall.parameters). // filter out constants and path parameters when using piping for identity - where(each => !(each.details.csharp.constantValue) /* && (!$this.isViaIdentity || each.in !== ParameterLocation.Path) */). + where(each => !(each.language.csharp?.constantValue) && each.language.default?.name !== '$host'/* && (!$this.isViaIdentity || each.in !== ParameterLocation.Path) */). select(p => { return { - name: p.details.csharp.name, + name: p.language.csharp?.name, param: values($this.properties). where(each => each.metadata.parameterDefinition). - first(each => each.metadata.parameterDefinition.details.csharp.uid === p.details.csharp.uid), - isPathParam: $this.isViaIdentity && p.in === ParameterLocation.Path + first(each => each.metadata.parameterDefinition.language.csharp?.serializedName === p.language.csharp?.serializedName), // xichen: Is it safe enough to use serializedName? + isPathParam: $this.isViaIdentity && p.protocol.http?.in === ParameterLocation.Path }; }). 
select(each => { if (each.param) { - const httpParam = ((each.param.metadata.parameterDefinition)); + const httpParam = ((each.param.metadata.parameterDefinition)); if (httpParam.required) { return { name: each.param, @@ -573,7 +654,7 @@ export class CmdletClass extends Class { }; } - const httpParamTD = $this.state.project.schemaDefinitionResolver.resolveTypeDeclaration((httpParam.schema), httpParam.required, $this.state); + const httpParamTD = $this.state.project.schemaDefinitionResolver.resolveTypeDeclaration((httpParam.schema), httpParam.required, $this.state); return { name: each.param, expression: toExpression(`this.InvocationInformation.BoundParameters.ContainsKey("${each.param.value}") ? ${each.param.value} : ${httpParamTD.defaultOfType}`), @@ -591,9 +672,9 @@ export class CmdletClass extends Class { } // create the response handlers - const responses = [...values(apiCall.responses).selectMany(each => each)]; + const responses = [...values(apiCall.responses), ...values(apiCall.exceptions)]; - const callbackMethods = values(responses).toArray().map(each => new LiteralExpression(each.details.csharp.name)); + const callbackMethods = values(responses).toArray().map(each => new LiteralExpression(each.language.csharp?.name || '')); // make callback methods for (const each of values(responses)) { @@ -601,27 +682,27 @@ export class CmdletClass extends Class { const parameters = new Array(); parameters.push(new Parameter('responseMessage', System.Net.Http.HttpResponseMessage, { description: `the raw response message as an ${System.Net.Http.HttpResponseMessage}.` })); - if (each.details.csharp.responseType) { - parameters.push(new Parameter('response', System.Threading.Tasks.Task({ declaration: each.details.csharp.responseType }), { description: `the body result as a from the remote call` })); + if (each.language.csharp?.responseType) { + parameters.push(new Parameter('response', System.Threading.Tasks.Task({ declaration: each.language.csharp?.responseType }), { description: `the body result as a from the remote call` })); } - if (each.details.csharp.headerType) { - parameters.push(new Parameter('headers', System.Threading.Tasks.Task({ declaration: each.details.csharp.headerType }), { description: `the header result as a from the remote call` })); + if (each.language.csharp?.headerType) { + parameters.push(new Parameter('headers', System.Threading.Tasks.Task({ declaration: each.language.csharp.headerType }), { description: `the header result as a from the remote call` })); } - const override = `override${pascalCase(each.details.csharp.name)}`; - const returnNow = new Parameter('returnNow', System.Threading.Tasks.Task(dotnet.Bool), { modifier: ParameterModifier.Ref, description: `/// Determines if the rest of the ${each.details.csharp.name} method should be processed, or if the method should return immediately (set to true to skip further processing )` }); + const override = `override${pascalCase(each.language.csharp?.name || '')}`; + const returnNow = new Parameter('returnNow', System.Threading.Tasks.Task(dotnet.Bool), { modifier: ParameterModifier.Ref, description: `/// Determines if the rest of the ${each.language.csharp?.name} method should be processed, or if the method should return immediately (set to true to skip further processing )` }); const overrideResponseMethod = new PartialMethod(override, dotnet.Void, { parameters: [...parameters, returnNow], - description: `${override} will be called before the regular ${each.details.csharp.name} has been processed, allowing customization of 
what happens on that response. Implement this method in a partial class to enable this behavior`, + description: `${override} will be called before the regular ${each.language.csharp?.name} has been processed, allowing customization of what happens on that response. Implement this method in a partial class to enable this behavior`, returnsDescription: `A that will be complete when handling of the method is completed.` }); $this.add(overrideResponseMethod); - const responseMethod = new Method(`${each.details.csharp.name}`, System.Threading.Tasks.Task(), { + const responseMethod = new Method(`${each.language.csharp?.name}`, System.Threading.Tasks.Task(), { access: Access.Private, parameters, async: Modifier.Async, - description: each.details.csharp.description, + description: each.language.csharp?.description, returnsDescription: `A that will be complete when handling of the method is completed.` }); responseMethod.push(Using('NoSynchronizationContext', '')); @@ -634,15 +715,15 @@ export class CmdletClass extends Class { yield `// if ${override} has returned true, then return right away.`; yield If(And(IsNotNull(skip), `await ${skip}`), Return()); - if (each.details.csharp.isErrorResponse) { + if (each.language.csharp?.isErrorResponse) { // this should write an error to the error channel. - yield `// Error Response : ${each.responseCode}`; + yield `// Error Response : ${each.protocol.http?.statusCodes[0]}`; const unexpected = function* () { yield '// Unrecognized Response. Create an error record based on what we have.'; - const ex = (each.details.csharp.responseType) ? - Local('ex', `new ${ClientRuntime.name}.RestException<${each.details.csharp.responseType}>(responseMessage, await response)`) : + const ex = (each.language.csharp?.responseType) ? + Local('ex', `new ${ClientRuntime.name}.RestException<${each.language.csharp.responseType}>(responseMessage, await response)`) : Local('ex', `new ${ClientRuntime.name}.RestException(responseMessage)`); yield ex.declarationStatement; @@ -652,15 +733,15 @@ export class CmdletClass extends Class { ErrorDetails = new global::System.Management.Automation.ErrorDetails(${ex.value}.Message) { RecommendedAction = ${ex.value}.Action } });`; }; - if (each.schema) { + if ((each).schema !== undefined) { // the schema should be the error information. 
// this supports both { error { message, code} } and { message, code} - let props = getAllPublicVirtualProperties(each.schema.details.csharp.virtualProperties); - const errorProperty = values(props).first(p => p.property.details.csharp.name === 'error'); + let props = NewGetAllPublicVirtualProperties((each).schema.language.csharp?.virtualProperties); + const errorProperty = values(props).first(p => p.property.serializedName === 'error'); let ep = ''; if (errorProperty) { - props = getAllPublicVirtualProperties(errorProperty.property.schema.details.csharp.virtualProperties); + props = NewGetAllPublicVirtualProperties(errorProperty.property.schema.language.csharp?.virtualProperties); ep = `${errorProperty.name}?.`; } @@ -694,44 +775,45 @@ export class CmdletClass extends Class { } } - yield `// ${each.details.csharp.name} - response for ${each.responseCode} / ${values(each.mimeTypes).join('/')}`; - if (each.schema) { - const schema = each.schema; + yield `// ${each.language.csharp?.name} - response for ${each.protocol.http?.statusCodes[0]} / ${values(each.protocol.http?.mediaTypes).join('/')}`; - if (apiCall.details.csharp.pageable) { - const pageable = apiCall.details.csharp.pageable; + if ('schema' in each) { + const schema = (each).schema; + + if (apiCall.language.csharp?.pageable) { + const pageable = apiCall.language.csharp.pageable; yield '// response should be returning an array of some kind. +Pageable'; yield `// ${pageable.responseType} / ${pageable.itemName || ''} / ${pageable.nextLinkName || ''}`; switch (pageable.responseType) { // the result is (or works like a x-ms-pageable) case 'pageable': case 'nested-array': { - const valueProperty = schema.properties[pageable.itemName]; - const nextLinkProperty = schema.properties[pageable.nextLinkName]; + const valueProperty = (schema).properties?.find(p => p.serializedName === pageable.itemName); + const nextLinkProperty = (schema)?.properties?.find(p => p.serializedName === pageable.nextLinkName); if (valueProperty && nextLinkProperty) { // it's pageable! 
const result = new LocalVariable('result', dotnet.Var, { initializer: new LiteralExpression('await response') }); yield result.declarationStatement; // write out the current contents - const vp = getVirtualPropertyFromPropertyName(each.schema.details.csharp.virtualProperties, valueProperty.serializedName); + const vp = NewGetVirtualPropertyFromPropertyName(schema.language.csharp?.virtualProperties, valueProperty.serializedName); if (vp) { yield `WriteObject(${result.value}.${vp.name},true);`; } - const nl = getVirtualPropertyFromPropertyName(each.schema.details.csharp.virtualProperties, nextLinkProperty.serializedName); + const nl = NewGetVirtualPropertyFromPropertyName(schema.language.csharp?.virtualProperties, nextLinkProperty.serializedName); if (nl) { const nextLinkName = `${result.value}.${nl.name}`; yield (If(`${nextLinkName} != null`, If('responseMessage.RequestMessage is System.Net.Http.HttpRequestMessage requestMessage ', function* () { yield `requestMessage = requestMessage.Clone(new global::System.Uri( ${nextLinkName} ),${ClientRuntime.Method.Get} );`; yield $this.eventListener.signal(Events.FollowingNextLink); - yield `await this.${$this.$('Client').invokeMethod(`${apiCall.details.csharp.name}_Call`, ...[toExpression('requestMessage'), ...callbackMethods, dotnet.This, pipeline]).implementation}`; + yield `await this.${$this.$('Client').invokeMethod(`${apiCall.language.csharp?.name}_Call`, ...[toExpression('requestMessage'), ...callbackMethods, dotnet.This, pipeline]).implementation}`; }) )); } return; } else if (valueProperty) { // it's just a nested array - const p = getVirtualPropertyFromPropertyName(each.schema.details.csharp.virtualProperties, valueProperty.serializedName); + const p = getVirtualPropertyFromPropertyName(schema.language.csharp?.virtualProperties, valueProperty.serializedName); if (p) { yield `WriteObject((await response).${p.name}, true);`; } @@ -748,13 +830,13 @@ export class CmdletClass extends Class { } // ok, let's see if the response type } - const props = getAllPublicVirtualProperties(schema.details.csharp.virtualProperties); + const props = NewGetAllPublicVirtualProperties(schema.language.csharp?.virtualProperties); const outValue = (length(props) === 1) ? `(await response).${props[0].name}` : '(await response)'; // we expect to get back some data from this call. 
- const rType = $this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(schema, true, $this.state); + const rType = $this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(schema, true, $this.state); yield `// (await response) // should be ${rType.declaration}`; if ($this.hasStreamOutput && $this.outFileParameter) { const outfile = $this.outFileParameter; @@ -803,7 +885,7 @@ export class CmdletClass extends Class { const actualCall = function* () { yield $this.eventListener.signal(Events.CmdletBeforeAPICall); const idOpParams = operationParameters.filter(each => !each.isPathParam); - const idschema = values($this.state.project.model.schemas).first(each => each.details.default.uid === 'universal-parameter-type'); + const idschema = values($this.state.project.model.schemas.objects).first(each => each.language.default.uid === 'universal-parameter-type'); if ($this.isViaIdentity) { @@ -811,7 +893,7 @@ export class CmdletClass extends Class { yield '// try to call with PATH parameters from Input Object'; if (idschema) { - const allVPs = getAllPublicVirtualProperties(idschema.details.csharp.virtualProperties); + const allVPs = NewGetAllPublicVirtualProperties(idschema.language.csharp?.virtualProperties); const props = [...values(idschema.properties)]; const idOpParams = operationParameters.map(each => { @@ -826,7 +908,7 @@ export class CmdletClass extends Class { const match = props.find(p => pascalCase(p.serializedName) === pascalName); if (match) { - const defaultOfType = $this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(match.schema, true, $this.state).defaultOfType; + const defaultOfType = $this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(match.schema, true, $this.state).defaultOfType; // match up vp name const vp = allVPs.find(pp => pascalCase(pp.property.serializedName) === pascalName); if (vp) { @@ -837,10 +919,10 @@ export class CmdletClass extends Class { } // fall back! - console.error(`Unable to match identity parameter '${each.name}' member to appropriate virtual parameter. (Guessing '${pascalCase(match.details.csharp.name)}').`); + console.error(`Unable to match identity parameter '${each.name}' member to appropriate virtual parameter. (Guessing '${pascalCase(match.language.csharp?.name ?? '')}').`); return { - name: `InputObject.${pascalCase(match.details.csharp.name)}`, - value: `InputObject.${pascalCase(match.details.csharp.name)} ?? ${defaultOfType}` + name: `InputObject.${pascalCase(match.language.csharp?.name ?? '')}`, + value: `InputObject.${pascalCase(match.language.csharp?.name ?? '')} ?? ${defaultOfType}` }; } console.error(`Unable to match idenity parameter '${each.name}' member to appropriate virtual parameter. 
(Guessing '${pascalName}')`); @@ -854,19 +936,19 @@ export class CmdletClass extends Class { yield If(IsNull(opParam.name), `ThrowTerminatingError( new ${ErrorRecord}(new global::System.Exception("InputObject has null value for ${opParam.name}"),string.Empty, ${ErrorCategory('InvalidArgument')}, InputObject) );`); } } - yield `await this.${$this.$('Client').invokeMethod(`${apiCall.details.csharp.name}`, ...[...idOpParams.map(each => toExpression(each.value)), ...callbackMethods, dotnet.This, pipeline]).implementation}`; + yield `await this.${$this.$('Client').invokeMethod(`${apiCall.language.csharp?.name}`, ...[...idOpParams.map(each => toExpression(each.value)), ...callbackMethods, dotnet.This, pipeline]).implementation}`; } }; - if (idschema && values(idschema.properties).first(each => each.details.csharp.uid === 'universal-parameter:resource identity')) { - yield If('InputObject?.Id != null', `await this.${$this.$('Client').invokeMethod(`${apiCall.details.csharp.name}ViaIdentity`, ...[toExpression('InputObject.Id'), ...idOpParams.map(each => each.expression), ...callbackMethods, dotnet.This, pipeline]).implementation}`); + if (idschema && values(idschema.properties).first(each => each.language.csharp?.uid === 'universal-parameter:resource identity')) { + yield If('InputObject?.Id != null', `await this.${$this.$('Client').invokeMethod(`${apiCall.language.csharp?.name}ViaIdentity`, ...[toExpression('InputObject.Id'), ...idOpParams.map(each => each.expression), ...callbackMethods, dotnet.This, pipeline]).implementation}`); yield Else(identityFromPathParams); } else { yield identityFromPathParams; } } else { - yield `await this.${$this.$('Client').invokeMethod(`${apiCall.details.csharp.name}`, ...[...operationParameters.map(each => each.expression), ...callbackMethods, dotnet.This, pipeline]).implementation}`; + yield `await this.${$this.$('Client').invokeMethod(`${apiCall.language.csharp?.name}`, ...[...operationParameters.map(each => each.expression), ...callbackMethods, dotnet.This, pipeline]).implementation}`; } yield $this.eventListener.signal(Events.CmdletAfterAPICall); }; @@ -891,7 +973,8 @@ export class CmdletClass extends Class { }); } - private implementSerialization(operation: command.CommandOperation) { + + private NewImplementSerialization(operation: CommandOperation) { const $this = this; // clone if (operation.asjob) { @@ -928,7 +1011,6 @@ export class CmdletClass extends Class { }); } } - private implementConstructors() { // default constructor this.add(new Constructor(this, { description: `Intializes a new instance of the cmdlet class.` })); @@ -1032,7 +1114,8 @@ export class CmdletClass extends Class { }); } - private addPowershellParameters(operation: command.CommandOperation) { + + private NewAddPowershellParameters(operation: CommandOperation) { const vps = operation.details.csharp.virtualParameters || { body: [], operation: [], @@ -1040,7 +1123,8 @@ export class CmdletClass extends Class { for (const parameter of values(operation.parameters)) { // these are the parameters that this command expects - const td = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(parameter.schema, true, this.state); + parameter.schema; + const td = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(parameter.schema, true, this.state); if (parameter.details.csharp.constantValue) { // this parameter has a constant value -- SKIP IT @@ -1092,7 +1176,7 @@ export class CmdletClass extends Class { const expandedBodyParameter = this.add(new 
BackedProperty(parameter.details.csharp.name, td, { description: parameter.details.csharp.description, - initializer: (parameter.schema.type === JsonType.Array) ? 'null' : `new ${parameter.schema.details.csharp.fullname}()`, + initializer: (parameter.schema.type === SchemaType.Array) ? 'null' : `new ${parameter.schema.language.csharp?.fullname}()`, setAccess: Access.Private, getAccess: Access.Private, })); @@ -1100,10 +1184,11 @@ export class CmdletClass extends Class { for (const vParam of vps.body) { const vSchema = vParam.schema; - const propertyType = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(vSchema, true, this.state); + vParam.origin; + const propertyType = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(vSchema, true, this.state); // we need to know if the actual underlying property is actually nullable. - const nullable = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(vSchema, (vParam.origin).property.details.csharp.required, this.state).isNullable; + const nullable = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(vSchema, (vParam.origin).property.language.csharp?.required, this.state).isNullable; const cmdletParameter = new Property(vParam.name, propertyType, { get: toExpression(`${expandedBodyParameter.value}.${getVirtualPropertyName((vParam.origin)) || vParam.origin.name}${!nullable ? '' : ` ?? ${propertyType.defaultOfType}`}`), // /* ${inspect(vParam.origin)} */ @@ -1112,24 +1197,27 @@ export class CmdletClass extends Class { new: PropertiesRequiringNew.has(vParam.name) ? Modifier.New : Modifier.None }); - if (vParam.schema.details.csharp.byReference) { + if (vParam.schema.language.csharp?.byReference) { // this parameter's schema is marked as 'by-reference' which means we should // tag it with an ExportAs attribute for the I*Reference type. - cmdletParameter.add(new Attribute(ExportAsAttribute, { parameters: [`typeof(${vParam.schema.details.csharp.referenceInterface})`] })); + cmdletParameter.add(new Attribute(ExportAsAttribute, { parameters: [`typeof(${vParam.schema.language.csharp.referenceInterface})`] })); } - if (vParam.schema.type === JsonType.Array) { - if (vParam.schema.items && vParam.schema.items.details.csharp.byReference) { - cmdletParameter.add(new Attribute(ExportAsAttribute, { parameters: [`typeof(${vParam.schema.items.details.csharp.referenceInterface}[])`] })); - } + if (vParam.schema.type === SchemaType.Array) { + //skip-for-time-being + // if ((vParam.schema). && vParam.schema.items.details.csharp.byReference) { + // cmdletParameter.add(new Attribute(ExportAsAttribute, { parameters: [`typeof(${vParam.schema.items.details.csharp.referenceInterface}[])`] })); + // } cmdletParameter.add(new Attribute(AllowEmptyCollectionAttribute)); } - - if (vSchema.additionalProperties) { + const dictSchema = vSchema.type === SchemaType.Dictionary ? vSchema : + vSchema.type === SchemaType.Object ? (vSchema).parents?.immediate?.find((s) => s.type === SchemaType.Dictionary) : + undefined; + if (dictSchema) { // we have to figure out if this is a standalone dictionary or a hybrid object/dictionary. // if it's a hybrid, we have to create another parameter like -AdditionalProperties and have that dump the contents into the dictionary // if it's a standalone dictionary, we can just use hashtable instead - if (length(vSchema.properties) === 0) { + if (length((vSchema).properties) === 0) { // it's a pure dictionary // add an attribute for changing the exported type. 
cmdletParameter.add(new Attribute(ExportAsAttribute, { parameters: [`typeof(${System.Collections.Hashtable})`] })); @@ -1173,12 +1261,12 @@ } else { cmdletParameter.add(new Attribute(ParameterAttribute, { parameters: [new LiteralExpression(`Mandatory = ${vParam.required ? 'true' : 'false'}`), new LiteralExpression(`HelpMessage = "${escapeString(desc || '.')}"`)] })); cmdletParameter.add(new Attribute(CategoryAttribute, { parameters: [`${ParameterCategory}.Body`] })); - addInfoAttribute(cmdletParameter, propertyType, !!vParam.required, false, desc, (vParam.origin).property.serializedName); - addCompleterInfo(cmdletParameter, vParam); + NewAddInfoAttribute(cmdletParameter, propertyType, !!vParam.required, false, desc, (vParam.origin).property.serializedName); + NewAddCompleterInfo(cmdletParameter, vParam); addDefaultInfo(cmdletParameter, vParam); } - const isEnum = propertyType.schema.details.csharp.enum !== undefined; + const isEnum = propertyType instanceof EnumImplementation; const hasEnum = propertyType instanceof ArrayOf && propertyType.elementType instanceof EnumImplementation; if (isEnum || hasEnum) { cmdletParameter.add(new Attribute(ArgumentCompleterAttribute, { parameters: [`typeof(${hasEnum ? (propertyType).elementType.declaration : propertyType.declaration})`] })); } @@ -1190,8 +1278,10 @@ this.add(cmdletParameter); } - - if (parameter.schema.additionalProperties) { + const paramDictSchema = parameter.schema.type === SchemaType.Dictionary ? parameter.schema : + parameter.schema.type === SchemaType.Object ? (parameter.schema).parents?.immediate?.find((s) => s.type === SchemaType.Dictionary) : + undefined; + if (paramDictSchema) { // if there is an additional properties on this type // add a hashtable parameter for additionalProperties let apPropName = ''; @@ -1210,9 +1300,10 @@ })); this.bodyParameterInfo = { type: { - declaration: parameter.schema.details.csharp.fullname + declaration: parameter.schema.language.csharp?.fullname }, - valueType: parameter.schema.additionalProperties === true ? System.Object : this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(parameter.schema.additionalProperties, true, this.state) + valueType: (paramDictSchema).elementType.type === SchemaType.Any ? 
System.Object : + this.state.project.schemaDefinitionResolver.resolveTypeDeclaration((paramDictSchema).elementType, true, this.state) }; } @@ -1224,8 +1315,8 @@ export class CmdletClass extends Class { if (this.isViaIdentity) { // add in the pipeline parameter for the identity - const idschema = values(this.state.project.model.schemas).first(each => each.details.default.uid === 'universal-parameter-type'); - const idtd = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(idschema, true, this.state); + const idschema = values(this.state.project.model.schemas.objects).first(each => each.language.default.uid === 'universal-parameter-type'); + const idtd = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(idschema, true, this.state); const idParam = this.add(new BackedProperty('InputObject', idtd, { description: 'Identity Parameter' })); @@ -1234,11 +1325,15 @@ export class CmdletClass extends Class { idParam.add(new Attribute(CategoryAttribute, { parameters: [`${ParameterCategory}.Path`] })); } for (const vParam of values(vps.operation)) { + if (vParam.name === 'Host') { + // skip 'Host' + continue; + } const vSchema = vParam.schema; - const propertyType = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(vSchema, true, this.state); + const propertyType = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(vSchema, true, this.state); - const origin = vParam.origin; + const origin = vParam.origin; const regularCmdletParameter = (this.state.project.azure && vParam.name === 'SubscriptionId' && operation.details.csharp.verb.toLowerCase() === 'get') ? @@ -1258,11 +1353,14 @@ export class CmdletClass extends Class { description: vParam.description })); - if (vSchema.additionalProperties) { + const dictSchema = vSchema.type === SchemaType.Dictionary ? vSchema : + vSchema.type === SchemaType.Object ? (vSchema).parents?.immediate?.find((s) => s.type === SchemaType.Dictionary) : + undefined; + if (dictSchema) { // we have to figure out if this is a standalone dictionary or a hybrid object/dictionary. // if it's a hybrid, we have to create another parameter like -AdditionalProperties and have that dump the contents into the dictionary // if it's a standalone dictionary, we can just use hashtable instead - if (length(vSchema.properties) === 0) { + if (length((vSchema).properties) === 0) { // it's a pure dictionary // change the property type to hashtable. // add an attribute to change the exported type. @@ -1282,12 +1380,12 @@ export class CmdletClass extends Class { this.bodyParameter = regularCmdletParameter; } regularCmdletParameter.add(new Attribute(ParameterAttribute, { parameters })); - if (vParam.schema.type === JsonType.Array) { + if (vParam.schema.type === SchemaType.Array) { regularCmdletParameter.add(new Attribute(AllowEmptyCollectionAttribute)); } - addInfoAttribute(regularCmdletParameter, propertyType, vParam.required, false, vParam.description, vParam.origin.name); - addCompleterInfo(regularCmdletParameter, vParam); + NewAddInfoAttribute(regularCmdletParameter, propertyType, vParam.required ?? false, false, vParam.description, origin.name); + NewAddCompleterInfo(regularCmdletParameter, vParam); addDefaultInfo(regularCmdletParameter, vParam); // add aliases if there is any @@ -1296,14 +1394,15 @@ export class CmdletClass extends Class { } const httpParam = origin.details.csharp.httpParameter; - const uid = httpParam ? httpParam.details.csharp.uid : 'no-parameter'; + //const uid = httpParam ? 
httpParam.details.csharp.uid : 'no-parameter'; - const cat = values(operation.callGraph[0].parameters). - where(each => !(each.details.csharp.constantValue)). - first(each => each.details.csharp.uid === uid); + if (httpParam) { + // xichen: Is it safe to compare by csharp serializedName? Because we no longer have uid + const cat = operation.callGraph[0].parameters?.find((param) => !param.language.csharp?.constantValue && param.language.csharp?.serializedName === httpParam.language.csharp?.serializedName); - if (cat) { - regularCmdletParameter.add(new Attribute(CategoryAttribute, { parameters: [`${ParameterCategory}.${pascalCase(cat.in)}`] })); + if (cat) { + regularCmdletParameter.add(new Attribute(CategoryAttribute, { parameters: [`${ParameterCategory}.${pascalCase((cat.protocol.http?.in))}`] })); + } } @@ -1312,7 +1411,7 @@ export class CmdletClass extends Class { // regularCmdletParameter.add(new Attribute(ArgumentCompleterAttribute, { parameters: [`typeof(${this.declaration})`] })); } - const isEnum = propertyType.schema.details.csharp.enum !== undefined; + const isEnum = propertyType instanceof EnumImplementation; const hasEnum = propertyType instanceof ArrayOf && propertyType.elementType instanceof EnumImplementation; if (isEnum || hasEnum) { regularCmdletParameter.add(new Attribute(ArgumentCompleterAttribute, { parameters: [`typeof(${hasEnum ? (propertyType).elementType.declaration : propertyType.declaration})`] })); @@ -1320,17 +1419,19 @@ export class CmdletClass extends Class { } const ifmatch = this.properties.find((v) => v.name.toLowerCase() === 'ifmatch'); if (ifmatch) { + //no sure why there is an empty block } } - private addClassAttributes(operation: command.CommandOperation, variantName: string) { + + private NewAddClassAttributes(operation: CommandOperation, variantName: string) { const cmdletAttribParams: Array = [ category[operation.details.csharp.verb] ? verbEnum(category[operation.details.csharp.verb], operation.details.csharp.verb) : `"${operation.details.csharp.verb}"`, new StringExpression(variantName) ]; - if (this.isWritableCmdlet(operation)) { + if (this.NewIsWritableCmdlet(operation)) { cmdletAttribParams.push('SupportsShouldProcess = true'); } @@ -1352,56 +1453,61 @@ export class CmdletClass extends Class { // set to hold the output types const outputTypes = new Set(); for (const httpOperation of values(operation.callGraph)) { - const pageableInfo = httpOperation.details.csharp.pageable; - for (const item of items(httpOperation.responses).where(each => each.key !== 'default')) { - for (const schema of values(item.value).selectNonNullable(each => each.schema)) { - const props = getAllProperties(schema); + const pageableInfo = httpOperation.language.csharp?.pageable; + const v = httpOperation.responses && httpOperation.responses.length > 0 && httpOperation.responses[0] instanceof SchemaResponse; + for (const schema of values(httpOperation.responses).selectNonNullable(each => (each).schema)) { - // does the target type just wrap a single output? - const resultSchema = length(props) !== 1 ? schema : props[0].schema; + const props = NewGetAllProperties(schema); - // make sure return type for boolean stays boolean! - if (resultSchema.type === JsonType.Boolean) { + // does the target type just wrap a single output? + const resultSchema = length(props) !== 1 ? schema : props[0].schema; + + // make sure return type for boolean stays boolean! 
+ if (resultSchema.type === SchemaType.Boolean || + (resultSchema.type === SchemaType.Choice && (resultSchema).choiceType.type === SchemaType.Boolean && (resultSchema).choices.length === 1) || + (resultSchema.type === SchemaType.SealedChoice && (resultSchema).choiceType.type === SchemaType.Boolean && (resultSchema).choices.length === 1)) { + outputTypes.add(`typeof(${dotnet.Bool})`); + } else { + const typeDeclaration = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(resultSchema, true, this.state); + + if (typeDeclaration.declaration === System.IO.Stream.declaration || typeDeclaration.declaration === dotnet.Binary.declaration) { + // if this is a stream, skip the output type. + this.hasStreamOutput = true; + shouldAddPassThru = true; outputTypes.add(`typeof(${dotnet.Bool})`); } else { - const typeDeclaration = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(resultSchema, true, this.state); - - if (typeDeclaration.declaration === System.IO.Stream.declaration || typeDeclaration.declaration === dotnet.Binary.declaration) { - // if this is a stream, skip the output type. - this.hasStreamOutput = true; - shouldAddPassThru = true; - outputTypes.add(`typeof(${dotnet.Bool})`); - } else { - let type = ''; - if (typeDeclaration instanceof ArrayOf) { - type = typeDeclaration.elementTypeDeclaration; - } else if (pageableInfo && pageableInfo.responseType === 'pageable') { - if (typeDeclaration === undefined || typeDeclaration.schema.properties[pageableInfo.itemName] === undefined) { - throw new Error(`\n\nOn operation:\n '${httpOperation.operationId}' at '${httpOperation.path}'\n -- you have used 'x-ms-pageable' and there is no property name '${pageableInfo.itemName}' that is an array.\n\n`); - } - const nestedSchema = typeDeclaration.schema.properties[pageableInfo.itemName].schema; - const nestedTypeDeclaration = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(nestedSchema, true, this.state); - type = (nestedTypeDeclaration).elementTypeDeclaration; - } else { - type = typeDeclaration.declaration; - } - // check if this is a stream output - if (type) { - outputTypes.add(`typeof(${type})`); + let type = ''; + if (typeDeclaration instanceof ArrayOf) { + type = typeDeclaration.elementTypeDeclaration; + } else if (pageableInfo && pageableInfo.responseType === 'pageable') { + if (typeDeclaration === undefined || (typeDeclaration.schema).properties?.find(p => p.serializedName === pageableInfo.itemName) === undefined) { + //skip-for-time-being, since operationId does not support in m4 any more + //throw new Error(`\n\nOn operation:\n '${httpOperation.operationId}' at '${httpOperation.path}'\n -- you have used 'x-ms-pageable' and there is no property name '${pageableInfo.itemName}' that is an array.\n\n`); + throw new Error('An error needs to be more specific'); } + const nestedSchema = (typeDeclaration.schema).properties?.find(p => p.serializedName === pageableInfo.itemName)?.schema; + const nestedTypeDeclaration = this.state.project.schemaDefinitionResolver.resolveTypeDeclaration(nestedSchema, true, this.state); + type = (nestedTypeDeclaration).elementTypeDeclaration; + } else { + type = typeDeclaration.declaration; + } + // check if this is a stream output + if (type) { + outputTypes.add(`typeof(${type})`); } } } + } } // if any response does not return, // the cmdlet should have a PassThru parameter shouldAddPassThru = shouldAddPassThru || values(operation.callGraph) - .selectMany(httpOperation => items(httpOperation.responses)) - 
.selectMany(responsesItem => responsesItem.value) - .any(value => value.schema === undefined); + .selectMany(httpOperation => values((httpOperation.responses || []).concat(httpOperation.exceptions || []))) + //.selectMany(responsesItem => responsesItem.value) + .any(value => (value).schema === undefined); if (outputTypes.size === 0) { outputTypes.add(`typeof(${dotnet.Bool})`); } @@ -1425,4 +1531,4 @@ export class CmdletClass extends Class { this.add(new Attribute(ProfileAttribute, { parameters: [...profileNames] })); } } -} +} \ No newline at end of file diff --git a/powershell/enums/namespace.ts b/powershell/enums/namespace.ts index e6473ce05fd..d704c7b2378 100644 --- a/powershell/enums/namespace.ts +++ b/powershell/enums/namespace.ts @@ -10,6 +10,8 @@ import { IArgumentCompleter, CompletionResult, CommandAst, CompletionResultType, import { join } from 'path'; import { DeepPartial } from '@azure-tools/codegen'; +import { EnumDetails as NewEnumDetails } from '../utils/schema'; + export class EnumNamespace extends Namespace { public get outputFolder(): string { return join(this.state.project.apiFolder, 'Support'); @@ -18,15 +20,18 @@ export class EnumNamespace extends Namespace { constructor(parent: Namespace, public state: State, objectInitializer?: DeepPartial) { super('Support', parent); this.apply(objectInitializer); - - const enumInfos = values(state.model.schemas) - .where(each => each.details.csharp.enum !== undefined && !each.details.csharp.skip) - .select(each => ({ details: each.details.csharp.enum, description: each.details.csharp.description })) - .toArray(); + //const enumInfos = [...state.model.schemas.sealedChoices ?? [], ...state.model.schemas.choices ?? []] + const enumInfos = [...state.model.schemas.sealedChoices ?? []] + .filter((choice) => !choice.language.csharp?.skip) + .map((choice) => { + return { + details: choice.language.csharp?.enum, + description: choice.language.csharp?.description + } + }); const done = new Set(); - for (const enumInfo of enumInfos) { if (done.has(enumInfo.details.name)) { continue; diff --git a/powershell/generators/nuspec.ts b/powershell/generators/nuspec.ts index 15bc147ae43..7e593cedbe0 100644 --- a/powershell/generators/nuspec.ts +++ b/powershell/generators/nuspec.ts @@ -41,6 +41,7 @@ export async function generateNuspec(project: Project) { + `, undefined, 'source-file-other'); } diff --git a/powershell/generators/psm1.ts b/powershell/generators/psm1.ts index 8a545e4e45e..82b0fa54ebb 100644 --- a/powershell/generators/psm1.ts +++ b/powershell/generators/psm1.ts @@ -7,6 +7,7 @@ import { Project } from '../internal/project'; import { PSScriptFile } from '../file-formats/psscript-file'; import { relative } from 'path'; + export function getProfileExportScript(exportFolderScript: string, isAzure: boolean): string { return ` # Export proxy cmdlet scripts diff --git a/powershell/generators/script-cmdlet.ts b/powershell/generators/script-cmdlet.ts index b4794aacb06..741a1292cbd 100644 --- a/powershell/generators/script-cmdlet.ts +++ b/powershell/generators/script-cmdlet.ts @@ -7,7 +7,6 @@ import { Project } from '../internal/project'; import { serialize, indent, setIndentation, applyOverrides, pascalCase } from '@azure-tools/codegen'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; -import { State } from '../internal/state'; import { dotnet, System } from '@azure-tools/codegen-csharp'; import { PSScriptFile } from '../file-formats/psscript-file'; diff --git a/powershell/internal/project.ts 
b/powershell/internal/project.ts index 3be08c27771..ca37f32a5a9 100644 --- a/powershell/internal/project.ts +++ b/powershell/internal/project.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { Dictionary } from '@azure-tools/linq'; -import { SchemaDefinitionResolver, SchemaDetails, LanguageDetails, EnhancedTypeDeclaration, Boolean } from '../llcsharp/exports'; +import { SchemaDetails, LanguageDetails, EnhancedTypeDeclaration, Boolean, SchemaDefinitionResolver } from '../llcsharp/exports'; import { State } from './state'; import { Project as codeDomProject } from '@azure-tools/codegen-csharp'; import { EnumNamespace } from '../enums/namespace'; @@ -13,8 +13,11 @@ import { ModelExtensionsNamespace } from '../models/model-extensions'; import { ModuleNamespace } from '../module/module-namespace'; import { CmdletNamespace } from '../cmdlets/namespace'; import { Host } from '@azure-tools/autorest-extension-base'; -import { codemodel, PropertyDetails, exportedModels as T, ModelState, JsonType, } from '@azure-tools/codemodel-v3'; +import { codemodel, PropertyDetails, exportedModels as T } from '@azure-tools/codemodel-v3'; import { DeepPartial } from '@azure-tools/codegen'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; +import { BooleanSchema, ChoiceSchema, ConstantSchema, Schema as NewSchema, SchemaType } from '@azure-tools/codemodel'; export type Schema = T.SchemaT, LanguageDetails>; @@ -30,22 +33,25 @@ export interface Metadata { projectUri: string; } -export class PSSwitch extends Boolean { + + +export class NewPSSwitch extends Boolean { get declaration(): string { return `global::System.Management.Automation.SwitchParameter${this.isRequired ? 
'' : '?'}`; } } - export class PSSchemaResolver extends SchemaDefinitionResolver { inResolve = false; - resolveTypeDeclaration(schema: Schema | undefined, required: boolean, state: ModelState): EnhancedTypeDeclaration { + resolveTypeDeclaration(schema: NewSchema | undefined, required: boolean, state: ModelState): EnhancedTypeDeclaration { const before = this.inResolve; try { if (!this.inResolve) { this.inResolve = true; - if (schema && schema.type === JsonType.Boolean) { - return new PSSwitch(schema, required); + if (schema && (schema.type === SchemaType.Boolean + || (schema.type === SchemaType.Constant && (schema).valueType.type === SchemaType.Boolean) + || (schema.type === SchemaType.Choice && (schema).choiceType.type === SchemaType.Boolean))) { + return new NewPSSwitch(schema, required); } } @@ -56,12 +62,14 @@ export class PSSchemaResolver extends SchemaDefinitionResolver { } } + export class Project extends codeDomProject { public azure!: boolean; public license!: string; public cmdletFolder!: string; public customFolder!: string; + public utilsFolder!: string; public internalFolder!: string; public testFolder!: string; public runtimeFolder!: string; @@ -106,7 +114,7 @@ export class Project extends codeDomProject { public metadata!: Metadata; public state!: State; public helpLinkPrefix!: string; - get model() { return this.state.model; } + get model() { return this.state.model; } constructor(protected service: Host, objectInitializer?: DeepPartial) { super(); @@ -120,7 +128,7 @@ export class Project extends codeDomProject { this.schemaDefinitionResolver = new PSSchemaResolver(); - this.projectNamespace = this.state.model.details.csharp.namespace; + this.projectNamespace = this.state.model.language.csharp?.namespace || ''; this.overrides = { @@ -141,19 +149,21 @@ export class Project extends codeDomProject { // Values this.moduleVersion = await this.state.getValue('module-version'); - this.profiles = this.model.info.extensions['x-ms-metadata'].profiles || []; + // skip-for-time-being + //this.profiles = this.model.info.extensions['x-ms-metadata'].profiles || []; + this.profiles = []; this.accountsVersionMinimum = '1.8.1'; this.helpLinkPrefix = await this.state.getValue('help-link-prefix'); this.metadata = await this.state.getValue('metadata'); this.license = await this.state.getValue('header-text', ''); // Flags - this.azure = this.model.details.default.isAzure; + this.azure = this.model.language.default.isAzure; // Names - this.prefix = this.model.details.default.prefix; - this.serviceName = this.model.details.default.serviceName; - this.subjectPrefix = this.model.details.default.subjectPrefix; + this.prefix = this.model.language.default.prefix; + this.serviceName = this.model.language.default.serviceName; + this.subjectPrefix = this.model.language.default.subjectPrefix; this.moduleName = await this.state.getValue('module-name'); this.dllName = await this.state.getValue('dll-name'); @@ -163,6 +173,7 @@ export class Project extends codeDomProject { this.cmdletFolder = await this.state.getValue('cmdlet-folder'); this.customFolder = await this.state.getValue('custom-cmdlet-folder'); + this.utilsFolder = await this.state.getValue('utils-cmdlet-folder'); this.internalFolder = await this.state.getValue('internal-cmdlet-folder'); this.testFolder = await this.state.getValue('test-folder'); this.runtimeFolder = await this.state.getValue('runtime-folder'); diff --git a/powershell/internal/state.ts b/powershell/internal/state.ts index 37c70ab3b45..c5253f50958 100644 --- 
a/powershell/internal/state.ts +++ b/powershell/internal/state.ts @@ -3,11 +3,13 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { codemodel, ModelState } from '@azure-tools/codemodel-v3'; +import { codemodel } from '@azure-tools/codemodel-v3'; -import { Host, JsonPath } from '@azure-tools/autorest-extension-base'; +import { Host, JsonPath, Session } from '@azure-tools/autorest-extension-base'; import { Project } from './project'; import { DeepPartial } from '@azure-tools/codegen'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; export interface GeneratorSettings { @@ -22,7 +24,7 @@ export interface GeneratorSettings { apiFolder: string; } -export class State extends ModelState { +export class State extends ModelState { project!: Project; public constructor(service: Host, objectInitializer?: DeepPartial) { @@ -40,8 +42,7 @@ export class State extends ModelState { path(...childPath: JsonPath) { // const result = new State(this.service, this); // result.currentPath = [...this.currentPath, ...childPath]; - // return result; + //return result; return this; } -} - +} \ No newline at end of file diff --git a/powershell/llcsharp/enums/enum.ts b/powershell/llcsharp/enums/enum.ts index e44e8783673..c7de14f5ecc 100644 --- a/powershell/llcsharp/enums/enum.ts +++ b/powershell/llcsharp/enums/enum.ts @@ -25,9 +25,11 @@ import { EnhancedTypeDeclaration } from '../schema/extended-type-declaration'; import { State } from '../generator'; import { DeepPartial } from '@azure-tools/codegen'; +import { Schema as NewSchema } from '@azure-tools/codemodel'; + export class EnumClass extends Struct implements EnhancedTypeDeclaration { implementation: EnumImplementation; - get schema(): Schema { + get schema(): NewSchema { return this.implementation.schema; } get convertObjectMethod() { @@ -77,23 +79,23 @@ export class EnumClass extends Struct implements EnhancedTypeDeclaration { } constructor(schemaWithFeatures: EnumImplementation, state: State, objectInitializer?: DeepPartial) { - if (!schemaWithFeatures.schema.details.csharp.enum) { - throw new Error(`ENUM AINT XMSENUM: ${schemaWithFeatures.schema.details.csharp.name}`); + if (!schemaWithFeatures.schema.language.csharp?.enum) { + throw new Error(`ENUM AINT XMSENUM: ${schemaWithFeatures.schema.language.csharp?.name}`); } - super(state.project.supportNamespace, schemaWithFeatures.schema.details.csharp.enum.name, undefined, { + super(state.project.supportNamespace, schemaWithFeatures.schema.language.csharp?.enum.name, undefined, { interfaces: [new Interface(new Namespace('System'), 'IEquatable', { - genericParameters: [`${schemaWithFeatures.schema.details.csharp.enum.name}`] + genericParameters: [`${schemaWithFeatures.schema.language.csharp?.enum.name}`] })], }); - this.description = schemaWithFeatures.schema.details.csharp.description; + this.description = schemaWithFeatures.schema.language.csharp?.description; this.implementation = schemaWithFeatures; this.partial = true; this.apply(objectInitializer); // add known enum values - for (const evalue of schemaWithFeatures.schema.details.csharp.enum.values) { + for (const evalue of schemaWithFeatures.schema.language.csharp?.enum.values) { this.addField(new Field(evalue.name, this, { initialValue: new StringExpression(evalue.value), static: Modifier.Static, description: evalue.description })); } @@ -186,4 +188,4 @@ 
export class EnumClass extends Struct implements EnhancedTypeDeclaration { public validatePresence(eventListener: Variable, property: Variable): OneOrMoreStatements { return this.implementation.validatePresence(eventListener, property); } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/enums/json-serializer.ts b/powershell/llcsharp/enums/json-serializer.ts deleted file mode 100644 index 991df60b157..00000000000 --- a/powershell/llcsharp/enums/json-serializer.ts +++ /dev/null @@ -1,48 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { Modifier } from '@azure-tools/codegen-csharp'; -import { Class } from '@azure-tools/codegen-csharp'; -import { Method } from '@azure-tools/codegen-csharp'; - -import { items } from '@azure-tools/linq'; -import { dotnet } from '@azure-tools/codegen-csharp'; -import { Namespace } from '@azure-tools/codegen-csharp'; -import { Parameter } from '@azure-tools/codegen-csharp'; -import { ClientRuntime } from '../clientruntime'; -import { State } from '../generator'; -import { DeepPartial } from '@azure-tools/codegen'; - -export class JsonSerializerClass extends Class { - - constructor(namespace: Namespace, protected state: State, objectInitializer?: DeepPartial) { - super(namespace, 'JsonSerialization'); - this.apply(objectInitializer); - - this.partial = true; - this.isStatic = true; - - const tojson = this.addMethod(new Method('ToJson', ClientRuntime.JsonNode, { static: Modifier.Static })); - const objP = tojson.addParameter(new Parameter('obj', dotnet.ThisObject)); - const container = tojson.addParameter(new Parameter('container', ClientRuntime.JsonObject, { defaultInitializer: dotnet.Null })); - tojson.add('return null;'); - - const schemas = state.model.schemas; - - // iterate thru all the models - // each model should have a class and an interface. - // if the class has a discriminator value, tell the parent model that it has children - for (const { key: index, value: schema } of items(schemas)) { - const state = this.state.path('components', 'schemas', index); - if (schema.details.csharp && schema.details.csharp.classImplementation) { - // must be a class. - // const implData: CSharpSchemaDetails = schema.details.csharp; - } - - schema.details; - const x = state.project.modelsNamespace.resolveTypeDeclaration(schema, true, state); - } - } -} diff --git a/powershell/llcsharp/generator.ts b/powershell/llcsharp/generator.ts index 5f16695b6e8..b24d9690c6d 100644 --- a/powershell/llcsharp/generator.ts +++ b/powershell/llcsharp/generator.ts @@ -3,15 +3,17 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { ModelState } from '@azure-tools/codemodel-v3'; +import { codeModelSchema } from '@azure-tools/codemodel'; import { Model } from './code-model'; -import { Host, JsonPath } from '@azure-tools/autorest-extension-base'; +import { Host, JsonPath, Session, startSession } from '@azure-tools/autorest-extension-base'; import { Project } from './project'; import { Dictionary } from '@azure-tools/linq'; import { DeepPartial } from '@azure-tools/codegen'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; -export class State extends ModelState { +export class State extends ModelState { project!: Project; public constructor(service: Host, objectInitializer?: DeepPartial) { @@ -23,7 +25,9 @@ export class State extends ModelState { if (project) { this.project = project; } + //const session = await startSession(this.service, {}, codeModelSchema); return await super.init(project); + //return await super.init(project); } path(...childPath: JsonPath) { @@ -32,4 +36,4 @@ export class State extends ModelState { //return result; return this; } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/model/interface.ts b/powershell/llcsharp/model/interface.ts index eba0be9d479..e8be1b36e50 100644 --- a/powershell/llcsharp/model/interface.ts +++ b/powershell/llcsharp/model/interface.ts @@ -8,12 +8,14 @@ import { KnownMediaType, JsonType, getPolymorphicBases } from '@azure-tools/code import { Expression, ExpressionOrLiteral, Interface, Namespace, OneOrMoreStatements, Variable, Access, InterfaceProperty, Attribute, StringExpression, LiteralExpression, Property, TypeDeclaration } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; +import { Schema as NewSchema, Language, ObjectSchema } from '@azure-tools/codemodel'; import { State } from '../generator'; import { EnhancedTypeDeclaration } from '../schema/extended-type-declaration'; import { ModelClass } from './model-class'; import { TypeContainer } from '@azure-tools/codegen-csharp'; import { DeepPartial } from '@azure-tools/codegen'; import { values } from '@azure-tools/linq'; +import { VirtualProperty as NewVirtualProperty, VirtualProperties as NewVirtualProperties } from '../../utils/schema' export function addInfoAttribute(targetProperty: Property, pType: TypeDeclaration, isRequired: boolean, isReadOnly: boolean, description: string, serializedName: string) { @@ -69,9 +71,62 @@ export function addInfoAttribute(targetProperty: Property, pType: TypeDeclaratio })); } +export function newAddInfoAttribute(targetProperty: Property, pType: TypeDeclaration, isRequired: boolean, isReadOnly: boolean, description: string, serializedName: string) { + + let pt = pType; + while (pt.elementType) { + switch (pt.elementType.schema.type) { + case JsonType.Object: + if (pt.elementType.schema.language.csharp.interfaceImplementation) { + pt = { + declaration: pt.elementType.schema.language.csharp.interfaceImplementation.declaration, + schema: pt.elementType.schema, + }; + } else { + // arg! it's not done yet. Hope it's not polymorphic itself. 
+ pt = { + declaration: `${pt.elementType.schema.language.csharp.namespace}.${pt.elementType.schema.language.csharp.interfaceName}`, + schema: pt.elementType.schema, + }; + } + break; + + case JsonType.Array: + pt = pt.elementType; + break; + + default: + pt = pt.elementType; + break; + } + } + const ptypes = new Array(); + + if (pt.schema && pt.schema.language.csharp.byReference) { + ptypes.push(`typeof(${pt.schema.language.csharp.namespace}.${pt.schema.language.csharp.interfaceName}_Reference)`); + // do we need polymorphic types for by-resource ? Don't think so. + } else { + ptypes.push(`typeof(${pt.declaration})`); + if (pt.schema && pt.schema.language.csharp.classImplementation && pt.schema.language.csharp.classImplementation.discriminators) { + ptypes.push(...[...pt.schema.language.csharp.classImplementation.discriminators.values()].map(each => `typeof(${each.modelInterface.fullName})`)); + } + } + + targetProperty.add(new Attribute(ClientRuntime.InfoAttribute, { + parameters: [ + new LiteralExpression(`\nRequired = ${isRequired}`), + new LiteralExpression(`\nReadOnly = ${isReadOnly}`), + new LiteralExpression(`\nDescription = ${new StringExpression(description ?? '').value}`), + new LiteralExpression(`\nSerializedName = ${new StringExpression(serializedName).value}`), + new LiteralExpression(`\nPossibleTypes = new [] { ${ptypes.join(',').replace(/\?/g, '').replace(/undefined\./g, '')} }`), + ] + })); +} + + export class ModelInterface extends Interface implements EnhancedTypeDeclaration { - get schema(): Schema { + get schema(): NewSchema { return this.classImplementation.schema; } @@ -144,17 +199,17 @@ export class ModelInterface extends Interface implements EnhancedTypeDeclaration init() { (this).init = () => { }; // only allow a single init! 
- - const implData = (this.schema.details.csharp = this.schema.details.csharp || {}); + this.schema.language.csharp = this.schema.language.csharp || new Language(); + const implData = (this.schema.language.csharp = this.schema.language.csharp || {}); //implData.interfaceImplementation = this; - this.description = `${this.schema.details.csharp.description}`; + this.description = `${this.schema.language.csharp.description}`; - const virtualProperties = this.schema.details.csharp.virtualProperties || { + const virtualProperties: NewVirtualProperties = this.schema.language.csharp.virtualProperties || { owned: [], inherited: [], inlined: [] }; - if (this.schema.details.csharp.virtualProperties) { + if (this.schema.language.csharp.virtualProperties) { for (const virtualProperty of values(virtualProperties.owned)) { if (virtualProperty.private && !this.isInternal) { @@ -163,18 +218,18 @@ export class ModelInterface extends Interface implements EnhancedTypeDeclaration const modelProperty = virtualProperty.property; - const internalSet = !!(!this.isInternal && (modelProperty.details.csharp.readOnly || modelProperty.details.csharp.constantValue)); + const internalSet = !!(!this.isInternal && (modelProperty.readOnly || (modelProperty.language.csharp).constantValue)); - const isRequired = !!modelProperty.details.csharp.required; - const pType = this.state.project.modelsNamespace.resolveTypeDeclaration(modelProperty.schema, isRequired, this.state.path('schema')); + const isRequired = !!modelProperty.required; + const pType = this.state.project.modelsNamespace.NewResolveTypeDeclaration(modelProperty.schema, isRequired, this.state.path('schema')); const p = this.add(new InterfaceProperty(virtualProperty.name, pType, { - description: modelProperty.details.csharp.description, + description: modelProperty.language.default.description, setAccess: internalSet ? Access.Internal : Access.Public })); - this.addInfoAttribute(p, pType, isRequired, internalSet, modelProperty.details.csharp.description, modelProperty.serializedName); + this.addInfoAttribute(p, pType, isRequired, internalSet, modelProperty.language.default.description, modelProperty.serializedName); - if (!this.isInternal && modelProperty.details.csharp.constantValue !== undefined) { + if (!this.isInternal && (modelProperty.language.csharp).constantValue !== undefined) { p.setAccess = Access.Internal; } } @@ -187,23 +242,23 @@ export class ModelInterface extends Interface implements EnhancedTypeDeclaration } const modelProperty = virtualProperty.property; - const isRequired = !!modelProperty.details.csharp.required; - const pType = this.state.project.modelsNamespace.resolveTypeDeclaration(modelProperty.schema, isRequired, this.state.path('schema')); + const isRequired = !!modelProperty.required; + const pType = this.state.project.modelsNamespace.NewResolveTypeDeclaration(modelProperty.schema, isRequired, this.state.path('schema')); - const internalSet = !!(!this.isInternal && (modelProperty.details.csharp.readOnly || modelProperty.details.csharp.constantValue)); + const internalSet = !!(!this.isInternal && (modelProperty.readOnly || (modelProperty.language.csharp).constantValue)); const p = this.add(new InterfaceProperty(virtualProperty.name, pType, { - description: modelProperty.details.csharp.description, + description: modelProperty.language.default.description, setAccess: internalSet ? 
Access.Internal : Access.Public })); - this.addInfoAttribute(p, pType, isRequired, internalSet, modelProperty.details.csharp.description, modelProperty.serializedName); + this.addInfoAttribute(p, pType, isRequired, internalSet, modelProperty.language.default.description, modelProperty.serializedName); } } if (!this.isInternal) { // mark it as json serializable - if (!this.schema.details.csharp.isHeaderModel) { + if (!this.schema.language.csharp.isHeaderModel) { if (this.state.project.jsonSerialization) { this.interfaces.push(ClientRuntime.IJsonSerializable); } @@ -217,8 +272,7 @@ export class ModelInterface extends Interface implements EnhancedTypeDeclaration addInfoAttribute(p: Property, pType: TypeDeclaration, isRequired: boolean, internalSet: boolean, description: string, serializedName: string) { if (!this.isInternal) { - return addInfoAttribute(p, pType, isRequired, internalSet, description, serializedName); + return newAddInfoAttribute(p, pType, isRequired, internalSet, description, serializedName); } } } - diff --git a/powershell/llcsharp/model/model-class-dictionary.ts b/powershell/llcsharp/model/model-class-dictionary.ts index 728e369caee..3b43e522eba 100644 --- a/powershell/llcsharp/model/model-class-dictionary.ts +++ b/powershell/llcsharp/model/model-class-dictionary.ts @@ -8,6 +8,7 @@ import { EnhancedTypeDeclaration } from '../schema/extended-type-declaration'; import { ClientRuntime } from '../clientruntime'; import { getAllVirtualProperties } from '@azure-tools/codemodel-v3'; import { DeepPartial } from '@azure-tools/codegen'; +import { DictionarySchema, ObjectSchema, SchemaType, Schema } from '@azure-tools/codemodel'; export class DictionaryImplementation extends Class { private get state() { return this.modelClass.state; } @@ -27,9 +28,10 @@ export class DictionaryImplementation extends Class { } else { - if (this.schema.additionalProperties) { + const dictSchema = (this.schema).type === SchemaType.Dictionary ? this.schema : this.schema.parents?.immediate?.find((s) => s.type === SchemaType.Dictionary); + if (dictSchema) { this.ownsDictionary = true; - this.valueType = this.schema.additionalProperties === true ? System.Object : this.state.project.modelsNamespace.resolveTypeDeclaration(this.schema.additionalProperties, true, this.state); + this.valueType = (dictSchema).elementType.type === SchemaType.Any ? System.Object : this.state.project.modelsNamespace.NewResolveTypeDeclaration((dictSchema).elementType, true, this.state); this.modelClass.modelInterface.interfaces.push(this.implementIDictionary(this, 'additionalProperties', System.String, this.valueType)); } } @@ -85,7 +87,7 @@ export class DictionaryImplementation extends Class { targetClass.add(new Method('TryGetValue', dotnet.Bool, { parameters: [pKey, pOutValue], body: toExpression(`${accessViaMember}.TryGetValue( ${pKey}, out ${pOutValue})`), access: Access.Public })); - const all = getAllVirtualProperties(this.schema.details.csharp.virtualProperties); + const all = getAllVirtualProperties(this.schema.language.csharp?.virtualProperties); const exclusions = all.map(each => `"${each.name}"`).join(','); // add a CopyFrom that takes an IDictionary or PSObject and copies the values into this dictionary diff --git a/powershell/llcsharp/model/model-class-json.ts b/powershell/llcsharp/model/model-class-json.ts index 0751b79eb5c..d4043908424 100644 --- a/powershell/llcsharp/model/model-class-json.ts +++ b/powershell/llcsharp/model/model-class-json.ts @@ -2,7 +2,9 @@ * Copyright (c) Microsoft Corporation. All rights reserved. 
* Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +import { Schema as NewSchema, ObjectSchema, SchemaType } from '@azure-tools/codemodel'; import { KnownMediaType, HeaderProperty, HeaderPropertyType, getAllProperties } from '@azure-tools/codemodel-v3'; +import { getAllProperties as newGetAllProperties } from '@azure-tools/codemodel'; import { EOL, DeepPartial, } from '@azure-tools/codegen'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; import { Access, Modifier, StringExpression, Expression, System } from '@azure-tools/codegen-csharp'; @@ -26,8 +28,9 @@ import { ModelClass } from './model-class'; import { EnhancedTypeDeclaration } from '../schema/extended-type-declaration'; import { popTempVar, pushTempVar } from '../schema/primitive'; -import { ModelProperty } from './property'; import { ObjectImplementation } from '../schema/object'; +import { ModelInterface } from './interface'; + export class JsonSerializableClass extends Class { private btj!: Method; @@ -71,7 +74,7 @@ export class JsonSerializableClass extends Class { const exclusions = new Parameter('exclusions', System.Collections.Generic.HashSet(dotnet.String), { defaultInitializer: dotnet.Null }); deserializerConstructor.parameters.push(exclusions); - this.excludes = [...values(getAllProperties(this.modelClass.schema)).select(each => each.serializedName).select(each => new StringExpression(each))].join(); + this.excludes = [...values(newGetAllProperties(this.modelClass.schema)).select(each => each.serializedName).select(each => new StringExpression(each))].join(); this.excludes = this.excludes ? `,${System.Collections.Generic.HashSet(dotnet.String).new()}{ ${this.excludes} }` : ''; const ap = `((${ClientRuntime}.IAssociativeArray<${vType.declaration}>)this).AdditionalProperties`; @@ -85,8 +88,9 @@ export class JsonSerializableClass extends Class { // wildcard style deserializeStatements.push(new Statements(`${ClientRuntime.JsonSerializable}.FromJson( json, ${ap}, ${ClientRuntime.JsonSerializable}.DeserializeDictionary(()=>${System.Collections.Generic.Dictionary(System.String, System.Object).new()}),${exclusions.value} );`)); - } else if (vType instanceof ObjectImplementation) { - deserializeStatements.push(new Statements(`${ClientRuntime.JsonSerializable}.FromJson( json, ${ap}, (j) => ${this.modelClass.fullName}.FromJson(j) ,${exclusions.value} );`)); + } else if (vType instanceof ModelInterface) { + // use the class of the dictionary value to deserialize values + deserializeStatements.push(new Statements(`${ClientRuntime.JsonSerializable}.FromJson( json, ${ap}, (j) => ${vType.classImplementation.fullName}.FromJson(j) ,${exclusions.value} );`)); } else { deserializeStatements.push(new Statements(`${ClientRuntime.JsonSerializable}.FromJson( json, ${ap}, null ,${exclusions.value} );`)); } @@ -96,7 +100,11 @@ export class JsonSerializableClass extends Class { for (const each of values(modelClass.backingFields)) { serializeStatements.add(`${each.field.value}?.ToJson(${container}, ${mode.use});`); - if ((each.typeDeclaration).schema.additionalProperties) { + const sch = (each.typeDeclaration).schema; + const dictSchema = sch.type === SchemaType.Dictionary ? sch : + sch.type === SchemaType.Object ? 
(sch).parents?.immediate.find((s) => s.type === SchemaType.Dictionary) : + undefined; + if (dictSchema) { deserializeStatements.add(`${each.field.value} = new ${each.className}(json${this.excludes});`); } else { deserializeStatements.add(`${each.field.value} = new ${each.className}(json);`); @@ -105,12 +113,12 @@ export class JsonSerializableClass extends Class { pushTempVar(); for (const prop of values(modelClass.ownedProperties)) { - if (prop.details.csharp.HeaderProperty === 'Header') { + if (prop.language.csharp.HeaderProperty === 'Header') { continue; } const serializeStatement = (prop.type).serializeToContainerMember(KnownMediaType.Json, prop.valuePrivate, container, prop.serializedName, mode); - if (prop.details.csharp.readOnly) { + if (prop.language.csharp.readOnly) { serializeStatements.add(If(`${mode.use}.HasFlag(${ClientRuntime.SerializationMode.IncludeReadOnly})`, serializeStatement)); } else { serializeStatements.add(serializeStatement); @@ -174,9 +182,8 @@ export class JsonSerializableClass extends Class { yield If(Not(json.check), Return(dotnet.Null)); yield '// Polymorphic type -- select the appropriate constructor using the discriminator'; /** go thru the list of polymorphic values for the discriminator, and call the target class's constructor for that */ - if ($this.schema.discriminator) { - yield Switch(toExpression(`json.StringProperty("${$this.schema.discriminator.propertyName}")`), function* () { + yield Switch(toExpression(`json.StringProperty("${$this.schema.discriminator.property.serializedName}")`), function* () { for (const { key, value } of items(d)) { yield TerminalCase(`"${key}"`, function* () { yield Return(value.new(json)); @@ -238,4 +245,4 @@ export class JsonSerializableClass extends Class { description: 'AfterFromJson will be called after the json deserialization has finished, allowing customization of the object before it is returned. 
Implement this method in a partial class to enable this behavior ' })); } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/model/model-class-serializer.ts b/powershell/llcsharp/model/model-class-serializer.ts index f11323f523e..28caac5c536 100644 --- a/powershell/llcsharp/model/model-class-serializer.ts +++ b/powershell/llcsharp/model/model-class-serializer.ts @@ -23,24 +23,25 @@ import { Ternery } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { dotnet } from '@azure-tools/codegen-csharp'; -import { ModelClass } from './model-class'; import { EnhancedTypeDeclaration } from '../schema/extended-type-declaration'; import { popTempVar, pushTempVar } from '../schema/primitive'; -import { ModelProperty } from './property'; import { ObjectImplementation } from '../schema/object'; import { Schema } from '../code-model'; +import { DictionarySchema, ObjectSchema, Schema as NewSchema, SchemaType } from '@azure-tools/codemodel'; import { getVirtualPropertyName } from './model-class'; +import { VirtualProperty as NewVirtualProperty } from '../../utils/schema'; + export class SerializationPartialClass extends Initializer { - constructor(protected targetClass: Class, protected targetInterface: TypeDeclaration, protected serializationType: TypeDeclaration, protected serializationFormat: string, protected schema: Schema, protected resolver: (s: Schema, req: boolean) => EnhancedTypeDeclaration, objectInitializer?: DeepPartial) { + constructor(protected targetClass: Class, protected targetInterface: TypeDeclaration, protected serializationType: TypeDeclaration, protected serializationFormat: string, protected schema: NewSchema, protected resolver: (s: NewSchema, req: boolean) => EnhancedTypeDeclaration, objectInitializer?: DeepPartial) { super(); this.apply(objectInitializer); } protected get virtualProperties() { - return this.schema.details.csharp.virtualProperties || { + return this.schema.language.csharp?.virtualProperties || { owned: [], inherited: [], inlined: [] @@ -73,7 +74,7 @@ export class SerializationPartialClass extends Initializer { export class DeserializerPartialClass extends SerializationPartialClass { private beforeDeserialize!: Method; private afterDeserialize!: Method; - constructor(targetClass: Class, targetInterface: TypeDeclaration, protected serializationType: TypeDeclaration, protected serializationFormat: string, protected schema: Schema, resolver: (s: Schema, req: boolean) => EnhancedTypeDeclaration, objectInitializer?: DeepPartial) { + constructor(targetClass: Class, targetInterface: TypeDeclaration, protected serializationType: TypeDeclaration, protected serializationFormat: string, protected schema: NewSchema, resolver: (s: NewSchema, req: boolean) => EnhancedTypeDeclaration, objectInitializer?: DeepPartial) { super(targetClass, targetInterface, serializationType, serializationFormat, schema, resolver); this.apply(objectInitializer); } @@ -100,6 +101,7 @@ export class DeserializerPartialClass extends SerializationPartialClass { yield `${$this.beforeDeserialize.name}(${$this.contentParameter}, ref ${returnNow.value});`; yield If(returnNow, 'return;'); yield $this.deserializeStatements; + if ($this.hasAadditionalProperties($this.schema)) { // this type has an additional properties dictionary yield '// this type is a dictionary; copy elements from source to here.'; @@ -110,16 +112,22 @@ export class DeserializerPartialClass extends SerializationPartialClass { }); } - private hasAadditionalProperties(aSchema: Schema): 
boolean { - if (aSchema.additionalProperties) { + private hasAadditionalProperties(aSchema: NewSchema): boolean { + if (aSchema.type === SchemaType.Dictionary) { return true; - } else - for (const each of values(aSchema.allOf)) { - const r = this.hasAadditionalProperties(each); - if (r) { - return r; - } + } + if (aSchema.type !== SchemaType.Object) { + return false; + } + const objSchema = (aSchema).parents?.immediate; + if (!objSchema || objSchema.length === 0) { + return false; + } + for (const parent of objSchema) { + if (this.hasAadditionalProperties(parent)) { + return true; } + } return false; } @@ -128,15 +136,13 @@ export class DeserializerPartialClass extends SerializationPartialClass { return function* () { yield '// actually deserialize '; - - for (const virtualProperty of values($this.allVirtualProperties)) { + for (const virtualProperty of values(>$this.allVirtualProperties)) { // yield `// deserialize ${virtualProperty.name} from ${$this.serializationFormat}`; - const type = $this.resolver(virtualProperty.property.schema, virtualProperty.property.details.default.required); + const type = $this.resolver(virtualProperty.property.schema, virtualProperty.property.language.default.required); const cvt = type.convertObjectMethod; - const t = `((${virtualProperty.originalContainingSchema.details.csharp.fullInternalInterfaceName})this)`; + const t = `((${virtualProperty.originalContainingSchema.language.csharp?.fullInternalInterfaceName})this)`; const tt = type ? `(${type.declaration})` : ''; - yield `${t}.${getVirtualPropertyName(virtualProperty)} = ${tt} ${$this.contentParameter}.GetValueForProperty("${getVirtualPropertyName(virtualProperty)}",${t}.${getVirtualPropertyName(virtualProperty)}, ${cvt});`; } }; @@ -176,45 +182,4 @@ export class DeserializerPartialClass extends SerializationPartialClass { } } -export class SerializerPartialClass extends SerializationPartialClass { - private beforeSerialize!: Method; - private afterSerialize!: Method; - constructor(targetClass: Class, targetInterface: TypeDeclaration, protected serializationType: TypeDeclaration, protected serializationFormat: string, protected schema: Schema, resolver: (s: Schema, req: boolean) => EnhancedTypeDeclaration, objectInitializer?: DeepPartial) { - super(targetClass, targetInterface, serializationType, serializationFormat, schema, resolver); - this.apply(objectInitializer); - } - async init() { - this.addPartialMethods(); - this.addSerializer(); - - } - - protected addSerializer() { - const serializeMethod = this.targetClass.addMethod(new Method(`SerializeTo${this.serializationFormat}`, this.serializationType, { - parameters: [this.refContainerParameter], - description: `Serializes this instance of ${this.thisCref} into a ${this.typeCref}.`, - returnsDescription: `a serialized instance of ${this.thisCref} /> as a ${this.typeCref}.` - })); - - } - - protected addPartialMethods() { - const before = `BeforeSerialize${this.serializationFormat}`; - const after = `AfterSerialize${this.serializationFormat}`; - // add partial methods for future customization - this.beforeSerialize = this.targetClass.addMethod(new PartialMethod(before, dotnet.Void, { - access: Access.Default, - parameters: [this.refContainerParameter, this.returnNowParameter], - description: `${before} will be called before the serialization has commenced, allowing complete customization of the object before it is serialized. - If you wish to disable the default serialization entirely, return true in the output parameter. 
- Implement this method in a partial class to enable this behavior.` - })); - - this.afterSerialize = this.targetClass.addMethod(new PartialMethod(after, dotnet.Void, { - access: Access.Default, - parameters: [this.refContainerParameter], - description: `${after} will be called after the serialization has finished, allowing customization of the ${this.typeCref} before it is returned. Implement this method in a partial class to enable this behavior ` - })); - } -} diff --git a/powershell/llcsharp/model/model-class-xml.ts b/powershell/llcsharp/model/model-class-xml.ts deleted file mode 100644 index 831158e974f..00000000000 --- a/powershell/llcsharp/model/model-class-xml.ts +++ /dev/null @@ -1,174 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { KnownMediaType, HeaderProperty, HeaderPropertyType } from '@azure-tools/codemodel-v3'; -import { EOL, DeepPartial } from '@azure-tools/codegen'; -import { items, values } from '@azure-tools/linq'; - -import { Access, Class, Constructor, dotnet, If, IsDeclaration, Method, Modifier, Not, Parameter, ParameterModifier, PartialMethod, Return, Statements, Switch, System, TerminalCase, Ternery } from '@azure-tools/codegen-csharp'; -import { ClientRuntime } from '../clientruntime'; -import { EnhancedTypeDeclaration } from '../schema/extended-type-declaration'; -import { popTempVar, pushTempVar } from '../schema/primitive'; -import { ModelClass } from './model-class'; - -import { ModelProperty } from './property'; - -export class XmlSerializableClass extends Class { - private btj!: Method; - private atj!: Method; - private bfj!: Method; - private afj!: Method; - - constructor(protected modelClass: ModelClass, objectInitializer?: DeepPartial) { - super(modelClass.namespace, modelClass.name); - this.apply(objectInitializer); - this.partial = true; - - this.addPartialMethods(); - - // set up the declaration for the toXml method. - const container = new Parameter('container', System.Xml.Linq.XElement, { description: `The container to serialize this object into. If the caller passes in null, a new instance will be created and returned to the caller.` }); - const mode = new Parameter('serializationMode', ClientRuntime.SerializationMode, { description: `Allows the caller to choose the depth of the serialization. 
See .` }); - - const toXmlMethod = this.addMethod(new Method('ToXml', System.Xml.Linq.XElement, { - parameters: [container, mode], - })); - - // setup the declaration for the xml deserializer constructor - const xmlParameter = new Parameter('xml', System.Xml.Linq.XElement, { description: `A ${System.Xml.Linq.XElement} instance to deserialize from.` }); - const deserializerConstructor = this.addMethod(new Constructor(this, { - parameters: [xmlParameter], access: Access.Internal, - description: `Deserializes a ${System.Xml.Linq.XElement} into a new instance of .` - })); - - const serializeStatements = new Statements(); - const deserializeStatements = new Statements(); - - for (const each of values(modelClass.backingFields)) { - serializeStatements.add(`${each.field.value}?.ToXml(result, ${mode.use});`); - deserializeStatements.add(`${each.field.value} = new ${each.className}(xml);`); - } - pushTempVar(); - for (const property of values(modelClass.schema.properties)) { - const prop = modelClass.$(property.details.csharp.name); - const serializeStatement = (prop.type).serializeToContainerMember(KnownMediaType.Xml, prop, container, prop.serializedName, mode); - - if (property.details.csharp[HeaderProperty] === HeaderPropertyType.Header) { - // it's a header only property. Don't serialize unless the mode has SerializationMode.IncludeHeaders enabled - serializeStatements.add(If(`${mode.use}.HasFlag(${ClientRuntime.SerializationMode.IncludeHeaders})`, serializeStatement)); - } else { - if (property.details.csharp.readOnly) { - serializeStatements.add(If(`${mode.use}.HasFlag(${ClientRuntime.SerializationMode.IncludeReadOnly})`, serializeStatement)); - } else { - serializeStatements.add(serializeStatement); - } - } - deserializeStatements.add(prop.assignPrivate((prop.type).deserializeFromContainerMember(KnownMediaType.Xml, xmlParameter, prop.serializedName, prop))); - } - popTempVar(); - const $this = this; - - // generate the implementation for toXml - toXmlMethod.add(function* () { - yield `${container} = ${container} ?? new ${System.Xml.Linq.XElement.declaration}(nameof(${$this.modelClass.name}));`; - yield EOL; - - yield 'bool returnNow = false;'; - yield `${$this.btj.name}(ref ${container}, ref returnNow);`; - - yield If('returnNow', `return ${container};`); - - // get serialization statements - yield serializeStatements; - - yield `${$this.atj.name}(ref ${container});`; - yield Return(container); - }); - - // and let's fill in the deserializer constructor statements now. - deserializerConstructor.add(function* () { - yield 'bool returnNow = false;'; - yield `${$this.bfj.name}(xml, ref returnNow);`; - yield If('returnNow', 'return;'); - - yield deserializeStatements; - yield `${$this.afj.name}(xml);`; - }); - } - - public get definition(): string { - const $this = this.modelClass; - // gotta write this just before we write out the class, since we had to wait until everyone had reported to their parents. 
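Both the JSON factory above and this (now deleted) XML factory follow the same shape: read the discriminator value from the payload and dispatch to the matching child constructor, falling back to the base type. The following is a minimal sketch of that dispatch in TypeScript; JsonNode, Factory and PolymorphicInfo are simplified stand-ins invented for illustration, not the generator's real Schema/Class objects.

// Minimal stand-ins; the real generator works with codemodel schemas and emitted C# classes.
interface JsonNode { stringProperty(name: string): string | undefined; }
type Factory<T> = (json: JsonNode) => T;

interface PolymorphicInfo<T> {
  discriminatorName: string;               // e.g. discriminator.property.serializedName in codemodel v4
  discriminators: Map<string, Factory<T>>;  // discriminator value -> child constructor
  fallback: Factory<T>;                     // construct the base type when no value matches
}

// Pick the appropriate constructor for a polymorphic payload.
function fromJson<T>(info: PolymorphicInfo<T>, json: JsonNode): T {
  const value = json.stringProperty(info.discriminatorName);
  const create = (value && info.discriminators.get(value)) || info.fallback;
  return create(json);
}
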
- const d = this.modelClass.discriminators; - const isp = this.modelClass.isPolymorphic; - // create the FromXml method - const node = new Parameter('node', System.Xml.Linq.XElement, { description: `A ${System.Xml.Linq.XElement} instance to deserialize from.` }); - const fromXml = this.addMethod(new Method('FromXml', this.modelClass.modelInterface, { parameters: [node], static: Modifier.Static })); - fromXml.add(function* () { - - const xml = IsDeclaration(node, System.Xml.Linq.XElement, 'xml'); - - if (isp) { - yield If(Not(xml.check), Return(dotnet.Null)); - yield '// Polymorphic type -- select the appropriate constructor using the discriminator'; - /** go thru the list of polymorphic values for the discriminator, and call the target class's constructor for that */ - - if ($this.schema.discriminator) { - yield Switch(`xml.StringProperty("${$this.schema.discriminator.propertyName}")`, function* () { - for (const { key, value } of items(d)) { - yield TerminalCase(`"${key}"`, function* () { - yield Return(value.new(xml)); - }); - } - }); - } - yield Return($this.new(xml)); - } else { - // just tell it to create the instance (providing that it's a XElement) - yield Return(Ternery(xml.check, $this.new(xml), dotnet.Null)); - } - }); - - return super.definition; - } - - public get fileName(): string { - return `${super.fileName}.xml`; - } - - protected addPartialMethods() { - // add partial methods for future customization - this.btj = this.addMethod(new PartialMethod('BeforeToXml', dotnet.Void, { - access: Access.Default, - parameters: [ - new Parameter('container', System.Xml.Linq.XElement, { modifier: ParameterModifier.Ref, description: 'The XElement that the serialization result will be placed in.' }), - new Parameter('returnNow', dotnet.Bool, { modifier: ParameterModifier.Ref, description: 'Determines if the rest of the serialization should be processed, or if the method should return instantly.' }), - ], - })); - - this.atj = this.addMethod(new PartialMethod('AfterToXml', dotnet.Void, { - access: Access.Default, - parameters: [ - new Parameter('container', System.Xml.Linq.XElement, { modifier: ParameterModifier.Ref, description: 'The XElement that the serialization result will be placed in.' }), - ], - })); - - this.bfj = this.addMethod(new PartialMethod('BeforeFromXml', dotnet.Void, { - access: Access.Default, - parameters: [ - new Parameter('xml', System.Xml.Linq.XElement, { description: 'The XmlNode that should be deserialized into this object.' }), - new Parameter('returnNow', dotnet.Bool, { modifier: ParameterModifier.Ref, description: 'Determines if the rest of the deserialization should be processed, or if the method should return instantly.' }), - ], - })); - - this.afj = this.addMethod(new PartialMethod('AfterFromXml', dotnet.Void, { - access: Access.Default, - parameters: [ - new Parameter('xml', System.Xml.Linq.XElement, { description: 'The XmlNode that should be deserialized into this object.' 
}), - ], - })); - } - -} diff --git a/powershell/llcsharp/model/model-class.ts b/powershell/llcsharp/model/model-class.ts index 949bded3e5e..fd98f5310a2 100644 --- a/powershell/llcsharp/model/model-class.ts +++ b/powershell/llcsharp/model/model-class.ts @@ -17,14 +17,17 @@ import { ModelProperty } from './property'; import { PropertyOriginAttribute, DoNotFormatAttribute, FormatTableAttribute } from '../csharp-declarations'; import { Schema } from '../code-model'; import { DictionaryImplementation } from './model-class-dictionary'; +import { Languages, Language, Schema as NewSchema, SchemaType, ObjectSchema, DictionarySchema } from '@azure-tools/codemodel'; +import { VirtualProperty as NewVirtualProperty, getAllVirtualProperties as newGetAllVirtualProperties } from '../../utils/schema'; -export function getVirtualPropertyName(vp?: VirtualProperty): string { +export function getVirtualPropertyName(vp?: NewVirtualProperty): string { if (vp && vp.accessViaMember && vp.accessViaProperty?.accessViaMember) { return getVirtualPropertyName(vp.accessViaMember); } return vp ? vp.name : ''; } + export interface BackingField { field: Field; typeDeclaration: TypeDeclaration; @@ -82,8 +85,8 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { /* @internal */ validateMethod?: Method; /* @internal */ discriminators: Map = new Map(); /* @internal */ parentModelClasses: Array = new Array(); - /* @internal */ get modelInterface(): ModelInterface { return this.schema.details.csharp.interfaceImplementation; } - /* @internal */ get internalModelInterface(): ModelInterface { return this.schema.details.csharp.internalInterfaceImplementation; } + /* @internal */ get modelInterface(): ModelInterface { return this.schema.language.csharp?.interfaceImplementation; } + /* @internal */ get internalModelInterface(): ModelInterface { return this.schema.language.csharp?.internalInterfaceImplementation; } /* @internal */ state: State; /* @internal */ backingFields = new Array(); @@ -95,18 +98,19 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { private readonly validationStatements = new Statements(); public ownedProperties = new Array(); - private pMap = new Map(); + private pMap = new Map(); // public hasHeaderProperties: boolean; constructor(namespace: Namespace, schemaWithFeatures: ObjectImplementation, state: State, objectInitializer?: DeepPartial) { - super(namespace, schemaWithFeatures.schema.details.csharp.name); + super(namespace, schemaWithFeatures.schema.language.csharp?.name || ''); this.featureImplementation = schemaWithFeatures; - this.schema.details.csharp.classImplementation = this; // mark the code-model with the class we're creating. + this.schema.language.csharp = this.schema.language.csharp || new Language(); + this.schema.language.csharp.classImplementation = this; // mark the code-model with the class we're creating. 
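The constructor below defaults `schema.language.csharp` before writing generator state into it, which is the recurring pattern in these hunks: the codemodel-v3 `details.csharp` bag becomes the codemodel-v4 `language.csharp` bag, which may be absent and is therefore either initialized up front or read through optional chaining. A small sketch of that defensive access, using simplified local types that stand in for the real codemodel classes (the helper names here are illustrative, not part of the library):

// Simplified stand-ins for the codemodel v4 language metadata.
interface CSharpLanguage {
  name?: string;
  description?: string;
  classImplementation?: unknown;
  [key: string]: unknown;
}
interface SchemaLike {
  language: { default: { name: string; description: string }; csharp?: CSharpLanguage };
}

// Ensure the csharp bag exists before the generator writes into it.
function ensureCSharp(schema: SchemaLike): CSharpLanguage {
  schema.language.csharp = schema.language.csharp ?? {};
  return schema.language.csharp;
}

// Reads fall back to the 'default' language block when csharp metadata is missing.
function displayName(schema: SchemaLike): string {
  return schema.language.csharp?.name ?? schema.language.default.name;
}
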
this.state = state; this.apply(objectInitializer); - if (this.state.getValue('powershell') && this.schema.details.csharp.suppressFormat) { + if (this.state.getValue('powershell') && this.schema.language.csharp.suppressFormat) { this.add(new Attribute(DoNotFormatAttribute)); } @@ -116,21 +120,21 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { this.handleDiscriminator(); // create an interface for this model class - if (!this.schema.details.csharp.interfaceImplementation) { - (this.schema.details.csharp.interfaceImplementation = new ModelInterface(this.namespace, this.schema.details.csharp.interfaceName || `I${this.schema.details.csharp.name}`, this, this.state)); + if (!this.schema.language.csharp.interfaceImplementation) { + (this.schema.language.csharp.interfaceImplementation = new ModelInterface(this.namespace, this.schema.language.csharp.interfaceName || `I${this.schema.language.csharp.name}`, this, this.state)); } this.interfaces.push(this.modelInterface); - if (!this.schema.details.csharp.internalInterfaceImplementation) { - (this.schema.details.csharp.internalInterfaceImplementation = new ModelInterface(this.namespace, this.schema.details.csharp.internalInterfaceName || `I${this.schema.details.csharp.name}Internal`, this, this.state, { accessModifier: Access.Internal })); + if (!this.schema.language.csharp.internalInterfaceImplementation) { + (this.schema.language.csharp.internalInterfaceImplementation = new ModelInterface(this.namespace, this.schema.language.csharp.internalInterfaceName || `I${this.schema.language.csharp.name}Internal`, this, this.state, { accessModifier: Access.Internal })); } this.interfaces.push(this.internalModelInterface); - this.schema.details.csharp.internalInterfaceImplementation.init(); - this.schema.details.csharp.interfaceImplementation.init(); + this.schema.language.csharp.internalInterfaceImplementation.init(); + this.schema.language.csharp.interfaceImplementation.init(); // add default constructor this.addMethod(new Constructor(this, { description: `Creates an new instance.` })); // default constructor for fits and giggles. @@ -138,7 +142,9 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { // handle parent interface implementation if (!this.handleAllOf()) { // handle the AdditionalProperties if used - if (this.schema.additionalProperties) { + const dictSchema = (this.schema).type === SchemaType.Dictionary ? 
this.schema : + this.schema.parents?.immediate?.find((schema) => schema.type === SchemaType.Dictionary); + if (dictSchema) { this.dictionaryImpl = new DictionaryImplementation(this).init(); } } @@ -158,7 +164,7 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { } } - private nested(virtualProperty: VirtualProperty, internal: boolean): string { + private nested(virtualProperty: NewVirtualProperty, internal: boolean): string { if (virtualProperty.accessViaProperty) { if (virtualProperty.accessViaProperty.accessViaProperty) { // return `/*1*/${getVirtualPropertyName(virtualProperty.accessViaMember)}.${this.nested(virtualProperty.accessViaProperty.accessViaProperty, internal)}`; @@ -169,13 +175,13 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { return `${getVirtualPropertyName(virtualProperty.accessViaMember)}`; } - private accessor(virtualProperty: VirtualProperty, internal = false): string { + private accessor(virtualProperty: NewVirtualProperty, internal = false): string { if (virtualProperty.accessViaProperty) { const prefix = virtualProperty.accessViaProperty.accessViaProperty ? this.nested(virtualProperty.accessViaProperty.accessViaProperty, internal) : ''; const containingProperty = this.pMap.get(virtualProperty.accessViaProperty); if (containingProperty && virtualProperty.accessViaMember) { //return `/*3*/((${virtualProperty.accessViaMember.originalContainingSchema.details.csharp.fullInternalInterfaceName})${containingProperty.name}${prefix}).${getVirtualPropertyName(virtualProperty.accessViaMember)}`; - return `((${virtualProperty.accessViaMember.originalContainingSchema.details.csharp.fullInternalInterfaceName})${containingProperty.name}${prefix}).${getVirtualPropertyName(virtualProperty.accessViaMember)}`; + return `((${virtualProperty.accessViaMember.originalContainingSchema.language.csharp?.fullInternalInterfaceName})${containingProperty.name}${prefix}).${getVirtualPropertyName(virtualProperty.accessViaMember)}`; } } // return `/*4* ${virtualProperty.name}/${virtualProperty.accessViaMember?.name}/${virtualProperty.accessViaProperty?.name} */${getVirtualPropertyName(virtualProperty.accessViaMember) || '/*!!*/' + virtualProperty.name}`; @@ -187,8 +193,8 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { // and then expand the nested properties into this class forwarding to the member. 
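The property-expansion code that follows resolves each flattened (owned, inherited, or inlined) member by walking a chain of virtual properties until it reaches the member that is physically emitted. A compact sketch of that walk, using a simplified VirtualProperty shape rather than the full type from ../utils/schema:

// Simplified shape: a virtual property may delegate to another member/property.
interface VirtualPropertyLike {
  name: string;
  accessViaMember?: VirtualPropertyLike;
  accessViaProperty?: VirtualPropertyLike;
}

// Follow accessViaMember until reaching the property that actually backs the flattened member,
// so an inlined property such as properties.sku.name resolves to the name the parent class exposes.
function virtualPropertyName(vp?: VirtualPropertyLike): string {
  if (vp && vp.accessViaMember && vp.accessViaProperty?.accessViaMember) {
    return virtualPropertyName(vp.accessViaMember);
  }
  return vp ? vp.name : '';
}
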
// add properties - if (this.schema.details.csharp.virtualProperties) { - const addFormatAttributesToProperty = (property: Property, virtualProperty: VirtualProperty) => { + if (this.schema.language.csharp?.virtualProperties) { + const addFormatAttributesToProperty = (property: Property, virtualProperty: NewVirtualProperty) => { if (virtualProperty.format) { if (virtualProperty.format.suppressFormat) { property.add(new Attribute(DoNotFormatAttribute)); @@ -212,22 +218,22 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { }; /* Owned Properties */ - for (const virtualProperty of values(this.schema.details.csharp.virtualProperties.owned)) { + for (const virtualProperty of values(>(this.schema.language.csharp.virtualProperties.owned))) { const actualProperty = virtualProperty.property; let n = 0; - const decl = this.state.project.modelsNamespace.resolveTypeDeclaration(actualProperty.schema, actualProperty.details.csharp.required, this.state.path('schema')); + const decl = this.state.project.modelsNamespace.NewResolveTypeDeclaration(actualProperty.schema, actualProperty.language.csharp?.required, this.state.path('schema')); /* public property */ - const myProperty = new ModelProperty(virtualProperty.name, actualProperty.schema, actualProperty.details.csharp.required, actualProperty.serializedName, actualProperty.details.csharp.description, this.state.path('properties', n++), { - initializer: actualProperty.details.csharp.constantValue ? typeof actualProperty.details.csharp.constantValue === 'string' ? new StringExpression(actualProperty.details.csharp.constantValue) : new LiteralExpression(actualProperty.details.csharp.constantValue) : undefined + const myProperty = new ModelProperty(virtualProperty.name, actualProperty.schema, actualProperty.language.csharp?.required, actualProperty.serializedName, actualProperty.language.csharp?.description || '', this.state.path('properties', n++), { + initializer: actualProperty.language.csharp?.constantValue ? typeof actualProperty.language.csharp.constantValue === 'string' ? new StringExpression(actualProperty.language.csharp.constantValue) : new LiteralExpression(actualProperty.language.csharp.constantValue) : undefined }); - if (actualProperty.details.csharp.readOnly) { + if (actualProperty.language.csharp?.readOnly || actualProperty.readOnly) { myProperty.set = undefined; } - myProperty.details = virtualProperty.property.details; + myProperty.language = virtualProperty.property.language; - if (actualProperty.details.csharp.constantValue !== undefined) { + if (actualProperty.language.csharp?.constantValue !== undefined) { myProperty.setAccess = Access.Internal; myProperty.set = undefined; } @@ -244,7 +250,7 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { if (myProperty.getAccess !== Access.Public || myProperty.setAccess !== Access.Public || myProperty.set === undefined) { /* internal interface property */ - this.add(new Property(`${virtualProperty.originalContainingSchema.details.csharp.internalInterfaceImplementation.fullName}.${virtualProperty.name}`, decl, { + this.add(new Property(`${virtualProperty.originalContainingSchema.language.csharp?.internalInterfaceImplementation.fullName}.${virtualProperty.name}`, decl, { description: `Internal Acessors for ${virtualProperty.name}`, getAccess: Access.Explicit, setAccess: Access.Explicit, @@ -260,29 +266,29 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { } /* Inherited properties. 
*/ - for (const virtualProperty of values(this.schema.details.csharp.virtualProperties.inherited)) { + for (const virtualProperty of values(>(this.schema.language.csharp.virtualProperties.inherited))) { // so each parent property that is getting exposed // has to be accessed via the field in this.backingFields - const parentField = this.backingFields.find(each => virtualProperty.accessViaSchema ? virtualProperty.accessViaSchema.details.csharp.interfaceImplementation.fullName === each.typeDeclaration.declaration : false); + const parentField = this.backingFields.find(each => virtualProperty.accessViaSchema ? virtualProperty.accessViaSchema.language.csharp?.interfaceImplementation.fullName === each.typeDeclaration.declaration : false); - const propertyType = this.state.project.modelsNamespace.resolveTypeDeclaration(virtualProperty.property.schema, virtualProperty.property.details.csharp.required, this.state); - const opsType = this.state.project.modelsNamespace.resolveTypeDeclaration(virtualProperty.originalContainingSchema, false, this.state); - const via = virtualProperty.accessViaProperty; - const parentCast = `(${virtualProperty.originalContainingSchema.details.csharp.internalInterfaceImplementation.fullName})`; + const propertyType = this.state.project.modelsNamespace.NewResolveTypeDeclaration(virtualProperty.property.schema, virtualProperty.property.language.csharp?.required, this.state); + const opsType = this.state.project.modelsNamespace.NewResolveTypeDeclaration(virtualProperty.originalContainingSchema, false, this.state); + const via = virtualProperty.accessViaProperty; + const parentCast = `(${virtualProperty.originalContainingSchema.language.csharp?.internalInterfaceImplementation.fullName})`; const vp = this.add(new Property(virtualProperty.name, propertyType, { - description: virtualProperty.property.details.csharp.description, + description: virtualProperty.property.language.csharp?.description, get: toExpression(`(${parentCast}${parentField.field.name}).${this.accessor(virtualProperty)}`), - set: (virtualProperty.property.details.csharp.readOnly || virtualProperty.property.details.csharp.constantValue) ? undefined : toExpression(`(${parentCast}${parentField.field.name}).${this.accessor(virtualProperty)} = value`) + set: (virtualProperty.property.language.csharp?.readOnly || virtualProperty.property.language.csharp?.constantValue) ? undefined : toExpression(`(${parentCast}${parentField.field.name}).${this.accessor(virtualProperty)} = value`) })); - if (virtualProperty.property.details.csharp.constantValue !== undefined) { + if (virtualProperty.property.language.csharp?.constantValue !== undefined) { vp.setAccess = Access.Internal; vp.set = undefined; } if (vp.getAccess !== Access.Public || vp.setAccess !== Access.Public || vp.set === undefined) { - this.add(new Property(`${virtualProperty.originalContainingSchema.details.csharp.internalInterfaceImplementation.fullName}.${virtualProperty.name}`, propertyType, { + this.add(new Property(`${virtualProperty.originalContainingSchema.language.csharp?.internalInterfaceImplementation.fullName}.${virtualProperty.name}`, propertyType, { description: `Internal Acessors for ${virtualProperty.name}`, getAccess: Access.Explicit, setAccess: Access.Explicit, @@ -298,7 +304,7 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { } /* Inlined properties. 
*/ - for (const virtualProperty of values(this.schema.details.csharp.virtualProperties.inlined)) { + for (const virtualProperty of values(>this.schema.language.csharp.virtualProperties.inlined)) { if (virtualProperty.private) { // continue; // can't remove it, it has to be either public or internally implemented. @@ -308,13 +314,13 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { const containingProperty = this.pMap.get(virtualProperty.accessViaProperty); if (containingProperty) { - const propertyType = this.state.project.modelsNamespace.resolveTypeDeclaration(virtualProperty.property.schema, virtualProperty.property.details.csharp.required, this.state); + const propertyType = this.state.project.modelsNamespace.NewResolveTypeDeclaration(virtualProperty.property.schema, virtualProperty.property.language.csharp?.required, this.state); // regular inlined property const vp = new Property(virtualProperty.name, propertyType, { - description: virtualProperty.property.details.csharp.description, + description: virtualProperty.property.language.csharp?.description, get: toExpression(`${this.accessor(virtualProperty)}`), - set: (virtualProperty.property.details.csharp.readOnly || virtualProperty.property.details.csharp.constantValue) ? undefined : toExpression(`${this.accessor(virtualProperty)} = value`) + set: (virtualProperty.property.language.csharp?.readOnly || virtualProperty.property.language.csharp?.constantValue) ? undefined : toExpression(`${this.accessor(virtualProperty)} = value`) }); if (!virtualProperty.private) { @@ -322,7 +328,7 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { } if (virtualProperty.private || vp.getAccess !== Access.Public || vp.setAccess !== Access.Public || vp.set === undefined) { - this.add(new Property(`${virtualProperty.originalContainingSchema.details.csharp.internalInterfaceImplementation.fullName}.${virtualProperty.name}`, propertyType, { + this.add(new Property(`${virtualProperty.originalContainingSchema.language.csharp?.internalInterfaceImplementation.fullName}.${virtualProperty.name}`, propertyType, { description: `Internal Acessors for ${virtualProperty.name}`, getAccess: Access.Explicit, setAccess: Access.Explicit, @@ -331,7 +337,7 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { })); } - if (virtualProperty.property.details.csharp.constantValue !== undefined) { + if (virtualProperty.property.language.csharp?.constantValue !== undefined) { vp.setAccess = Access.Internal; vp.set = undefined; } @@ -363,18 +369,21 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { } } - private additionalPropertiesType(aSchema: Schema): TypeDeclaration | undefined { - if (aSchema.additionalProperties) { - - if (aSchema.additionalProperties === true) { + private additionalPropertiesType(aSchema: NewSchema): TypeDeclaration | undefined { + const schema = aSchema.type === SchemaType.Dictionary ? aSchema : + aSchema.type === SchemaType.Object ? 
(aSchema).parents?.immediate?.find((s) => s.type === SchemaType.Dictionary) : + undefined; + if (schema) { + const dictSchema = schema as DictionarySchema; + if (dictSchema.elementType.type === SchemaType.Any) { return System.Object; } else { // we're going to implement IDictionary - return this.state.project.modelsNamespace.resolveTypeDeclaration(aSchema.additionalProperties, true, this.state); + return this.state.project.modelsNamespace.NewResolveTypeDeclaration(dictSchema.elementType, true, this.state); } } else - for (const each of values(aSchema.allOf)) { + for (const each of values((aSchema).parents?.immediate)) { const r = this.additionalPropertiesType(each); if (r) { return r; @@ -387,17 +396,20 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { let hasAdditionalPropertiesInParent = false; // handle s // add an 'implements' for the interface for the allOf. - for (const { key: eachSchemaIndex, value: eachSchemaValue } of items(this.schema.allOf)) { + for (const { key: eachSchemaIndex, value: eachSchemaValue } of items(this.schema.parents?.immediate)) { + if (eachSchemaValue.type === SchemaType.Dictionary) { + continue; + } const aSchema = eachSchemaValue; const aState = this.state.path('allOf', eachSchemaIndex); - const td = this.state.project.modelsNamespace.resolveTypeDeclaration(aSchema, true, aState); - const parentClass = (aSchema.details.csharp.classImplementation); + const td = this.state.project.modelsNamespace.NewResolveTypeDeclaration(aSchema, true, aState); + const parentClass = (aSchema.language.csharp?.classImplementation); const className = parentClass.fullName; const fieldName = camelCase(deconstruct(className.replace(/^.*\./, ''))); // add the interface as a parent to our interface. - const iface = aSchema.details.csharp.interfaceImplementation; + const iface = aSchema.language.csharp?.interfaceImplementation; // add a field for the inherited values const backingField = this.addField(new Field(`__${fieldName}`, td, { initialValue: `new ${className}()`, access: Access.Private, description: `Backing field for Inherited model ` })); @@ -409,7 +421,7 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { this.validationStatements.add(td.validatePresence(this.validationEventListener, backingField)); this.validationStatements.add(td.validateValue(this.validationEventListener, backingField)); - this.internalModelInterface.interfaces.push(aSchema.details.csharp.internalInterfaceImplementation); + this.internalModelInterface.interfaces.push(aSchema.language.csharp?.internalInterfaceImplementation); this.modelInterface.interfaces.push(iface); // @@ -430,30 +442,30 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration { // we'll add a deserializer factory method a bit later.. } - if (this.schema.details.csharp.discriminatorValue) { + if (this.schema.discriminatorValue) { // we have a discriminator value, and we should tell our parent who we are so that they can build a proper deserializer method. // um. just how do we *really* know which allOf is polymorphic? // that's really sad. - for (const { key: eachAllOfIndex, value: eachAllOfValue } of items(this.schema.allOf)) { + for (const { key: eachAllOfIndex, value: eachAllOfValue } of items(this.schema.parents?.all)) { const parentSchema = eachAllOfValue; const aState = this.state.path('allOf', eachAllOfIndex); // ensure the parent schema has it's class created first. 
-        this.state.project.modelsNamespace.resolveTypeDeclaration(parentSchema, true, aState);
+        this.state.project.modelsNamespace.NewResolveTypeDeclaration(parentSchema, true, aState);
-        const parentClass = parentSchema.details.csharp.classImplementation;
+        const parentClass = parentSchema.language.csharp?.classImplementation;
         if (parentClass.isPolymorphic) {
           // remember this class for later.
           this.parentModelClasses.push(parentClass);
           // tell that parent who we are.
-          parentClass.addDiscriminator(this.schema.details.csharp.discriminatorValue, this);
+          parentClass.addDiscriminator(this.schema.discriminatorValue, this);
         }
       }
     }
   }
   private addHeaderDeserializer() {
-    const avp = getAllVirtualProperties(this.schema.details.csharp.virtualProperties);
+    const avp = newGetAllVirtualProperties(this.schema.language.csharp?.virtualProperties);
     const headers = new Parameter('headers', System.Net.Http.Headers.HttpResponseHeaders);
     const readHeaders = new Method(`${ClientRuntime.IHeaderSerializable}.ReadHeaders`, undefined, {
       access: Access.Explicit,
@@ -462,11 +474,12 @@ export class ModelClass extends Class implements EnhancedTypeDeclaration {
     let used = false;
-    for (const headerProperty of values(avp).where(each => each.property.details.csharp[HeaderProperty] === HeaderPropertyType.HeaderAndBody || each.property.details.csharp[HeaderProperty] === HeaderPropertyType.Header)) {
+    for (const headerProperty of values(avp).where(each => each.property.language.csharp?.[HeaderProperty] === HeaderPropertyType.HeaderAndBody || each.property.language.csharp?.[HeaderProperty] === HeaderPropertyType.Header)) {
       used = true;
-      const t = `((${headerProperty.originalContainingSchema.details.csharp.fullInternalInterfaceName})this)`;
+      headerProperty.property.schema
+      const t = `((${headerProperty.originalContainingSchema.language.csharp?.fullInternalInterfaceName})this)`;
       const values = `__${camelCase([...deconstruct(headerProperty.property.serializedName), 'Header'])}`;
-      const td = this.state.project.modelsNamespace.resolveTypeDeclaration(headerProperty.property.schema, false, this.state.path('schema'));
+      const td = this.state.project.modelsNamespace.NewResolveTypeDeclaration(headerProperty.property.schema, false, this.state);
       readHeaders.add(If(`${valueOf(headers)}.TryGetValues("${headerProperty.property.serializedName}", out var ${values})`, `${t}.${headerProperty.name} = ${td.deserializeFromContainerMember(KnownMediaType.Header, headers, values, td.defaultOfType)};`));
     }
     if (used) {
diff --git a/powershell/llcsharp/model/namespace.ts b/powershell/llcsharp/model/namespace.ts
index 2d23f84beb5..92647993625 100644
--- a/powershell/llcsharp/model/namespace.ts
+++ b/powershell/llcsharp/model/namespace.ts
@@ -3,6 +3,7 @@
 * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ +import { Schema as NewSchema, Schemas as NewSchemas, Language } from '@azure-tools/codemodel'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; import { ImportDirective, Namespace } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; @@ -35,8 +36,8 @@ export class ModelsNamespace extends Namespace { private subNamespaces = new Dictionary(); resolver = new SchemaDefinitionResolver(); - - constructor(parent: Namespace, private schemas: Dictionary, private state: State, objectInitializer?: DeepPartial) { + newResolver = new SchemaDefinitionResolver(); + constructor(parent: Namespace, private schemas: NewSchemas, private state: State, objectInitializer?: DeepPartial) { super('Models', parent); this.subNamespaces[this.fullName] = this; @@ -47,59 +48,79 @@ export class ModelsNamespace extends Namespace { // special case... hook this up before we get anywhere. state.project.modelsNamespace = this; - for (const { key: index, value: schema } of items(schemas)) { - const state = this.state.path(index); + if (schemas.objects) { + for (const schema of schemas.objects) { + this.NewResolveTypeDeclaration(schema, true, state); + } + } + if (schemas.dictionaries) { + for (const schema of schemas.dictionaries) { + this.NewResolveTypeDeclaration(schema, true, state); + } + } - // verify that the model isn't in a bad state - if (validation.objectWithFormat(schema, state)) { - continue; + if (schemas.any) { + for (const schema of schemas.any) { + this.NewResolveTypeDeclaration(schema, true, state); } - this.resolveTypeDeclaration(schema, true, state); } + + if (schemas.strings) { + for (const schema of schemas.strings) { + this.NewResolveTypeDeclaration(schema, true, state); + } + } + if (schemas.sealedChoices) { + for (const schema of schemas.sealedChoices) { + this.NewResolveTypeDeclaration(schema, true, state); + } + } + //todo, need to add support for other types + } get outputFolder() { return 'Models'; } - public resolveTypeDeclaration(schema: Schema | undefined, required: boolean, state: State): EnhancedTypeDeclaration { + public NewResolveTypeDeclaration(schema: NewSchema | undefined, required: boolean, state: State): EnhancedTypeDeclaration { if (!schema) { throw new Error('SCHEMA MISSING?'); } - const td = this.resolver.resolveTypeDeclaration(schema, required, state); + const td = this.newResolver.resolveTypeDeclaration(schema, required, state); - if (!schema.details.csharp.skip) { + if (!schema.language.csharp?.skip) { if (td instanceof ObjectImplementation) { // it's a class object. 
// create it if necessary - const fullname = schema.details.csharp.namespace || this.fullName; + const fullname = schema.language.csharp?.namespace || this.fullName; const ns = this.subNamespaces[fullname] || this.add(new ApiVersionNamespace(fullname)); - const mc = schema.details.csharp.classImplementation || new ModelClass(ns, td, this.state, { description: schema.details.csharp.description }); + const mc = schema.language.csharp?.classImplementation || new ModelClass(ns, td, this.state, { description: schema.language.csharp?.description }); // this gets implicity created during class creation: - return schema.details.csharp.interfaceImplementation; + return schema.language.csharp?.interfaceImplementation; } - if (state.project.azure && /^api-?version$/i.exec(schema.details.csharp.name)) { + if (state.project.azure && /^api-?version$/i.exec(schema.language.csharp?.name || '')) { return td; } if (td instanceof EnumImplementation) { - if (schema.details.csharp.enum) { - const ec = state.project.supportNamespace.findClassByName(schema.details.csharp.enum.name); + if (schema.language.csharp?.enum) { + const ec = state.project.supportNamespace.findClassByName(schema.language.csharp.enum.name); if (length(ec) === 0) { new EnumClass(td, state); - // return schema.details.csharp.typeDeclaration = ec[0]; + // return schema.language.csharp.typeDeclaration = ec[0]; } } - - return schema.details.csharp.typeDeclaration = td; + schema.language.csharp = schema.language.csharp || new Language(); + return schema.language.csharp.typeDeclaration = td; } } return td; } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/model/property.ts b/powershell/llcsharp/model/property.ts index d846cd77f11..94a82a74ee4 100644 --- a/powershell/llcsharp/model/property.ts +++ b/powershell/llcsharp/model/property.ts @@ -15,8 +15,8 @@ import { EnhancedTypeDeclaration } from '../schema/extended-type-declaration'; import { State } from '../generator'; -import { ModelClass } from './model-class'; import { DeepPartial } from '@azure-tools/codegen'; +import { Schema as NewSchema, SchemaType } from '@azure-tools/codemodel'; export class ModelProperty extends BackedProperty implements EnhancedVariable { /** emits an expression to deserialize a property from a member inside a container */ @@ -56,28 +56,29 @@ export class ModelProperty extends BackedProperty implements EnhancedVariable { private required: boolean; // DISABLED // public IsHeaderProperty: boolean; - public schema: Schema; + public schema: NewSchema; public serializedName: string; private typeDeclaration: EnhancedTypeDeclaration; - public details: any; + public language: any; - constructor(name: string, schema: Schema, isRequired: boolean, serializedName: string, description: string, state: State, objectInitializer?: DeepPartial) { - const decl = state.project.modelsNamespace.resolveTypeDeclaration(schema, isRequired, state.path('schema')); + constructor(name: string, schema: NewSchema, isRequired: boolean, serializedName: string, description: string, state: State, objectInitializer?: DeepPartial) { + const decl = state.project.modelsNamespace.NewResolveTypeDeclaration(schema, isRequired, state.path('schema')); super(name, decl); this.typeDeclaration = decl; this.serializedName = serializedName; this.schema = schema; - if (this.schema.readOnly) { - this.set = undefined; - } + // skip-for-time-being + // if (this.schema.readOnly) { + // this.set = undefined; + // } this.apply(objectInitializer); this.description = description; this.required = isRequired; 
-    if (this.schema.type === JsonType.Object && isAnExpression(this.get) && schema.details.csharp.classImplementation) {
+    if ((this.schema.type === SchemaType.Object || this.schema.type === SchemaType.Dictionary || this.schema.type === SchemaType.Any) && isAnExpression(this.get) && schema.language.csharp?.classImplementation) {
       // for objects, the getter should auto-create a new object
-      this.get = toExpression(`(${this.get.value} = ${this.get.value} ?? new ${schema.details.csharp.fullname}())`);
+      this.get = toExpression(`(${this.get.value} = ${this.get.value} ?? new ${schema.language.csharp?.fullname}())`);
     }
   }
-}
+}
\ No newline at end of file
diff --git a/powershell/llcsharp/operation/api-class.ts b/powershell/llcsharp/operation/api-class.ts
index d52af8c87fb..1d66df80f69 100644
--- a/powershell/llcsharp/operation/api-class.ts
+++ b/powershell/llcsharp/operation/api-class.ts
@@ -7,7 +7,7 @@
 import { items, values, keys, Dictionary, length } from '@azure-tools/linq';
 import { Class, Namespace } from '@azure-tools/codegen-csharp';
 import { State } from '../generator';
-import { CallMethod, OperationMethod, ValidationMethod } from '../operation/method';
+import { OperationMethod, CallMethod, ValidationMethod } from '../operation/method';
 import { ParameterLocation } from '@azure-tools/codemodel-v3';
 import { DeepPartial } from '@azure-tools/codegen';
@@ -15,40 +15,41 @@ export class ApiClass extends Class {
   // protected sender: Property;
   constructor(namespace: Namespace, protected state: State, objectInitializer?: DeepPartial) {
-    super(namespace, state.model.details.csharp.name);
+    super(namespace, state.model.language.csharp?.name || '');
     this.apply(objectInitializer);
     // add basics
     // this.sender = this.add(new Property("Sender", ClientRuntime.ISendAsync));
     // add operations from code model
-    for (const { key: operationIndex, value: operation } of items(state.model.http.operations)) {
-      // an operation has parameters for parameters, body, callbacks, listener and sender
-      // we need to make sure that the parameters for a given operation are consistent between the operation method, the call method, and the validation method.
-      // can we generate the common parameters here and just give them to the methods? (ie, can we share the instances between the methods?)
-      // code-dom doesn't store references from the child to the parent, so as long as the definitions work without modification, it looks like we can.
-
-      // we'll do that work in the OM and expose them as public properties.
-      const operationMethod = new OperationMethod(this, operation, false, state.path('components', 'operations', operationIndex));
-      this.addMethod(operationMethod);
-      if ([...values(operation.parameters).select(each => each.in === ParameterLocation.Path)].length > 0) {
-        // method has parameters in the path, so it could support '...ViaIdentity'
-        const identityMethod = new OperationMethod(this, operation, true, state.path('components', 'operations', operationIndex));
-        identityMethod.emitCall(false);
-        this.addMethod(identityMethod);
+    // todo
+    for (const operationGroup of state.model.operationGroups) {
+      for (const operation of operationGroup.operations) {
+        const operationMethod = new OperationMethod(this, operation, false, state);
+        this.addMethod(operationMethod);
+        // Compare with m3, m4 operation has one more parameter called '$host'.
We should skip it + const parameters = operation.parameters?.filter((param) => param.language.default.serializedName !== '$host'); + if ([...values(parameters).select(each => each.protocol.http?.in === ParameterLocation.Path)].length > 0) { + // method has parameters in the path, so it could support '...ViaIdentity' + const identityMethod = new OperationMethod(this, operation, true, state); + identityMethod.emitCall(false); + this.addMethod(identityMethod); + + } + + // check if this exact method is been created before (because _call and _validate have less specific parameters than the api) + const cm = new CallMethod(this, operationMethod, state); + if (!this.hasMethodWithSameDeclaration(cm)) { + this.addMethod(cm); + } + + const vm = new ValidationMethod(this, operationMethod, state); + if (!this.hasMethodWithSameDeclaration(vm)) { + this.addMethod(vm); + } } - // check if this exact method is been created before (because _call and _validate have less specific parameters than the api) - const cm = new CallMethod(this, operationMethod, state.path('components', 'operations', operationIndex)); - if (!this.hasMethodWithSameDeclaration(cm)) { - this.addMethod(cm); - } - - const vm = new ValidationMethod(this, operationMethod, state.path('components', 'operations', operationIndex)); - if (!this.hasMethodWithSameDeclaration(vm)) { - this.addMethod(vm); - } } } } diff --git a/powershell/llcsharp/operation/method.ts b/powershell/llcsharp/operation/method.ts index f3356f8fb06..aa4e6521316 100644 --- a/powershell/llcsharp/operation/method.ts +++ b/powershell/llcsharp/operation/method.ts @@ -4,6 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { NewResponse, ParameterLocation } from '@azure-tools/codemodel-v3'; +import { Operation, SchemaResponse, Schema as NewSchema, Response } from '@azure-tools/codemodel'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; import { EOL, DeepPartial } from '@azure-tools/codegen'; import { Access, Modifier } from '@azure-tools/codegen-csharp'; @@ -24,13 +25,14 @@ import { Local, LocalVariable, Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { HttpOperation, Schema } from '../code-model'; import { State } from '../generator'; -import { CallbackParameter, OperationBodyParameter, OperationParameter } from '../operation/parameter'; +import { CallbackParameter, OperationParameter, OperationBodyParameter } from '../operation/parameter'; import { isMediaTypeJson, isMediaTypeXml, KnownMediaType, knownMediaType, normalizeMediaType, parseMediaType } from '@azure-tools/codemodel-v3'; import { ClassType, dotnet, System } from '@azure-tools/codegen-csharp'; import { Ternery } from '@azure-tools/codegen-csharp'; + function removeEncoding(pp: OperationParameter, paramName: string, kmt: KnownMediaType): string { const up = pp.typeDeclaration.serializeToNode(kmt, pp, paramName, ClientRuntime.SerializationMode.None).value; return pp.param.extensions && pp.param.extensions['x-ms-skip-url-encoding'] ? up.replace(/global::System.Uri.EscapeDataString|System.Uri.EscapeDataString/g, '') : up; @@ -83,14 +85,14 @@ export class OperationMethod extends Method { protected callName: string; - constructor(protected parent: Class, public operation: HttpOperation, public viaIdentity: boolean, protected state: State, objectInitializer?: DeepPartial) { - super(viaIdentity ? 
`${operation.details.csharp.name}ViaIdentity` : operation.details.csharp.name, System.Threading.Tasks.Task()); + constructor(public parent: Class, public operation: Operation, public viaIdentity: boolean, protected state: State, objectInitializer?: DeepPartial) { + super(viaIdentity ? `${operation.language.csharp?.name}ViaIdentity` : operation.language.csharp?.name || '', System.Threading.Tasks.Task()); this.apply(objectInitializer); this.async = Modifier.Async; this.returnsDescription = `A that will be complete when handling of the response is completed.`; const $this = this; - this.callName = `${operation.details.csharp.name}_Call`; + this.callName = `${operation.language.csharp?.name}_Call`; this.push(Using('NoSynchronizationContext', '')); // add parameters @@ -100,19 +102,23 @@ export class OperationMethod extends Method { if (this.viaIdentity) { this.addParameter(identity); } - - for (let index = 0; index < length(this.operation.parameters); index++) { + let baseUrl = ''; + for (let index = 0; index < length(this.operation.parameters) && this.operation.parameters; index++) { const value = this.operation.parameters[index]; + if (value.language.default.name === '$host') { + baseUrl = value.clientDefaultValue; + continue; + } const p = new OperationParameter(this, value, this.state.path('parameters', index)); - if (value.details.csharp.constantValue) { - const constTd = state.project.modelsNamespace.resolveTypeDeclaration(value.schema, true, state); - p.defaultInitializer = constTd.deserializeFromString(KnownMediaType.UriParameter, new StringExpression(`${value.details.csharp.constantValue}`), toExpression(constTd.defaultOfType)); + if (value.language.csharp?.constantValue) { + const constTd = state.project.modelsNamespace.NewResolveTypeDeclaration(value.schema, true, state); + p.defaultInitializer = constTd.deserializeFromString(KnownMediaType.UriParameter, new StringExpression(`${value.language.csharp.constantValue}`), toExpression(constTd.defaultOfType)); } // don't add path parameters when we're in identity mode - if (!this.viaIdentity || value.in !== ParameterLocation.Path) { + if (!this.viaIdentity || value.protocol.http?.in !== ParameterLocation.Path) { this.addParameter(p); } else { this.add(function* () { @@ -122,28 +128,30 @@ export class OperationMethod extends Method { this.methodParameters.push(p); } - this.description = this.operation.details.csharp.description; + this.description = this.operation.language.csharp?.description || ''; // add body paramter if there should be one. - if (this.operation.requestBody) { + if (this.operation.requests && this.operation.requests.length && this.operation.requests[0].parameters && this.operation.requests[0].parameters.length) { // this request does have a request body. - this.bodyParameter = new OperationBodyParameter(this, 'body', this.operation.requestBody.description || '', this.operation.requestBody.schema, this.operation.requestBody.required, this.state.path('requestBody'), { - mediaType: knownMediaType(this.operation.requestBody.contentType), - contentType: this.operation.requestBody.contentType - }); - this.addParameter(this.bodyParameter); + const param = this.operation.requests[0].parameters.find((p) => !p.origin || p.origin.indexOf('modelerfour:synthesized') < 0); + if (param) { + this.bodyParameter = new OperationBodyParameter(this, 'body', param.language.default.description, param.schema, param.required ?? false, this.state, { + // TODO: temp solution. 
We need a class like NewKnowMediaType + mediaType: knownMediaType(KnownMediaType.Json), + contentType: KnownMediaType.Json + }); + this.addParameter(this.bodyParameter); + } } - for (const responses of values(this.operation.responses)) { - for (const response of values(responses)) { - const responseType = response.schema ? state.project.modelsNamespace.resolveTypeDeclaration(response.schema, true, state) : null; - const headerType = response.headerSchema ? state.project.modelsNamespace.resolveTypeDeclaration(response.headerSchema, true, state) : null; + for (const response of [...values(this.operation.responses), ...values(this.operation.exceptions)]) { + const responseType = (response).schema ? state.project.modelsNamespace.NewResolveTypeDeclaration(((response).schema), true, state) : null; + const headerType = response.language.default.headerSchema ? state.project.modelsNamespace.NewResolveTypeDeclaration(response.language.default.headerSchema, true, state) : null; + const newCallbackParameter = new CallbackParameter(response.language.csharp?.name || '', responseType, headerType, this.state, { description: response.language.csharp?.description }); + this.addParameter(newCallbackParameter); + this.callbacks.push(newCallbackParameter); - const newCallbackParameter = new CallbackParameter(response.details.csharp.name, responseType, headerType, this.state, { description: response.details.csharp.description }); - this.addParameter(newCallbackParameter); - this.callbacks.push(newCallbackParameter); - } } // add eventhandler parameter @@ -152,42 +160,42 @@ export class OperationMethod extends Method { // add optional parameter for sender this.senderParameter = this.addParameter(new Parameter('sender', ClientRuntime.ISendAsync, { description: `an instance of an ${ClientRuntime.ISendAsync} pipeline to use to make the request.` })); - let rx = this.operation.path; + let rx = this.operation.requests ? this.operation.requests[0].protocol.http?.path : ''; + const path = rx; // For post API, Some URI may contain an action string .e.x '/start' at the end // of the URI, for such cases, we will drop the action string if identityCorrection // is set in the configuration - if (this.operation.method === 'post' && this.state.project.identityCorrection) { + if (this.operation.requests && this.operation.requests.length && this.operation.requests[0].protocol.http?.method === 'post' && this.state.project.identityCorrection) { const idx = rx.lastIndexOf('/'); rx = rx.substr(0, idx); } + let url = `${baseUrl}/${path.startsWith('/') ? path.substr(1) : path}`; - let url = `${this.operation.baseUrl}${this.operation.path.startsWith('/') ? 
this.operation.path.substr(1) : this.operation.path}`; + const serverParams = this.methodParameters.filter(each => each.param.protocol.http?.in === ParameterLocation.Uri); - const serverParams = this.methodParameters.filter(each => each.param.in === ParameterLocation.Uri); - - const headerParams = this.methodParameters.filter(each => each.param.in === ParameterLocation.Header); - const pathParams = this.methodParameters.filter(each => each.param.in === ParameterLocation.Path); - const queryParams = this.methodParameters.filter(each => each.param.in === ParameterLocation.Query); - const cookieParams = this.methodParameters.filter(each => each.param.in === ParameterLocation.Cookie); + const headerParams = this.methodParameters.filter(each => each.param.protocol.http?.in === ParameterLocation.Header); + const pathParams = this.methodParameters.filter(each => each.param.protocol.http?.in === ParameterLocation.Path); + const queryParams = this.methodParameters.filter(each => each.param.protocol.http?.in === ParameterLocation.Query); + const cookieParams = this.methodParameters.filter(each => each.param.protocol.http?.in === ParameterLocation.Cookie); // replace any server params in the uri for (const pp of serverParams) { - url = url.replace(`{${pp.param.name}}`, `" + url = url.replace(`{${pp.param.language.default.serializedName}}`, `" + ${pp.name} + "`); } for (const pp of pathParams) { - rx = rx.replace(`{${pp.param.name}}`, `(?<${pp.param.name}>[^/]+)`); + rx = rx.replace(`{${pp.param.language.default.serializedName}}`, `(?<${pp.param.language.default.serializedName}>[^/]+)`); if (this.viaIdentity) { - url = url.replace(`{${pp.param.name}}`, `" + url = url.replace(`{${pp.param.language.default.serializedName}}`, `" + ${pp.name} + "`); } else { - url = url.replace(`{${pp.param.name}}`, `" + url = url.replace(`{${pp.param.language.default.serializedName}}`, `" + ${removeEncoding(pp, '', KnownMediaType.UriParameter)} + "`); } @@ -209,11 +217,11 @@ export class OperationMethod extends Method { const match = Local('_match', `${System.Text.RegularExpressions.Regex.new(rx).value}.Match(${identity.value})`); yield match.declarationStatement; - yield If(`!${match}.Success`, `throw new global::System.Exception("Invalid identity for URI '${$this.operation.path}'");`); + yield If(`!${match}.Success`, `throw new global::System.Exception("Invalid identity for URI '${path}'");`); yield EOL; yield '// replace URI parameters with values from identity'; for (const pp of pathParams) { - yield `var ${pp.name} = ${match.value}.Groups["${pp.param.name}"].Value;`; + yield `var ${pp.name} = ${match.value}.Groups["${pp.param.language.default.serializedName}"].Value;`; } } @@ -222,9 +230,9 @@ export class OperationMethod extends Method { initializer: System.Uri.new(`${System.Text.RegularExpressions.Regex.declaration}.Replace( "${url}" ${queryParams.length > 0 ? '+ "?"' : ''}${queryParams.joinWith(pp => ` - + ${removeEncoding(pp, pp.param.name, KnownMediaType.QueryParameter)}`, ` + + ${removeEncoding(pp, pp.param.language.default.serializedName, KnownMediaType.QueryParameter)}`, ` + "&"` -)} + )} ,"\\\\?&*$|&*$|(\\\\?)&+|(&)&+","$1$2")`.replace(/\s*\+ ""/gm, '')) }); yield urlV.declarationStatement; @@ -235,18 +243,19 @@ export class OperationMethod extends Method { yield EOL; yield '// generate request object '; - yield `var request = ${System.Net.Http.HttpRequestMessage.new(`${ClientRuntime.fullName}.Method.${$this.operation.method.capitalize()}, ${urlV.value}`)};`; + const method = $this.operation.requests ? 
$this.operation.requests[0].protocol.http?.method : ''; + yield `var request = ${System.Net.Http.HttpRequestMessage.new(`${ClientRuntime.fullName}.Method.${method.capitalize()}, ${urlV.value}`)};`; yield eventListener.signal(ClientRuntime.Events.RequestCreated, urlV.value); yield EOL; if (length(headerParams) > 0) { yield '// add headers parameters'; for (const hp of headerParams) { - if (hp.param.name === 'Content-Length') { + if (hp.param.language.default.name === 'Content-Length') { // content length is set when the request body is set continue; } - yield hp.serializeToContainerMember(KnownMediaType.Header, new LocalVariable('request.Headers', dotnet.Var), hp.param.name, ClientRuntime.SerializationMode.None); + yield hp.serializeToContainerMember(KnownMediaType.Header, new LocalVariable('request.Headers', dotnet.Var), hp.param.language.default.serializedName, ClientRuntime.SerializationMode.None); } yield EOL; } @@ -283,12 +292,11 @@ export class OperationMethod extends Method { } } } - export class CallMethod extends Method { public returnNull = false; constructor(protected parent: Class, protected opMethod: OperationMethod, protected state: State, objectInitializer?: DeepPartial) { - super(`${opMethod.operation.details.csharp.name}_Call`, System.Threading.Tasks.Task()); - this.description = `Actual wire call for method.`; + super(`${opMethod.name}_Call`, System.Threading.Tasks.Task()); + this.description = `Actual wire call for method.`; this.returnsDescription = opMethod.returnsDescription; this.apply(objectInitializer); @@ -322,17 +330,18 @@ export class CallMethod extends Method { // add response handlers yield Switch(`${response}.StatusCode`, function* () { - for (const { key: responseCode, value: responses } of items(opMethod.operation.responses)) { - if (responseCode !== 'default') { + for (const responses of [...values(opMethod.operation.responses), ...values(opMethod.operation.exceptions)]) { + if (responses.protocol.http?.statusCodes[0] !== 'default') { + const responseCode = responses.protocol.http?.statusCodes[0]; // will use enum when it can, fall back to casting int when it can't - yield Case(System.Net.HttpStatusCode[responseCode] ? System.Net.HttpStatusCode[responseCode].value : `(${System.Net.HttpStatusCode.declaration})${responseCode}`, $this.responsesEmitter($this, opMethod, responses, eventListener)); + yield Case(System.Net.HttpStatusCode[responseCode] ? System.Net.HttpStatusCode[responseCode].value : `(${System.Net.HttpStatusCode.declaration})${responseCode}`, $this.responsesEmitter($this, opMethod, [responses], eventListener)); } else { - yield DefaultCase($this.responsesEmitter($this, opMethod, responses, eventListener)); + yield DefaultCase($this.responsesEmitter($this, opMethod, [responses], eventListener)); } } // missing default response? - if (!opMethod.operation.responses.default) { + if (!opMethod.operation.exceptions) { // if no default, we need one that handles the rest of the stuff. 
yield TerminalDefaultCase(function* () { yield `throw new ${ClientRuntime.fullName}.UndeclaredResponseException(_response);`; @@ -348,13 +357,13 @@ export class CallMethod extends Method { yield eventListener.signal(ClientRuntime.Events.ResponseCreated, response.value); const EOL = 'EOL'; // LRO processing (if appropriate) - if ($this.opMethod.operation.details.csharp.lro) { + if ($this.opMethod.operation.language.csharp?.lro) { yield '// this operation supports x-ms-long-running-operation'; const originalUri = Local('_originalUri', new LiteralExpression(`${reqParameter.use}.RequestUri.AbsoluteUri`)); yield originalUri; - yield `// declared final-state-via: ${$this.opMethod.operation.details.csharp.lro['final-state-via']}`; - const fsv = $this.opMethod.operation.details.csharp.lro['final-state-via']; + yield `// declared final-state-via: ${$this.opMethod.operation.language.csharp.lro['final-state-via']}`; + const fsv = $this.opMethod.operation.language.csharp.lro['final-state-via']; let finalUri: LocalVariable; switch (fsv) { @@ -368,11 +377,10 @@ export class CallMethod extends Method { finalUri = Local('_finalUri', response.invokeMethod('GetFirstHeader', new StringExpression('Location'))); yield finalUri; break; - case 'azure-asyncoperation': case 'azure-async-operation': - // depending on the type of request, do the appropriate behavior - switch ($this.opMethod.operation.method.toLowerCase()) { + //depending on the type of request, do the appropriate behavior + switch ($this.opMethod.operation.requests?.[0].protocol.http?.method.toLowerCase()) { case 'post': case 'delete': finalUri = Local('_finalUri', response.invokeMethod('GetFirstHeader', new StringExpression('Azure-AsyncOperation'))); @@ -388,19 +396,20 @@ export class CallMethod extends Method { default: // depending on the type of request, fall back to the appropriate behavior - switch ($this.opMethod.operation.method.toLowerCase()) { - case 'post': - case 'delete': - finalUri = Local('_finalUri', response.invokeMethod('GetFirstHeader', new StringExpression('Location'))); - yield finalUri; - break; - case 'patch': - case 'put': - // perform a final GET on the original URI. - finalUri = originalUri; - break; + if ($this.opMethod.operation.requests) { + switch ($this.opMethod.operation.requests[0].protocol.http?.method.toLowerCase()) { + case 'post': + case 'delete': + finalUri = Local('_finalUri', response.invokeMethod('GetFirstHeader', new StringExpression('Location'))); + yield finalUri; + break; + case 'patch': + case 'put': + // perform a final GET on the original URI. + finalUri = originalUri; + break; + } } - break; } @@ -425,12 +434,12 @@ export class CallMethod extends Method { yield EOL; yield '// while we wait, let\'s grab the headers and get ready to poll. 
'; - yield 'if (!System.String.IsNullOrEmpty(_response.GetFirstHeader(@"Azure-AsyncOperation"))) {' + yield 'if (!System.String.IsNullOrEmpty(_response.GetFirstHeader(@"Azure-AsyncOperation"))) {'; yield ' ' + asyncOperation.assign(response.invokeMethod('GetFirstHeader', new StringExpression('Azure-AsyncOperation'))); - yield '}' - yield 'if (!global::System.String.IsNullOrEmpty(_response.GetFirstHeader(@"Location"))) {' + yield '}'; + yield 'if (!global::System.String.IsNullOrEmpty(_response.GetFirstHeader(@"Location"))) {'; yield ' ' + location.assign(response.invokeMethod('GetFirstHeader', new StringExpression('Location'))); - yield '}' + yield '}'; const uriLocal = Local('_uri', Ternery( System.String.IsNullOrEmpty(asyncOperation), Ternery(System.String.IsNullOrEmpty(location), @@ -559,20 +568,20 @@ if( ${response.value}.StatusCode == ${System.Net.HttpStatusCode.OK}) yield 'break;'; } - private * responsesEmitter($this: CallMethod, opMethod: OperationMethod, responses: Array, eventListener: EventListener) { + private * responsesEmitter($this: CallMethod, opMethod: OperationMethod, responses: Array, eventListener: EventListener) { if (length(responses) > 1) { yield Switch('_contentType', function* () { for (const eachResponse of values(responses)) { - const mimetype = length(eachResponse.mimeTypes) > 0 ? eachResponse.mimeTypes[0] : ''; - const callbackParameter = values(opMethod.callbacks).first(each => each.name === eachResponse.details.csharp.name); + const mimetype = length(eachResponse.protocol.http?.mediaTypes) > 0 ? eachResponse.protocol.http?.mimeTypes[0] : ''; + const callbackParameter = values(opMethod.callbacks).first(each => each.name === eachResponse.language.csharp?.name); - let count = length(eachResponse.mimeTypes); - for (const mt of values(eachResponse.mimeTypes)) { + let count = length(eachResponse.protocol.http?.mediaTypes); + for (const mt of values(eachResponse.protocol.http?.mediaTypes)) { count--; - const mediaType = normalizeMediaType(mt); + const mediaType = normalizeMediaType(mt); if (mediaType) { if (count === 0) { - yield Case(new StringExpression(mediaType).toString(), $this.responseHandler(mimetype, eachResponse, callbackParameter)); + yield Case(new StringExpression(mediaType).toString(), $this.NewResponseHandler(mimetype, eachResponse, callbackParameter)); } else { yield TerminalCase(new StringExpression(mediaType).toString(), ''); } @@ -582,14 +591,13 @@ if( ${response.value}.StatusCode == ${System.Net.HttpStatusCode.OK}) }); } else { const response = responses[0]; - const callbackParameter = values(opMethod.callbacks).first(each => each.name === response.details.csharp.name); + const callbackParameter = values(opMethod.callbacks).first(each => each.name === response.language.csharp?.name); // all mimeTypes per for this response code. yield eventListener.signal(ClientRuntime.Events.BeforeResponseDispatch, '_response'); - yield $this.responseHandler(values(response.mimeTypes).first() || '', response, callbackParameter); + yield $this.NewResponseHandler(values(response.protocol.http?.mediaTypes).first() || '', response, callbackParameter); } } - private * responseHandlerForNormalPipeline(mimetype: string, eachResponse: NewResponse, callbackParameter: CallbackParameter) { const callbackParameters = new Array(); @@ -618,17 +626,47 @@ if( ${response.value}.StatusCode == ${System.Net.HttpStatusCode.OK}) yield `await ${eachResponse.details.csharp.name}(_response${callbackParameters.length === 0 ? 
'' : ','}${callbackParameters.joinWith(valueOf)});`; } + private * NewResponseHandlerForNormalPipeline(mimetype: string, eachResponse: Response, callbackParameter: CallbackParameter) { + const callbackParameters = new Array(); + + if (callbackParameter.responseType) { + // hande the body response + const r = callbackParameter.responseType.deserializeFromResponse(knownMediaType(mimetype), toExpression('_response'), toExpression('null')); + if (r) { + + callbackParameters.push(r); + } + + // if (parseMediaType(mimetype)) { + // this media type isn't directly supported by deserialization + // we can return a stream to the consumer instead + // } + } + + if (callbackParameter.headerType) { + // header model deserialization... + const r = callbackParameter.headerType.deserializeFromResponse(KnownMediaType.Header, toExpression('_response'), toExpression('null')); + if (r) { + callbackParameters.push(r); + } + } + // make the callback with the appropriate parameters + yield `await ${eachResponse.language.csharp?.name}(_response${callbackParameters.length === 0 ? '' : ','}${callbackParameters.joinWith(valueOf)});`; + } + private responseHandler(mimetype: string, eachResponse: NewResponse, callbackParameter: CallbackParameter) { return this.responseHandlerForNormalPipeline(mimetype, eachResponse, callbackParameter); } + private NewResponseHandler(mimetype: string, eachResponse: Response, callbackParameter: CallbackParameter) { + return this.NewResponseHandlerForNormalPipeline(mimetype, eachResponse, callbackParameter); + } } - export class ValidationMethod extends Method { constructor(protected parent: Class, protected opMethod: OperationMethod, protected state: State, objectInitializer?: DeepPartial) { - super(`${opMethod.operation.details.csharp.name}_Validate`, System.Threading.Tasks.Task()); + super(`${opMethod.name}_Validate`, System.Threading.Tasks.Task()); this.apply(objectInitializer); - this.description = `Validation method for method. Call this like the actual call, but you will get validation events back.`; + this.description = `Validation method for method. 
Call this like the actual call, but you will get validation events back.`; this.returnsDescription = opMethod.returnsDescription; this.access = Access.Internal; this.async = Modifier.Async; @@ -664,4 +702,4 @@ export class ValidationMethod extends Method { } }); } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/operation/namespace.ts b/powershell/llcsharp/operation/namespace.ts index d84b6a2de70..22f6a2736a6 100644 --- a/powershell/llcsharp/operation/namespace.ts +++ b/powershell/llcsharp/operation/namespace.ts @@ -11,7 +11,7 @@ import { DeepPartial } from '@azure-tools/codegen'; export class ServiceNamespace extends Namespace { constructor(public state: State, objectInitializer?: DeepPartial) { - super(state.model.details.csharp.namespace || 'INVALID.NAMESPACE', state.project); + super(state.model.language.csharp?.namespace, state.project); this.apply(objectInitializer); this.add(new ImportDirective(`static ${ClientRuntime.Extensions}`)); } diff --git a/powershell/llcsharp/operation/parameter.ts b/powershell/llcsharp/operation/parameter.ts index 4495c64c9c7..e46c779bedb 100644 --- a/powershell/llcsharp/operation/parameter.ts +++ b/powershell/llcsharp/operation/parameter.ts @@ -4,7 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { Method } from '@azure-tools/codegen-csharp'; - +import { Parameter as NewHttpOperationParameter, Schema as NewSchema } from '@azure-tools/codemodel'; import { KnownMediaType } from '@azure-tools/codemodel-v3'; import { System } from '@azure-tools/codegen-csharp'; import { Expression, ExpressionOrLiteral } from '@azure-tools/codegen-csharp'; @@ -20,18 +20,19 @@ import { State } from '../generator'; import { DeepPartial } from '@azure-tools/codegen'; /** represents a method parameter for an http operation (header/cookie/query/path) */ + export class OperationParameter extends Parameter implements EnhancedVariable { public typeDeclaration: EnhancedTypeDeclaration; - public param: HttpOperationParameter; + public param: NewHttpOperationParameter; - constructor(parent: Method, param: HttpOperationParameter, state: State, objectInitializer?: DeepPartial) { - const typeDeclaration = state.project.modelsNamespace.resolveTypeDeclaration(param.schema, param.required, state.path('schema')); - super(param.details.csharp.name, typeDeclaration); + constructor(parent: Method, param: NewHttpOperationParameter, state: State, objectInitializer?: DeepPartial) { + const typeDeclaration = state.project.modelsNamespace.NewResolveTypeDeclaration(param.schema, !!param.required, state); + super(param.language.csharp?.name || '', typeDeclaration); this.param = param; this.typeDeclaration = typeDeclaration; this.apply(objectInitializer); - this.description = param.details.csharp.description || ''; + this.description = param.language.csharp?.description || ''; } /** emits an expression to deserialize a property from a member inside a container */ @@ -68,6 +69,7 @@ export class OperationParameter extends Parameter implements EnhancedVariable { } /** represents a method parameter for an http operation (body) */ + export class OperationBodyParameter extends Parameter implements EnhancedVariable { /** emits an expression to deserialize a property from a member inside a container */ deserializeFromContainerMember(mediaType: KnownMediaType, container: ExpressionOrLiteral, serializedName: string): Expression { @@ -107,15 +109,15 @@ export class OperationBodyParameter extends Parameter implements EnhancedVariabl 
public typeDeclaration: EnhancedTypeDeclaration; - constructor(parent: Method, name: string, description: string, schema: Schema, required: boolean, state: State, objectInitializer?: DeepPartial) { - const typeDeclaration = state.project.modelsNamespace.resolveTypeDeclaration(schema, required, state.path('schema')); + constructor(parent: Method, name: string, description: string, schema: NewSchema, required: boolean, state: State, objectInitializer?: DeepPartial) { + const typeDeclaration = state.project.modelsNamespace.NewResolveTypeDeclaration(schema, required, state.path('schema')); super(name, typeDeclaration); this.typeDeclaration = typeDeclaration; this.mediaType = KnownMediaType.Json; this.contentType = KnownMediaType.Json; this.apply(objectInitializer); - this.description = description || schema.details.csharp.description; + this.description = description || schema.language.csharp?.description || ''; } public get jsonSerializationStatement(): OneOrMoreStatements { @@ -128,6 +130,7 @@ export class OperationBodyParameter extends Parameter implements EnhancedVariabl } } + export class CallbackParameter extends Parameter { responseType: (EnhancedTypeDeclaration) | null; headerType: (EnhancedTypeDeclaration) | null; @@ -156,4 +159,4 @@ export class CallbackParameter extends Parameter { this.headerType = headerType; this.apply(objectInitializer); } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/project.ts b/powershell/llcsharp/project.ts index bc684e0a59d..650a8f9ae22 100644 --- a/powershell/llcsharp/project.ts +++ b/powershell/llcsharp/project.ts @@ -47,7 +47,7 @@ export class Project extends codeDomProject { // add project namespace - this.projectNamespace = this.state.model.details.csharp.namespace; + this.projectNamespace = this.state.model.language.csharp?.namespace; this.overrides = { 'Carbon.Json.Converters': `${this.projectNamespace}.Runtime.Json`, 'Carbon.Internal.Extensions': `${this.projectNamespace}.Runtime.Json`, @@ -90,4 +90,4 @@ export class Project extends codeDomProject { public serviceNamespace!: ServiceNamespace; public modelsNamespace!: ModelsNamespace; public supportNamespace!: SupportNamespace; -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/Uuid.ts b/powershell/llcsharp/schema/Uuid.ts index 03b3d9a23cf..75034805561 100644 --- a/powershell/llcsharp/schema/Uuid.ts +++ b/powershell/llcsharp/schema/Uuid.ts @@ -6,10 +6,12 @@ import { nameof } from '@azure-tools/codegen'; import { Variable } from '@azure-tools/codegen-csharp'; import { Schema } from '../code-model'; +import { StringSchema } from '@azure-tools/codemodel'; import { String } from './string'; + export class Uuid extends String { - constructor(schema: Schema, isRequired: boolean) { + constructor(schema: StringSchema, isRequired: boolean) { super(schema, isRequired); } diff --git a/powershell/llcsharp/schema/array.ts b/powershell/llcsharp/schema/array.ts index 11dbdb7504d..9d1c35835d6 100644 --- a/powershell/llcsharp/schema/array.ts +++ b/powershell/llcsharp/schema/array.ts @@ -15,6 +15,9 @@ import { Ternery } from '@azure-tools/codegen-csharp'; import { LocalVariable, Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; +import { Schema as NewSchema } from '@azure-tools/codemodel'; + + import { popTempVar, pushTempVar } from '../schema/primitive'; import { EnhancedTypeDeclaration } from './extended-type-declaration'; @@ -25,19 +28,20 @@ export class ArrayOf implements 
EnhancedTypeDeclaration { return toExpression('null /* arrayOf */'); } - constructor(public schema: Schema, public isRequired: boolean, public elementType: EnhancedTypeDeclaration, protected minItems: number | undefined, protected maxItems: number | undefined, protected unique: boolean | undefined) { + constructor(public schema: NewSchema, public isRequired: boolean, public elementType: EnhancedTypeDeclaration, protected minItems: number | undefined, protected maxItems: number | undefined, protected unique: boolean | undefined) { } + protected get isWrapped(): boolean { - return this.schema.xml && this.schema.xml.wrapped || false; + return this.schema.serialization?.xml && this.schema.serialization?.xml.wrapped || false; } protected get wrapperName(): string | undefined { - return this.schema.xml && this.isWrapped ? this.schema.xml.name : undefined; + return this.schema.serialization?.xml && this.isWrapped ? this.schema.serialization.xml.name : undefined; } protected get serializedName(): string | undefined { - return this.schema.xml ? this.schema.xml.name : undefined; + return this.schema.serialization?.xml ? this.schema.serialization.xml.name : undefined; } get elementTypeDeclaration(): string { return this.elementType.declaration; @@ -48,6 +52,7 @@ export class ArrayOf implements EnhancedTypeDeclaration { } get encode(): string { + this.schema.extensions = this.schema.extensions || {}; return this.schema.extensions['x-ms-skip-url-encoding'] ? '' : 'global::System.Uri.EscapeDataString'; } @@ -150,10 +155,9 @@ export class ArrayOf implements EnhancedTypeDeclaration { const serArray = `global::System.Linq.Enumerable.ToArray(System.Linq.Enumerable.Select(${value}, (${each}) => ${this.elementType.serializeToNode(mediaType, each, serializedName, mode)}))`; return toExpression(`null != ${value} ? new ${ClientRuntime.XNodeArray}(${serArray}) : null`); } - case KnownMediaType.Xml: { if (this.isWrapped) { - const name = this.elementType.schema.xml ? this.elementType.schema.xml.name || serializedName : serializedName; + const name = this.elementType.schema.serialization?.xml ? this.elementType.schema.serialization?.xml.name || serializedName : serializedName; return toExpression(`null != ${value} ? global::new System.Xml.Linq.XElement("${name}", global::System.Linq.Enumerable.ToArray(global::System.Linq.Enumerable.Select(${value}, (${each}) => ${this.elementType.serializeToNode(mediaType, each, name, mode)}))`); } else { throw new Error('Can\'t set an Xml Array to the document without wrapping it.'); @@ -185,10 +189,10 @@ export class ArrayOf implements EnhancedTypeDeclaration { } case KnownMediaType.Xml: { // if the reference doesn't define an XML schema then use its default name - const defaultName = this.elementType.schema.details.csharp.name; + const defaultName = this.elementType.schema.language.csharp?.name || ''; return System.Net.Http.StringContent.new(Ternery( IsNotNull(value), - `${this.serializeToNode(mediaType, value, this.schema.xml ? this.schema.xml.name || defaultName : defaultName, mode)}).ToString()`, + `${this.serializeToNode(mediaType, value, this.schema.serialization?.xml ? 
this.schema.serialization.xml?.name || defaultName : defaultName, mode)}).ToString()`, System.String.Empty ), System.Text.Encoding.UTF8); } @@ -226,8 +230,7 @@ export class ArrayOf implements EnhancedTypeDeclaration { } case KnownMediaType.Xml: if (this.isWrapped) { - - return `AddIf( ${System.Xml.Linq.XElement.new('"{this.serializedName || serializedName}"', `${this.serializeToNode(mediaType, value, this.elementType.schema.xml ? this.elementType.schema.xml.name || '!!!' : serializedName, mode)}):null`)}, ${container}.Add); `; + return `AddIf( ${System.Xml.Linq.XElement.new('"{this.serializedName || serializedName}"', `${this.serializeToNode(mediaType, value, this.elementType.schema.serialization?.xml ? this.elementType.schema.serialization?.xml.name || '!!!' : serializedName, mode)}):null`)}, ${container}.Add); `; } else { return If(`null != ${value}`, ForEach(each, toExpression(value), `AddIf(${this.elementType.serializeToNode(mediaType, each, serializedName, mode)}, ${container}.Add);`)); } @@ -260,3 +263,4 @@ export class ArrayOf implements EnhancedTypeDeclaration { `.trim(); } } + diff --git a/powershell/llcsharp/schema/binary.ts b/powershell/llcsharp/schema/binary.ts index a0cf767083d..49091f193c8 100644 --- a/powershell/llcsharp/schema/binary.ts +++ b/powershell/llcsharp/schema/binary.ts @@ -11,10 +11,12 @@ import { OneOrMoreStatements } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { Schema } from '../code-model'; import { EnhancedTypeDeclaration } from './extended-type-declaration'; +import { BinarySchema } from '@azure-tools/codemodel'; + export class Binary implements EnhancedTypeDeclaration { public isXmlAttribute = false; - constructor(public schema: Schema, public isRequired: boolean) { + constructor(public schema: BinarySchema, public isRequired: boolean) { } get convertObjectMethod() { diff --git a/powershell/llcsharp/schema/boolean.ts b/powershell/llcsharp/schema/boolean.ts index 8890200beaf..342b75a0ea3 100644 --- a/powershell/llcsharp/schema/boolean.ts +++ b/powershell/llcsharp/schema/boolean.ts @@ -6,13 +6,14 @@ import { Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; -import { Primitive } from './primitive'; +import { Schema as NewSchema, BooleanSchema } from '@azure-tools/codemodel'; +import { NewPrimitive } from './primitive'; -export class Boolean extends Primitive { +export class Boolean extends NewPrimitive { isXmlAttribute = false; jsonType = ClientRuntime.JsonBoolean; - constructor(schema: Schema, public isRequired: boolean) { + constructor(schema: BooleanSchema, public isRequired: boolean) { super(schema); } diff --git a/powershell/llcsharp/schema/byte-array.ts b/powershell/llcsharp/schema/byte-array.ts index 8025bd4c016..3a27284dbc4 100644 --- a/powershell/llcsharp/schema/byte-array.ts +++ b/powershell/llcsharp/schema/byte-array.ts @@ -10,6 +10,7 @@ import { If } from '@azure-tools/codegen-csharp'; import { OneOrMoreStatements } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { Schema } from '../code-model'; +import { Schema as NewSchema, ByteArraySchema } from '@azure-tools/codemodel'; import { popTempVar, pushTempVar } from './primitive'; import { EnhancedTypeDeclaration } from './extended-type-declaration'; import { ClientRuntime } from '../clientruntime'; @@ -103,7 +104,7 @@ export class ByteArray implements EnhancedTypeDeclaration { return `/* 
serializeToContainerMember doesn't support '${mediaType}' ${__filename}*/`; } - constructor(public schema: Schema, public isRequired: boolean) { + constructor(public schema: ByteArraySchema, public isRequired: boolean) { } validateValue(eventListener: Variable, property: Variable): string { @@ -116,4 +117,4 @@ export class ByteArray implements EnhancedTypeDeclaration { } return ''; } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/char.ts b/powershell/llcsharp/schema/char.ts index 9444296135b..d894a7487cd 100644 --- a/powershell/llcsharp/schema/char.ts +++ b/powershell/llcsharp/schema/char.ts @@ -6,17 +6,19 @@ import { Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; -import { Primitive } from './primitive'; +import { Schema as NewSchema, SchemaType, ChoiceSchema, ChoiceValue } from '@azure-tools/codemodel'; +import { NewPrimitive } from './primitive'; import { length } from '@azure-tools/linq'; -export class Char extends Primitive { + +export class Char extends NewPrimitive { public isXmlAttribute = false; - private choices?: Array; + private choices?: Array; jsonType = ClientRuntime.JsonString; - constructor(schema: Schema, public isRequired: boolean) { + constructor(schema: NewSchema, public isRequired: boolean) { super(schema); - this.choices = length(schema.enum) > 0 ? schema.enum : undefined; + this.choices = schema.type === SchemaType.Choice ? (schema).choices : undefined; } get declaration(): string { @@ -39,4 +41,4 @@ ${this.validateEnum(property)} } return '// todo validate enum choices'; } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/date-time.ts b/powershell/llcsharp/schema/date-time.ts index cfca85c32ca..e271fb11ff7 100644 --- a/powershell/llcsharp/schema/date-time.ts +++ b/powershell/llcsharp/schema/date-time.ts @@ -11,16 +11,18 @@ import { OneOrMoreStatements } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; -import { Primitive } from './primitive'; +import { Schema as NewSchema, DateTimeSchema, UnixTimeSchema, DateSchema } from '@azure-tools/codemodel'; +import { NewPrimitive } from './primitive'; -export class DateTime extends Primitive { + +export class DateTime extends NewPrimitive { public isXmlAttribute = false; public jsonType = ClientRuntime.JsonString; // public DateFormat = new StringExpression('yyyy-MM-dd'); public DateTimeFormat = new StringExpression('yyyy\'-\'MM\'-\'dd\'T\'HH\':\'mm\':\'ss.fffffffK'); get encode(): string { - return this.schema.extensions['x-ms-skip-url-encoding'] ? '' : 'global::System.Uri.EscapeDataString'; + return (this.schema.extensions && this.schema.extensions['x-ms-skip-url-encoding']) ? 
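// ---- editorial sketch (not part of the diff) -------------------------------
// In the v4 code model `extensions` can be undefined, so this hunk guards the
// lookup before indexing into it. The same check as a standalone function,
// assuming the v4 `Schema` type; `skipsUrlEncoding` is an illustrative name:
import { Schema } from '@azure-tools/codemodel';

function skipsUrlEncoding(schema: Schema): boolean {
  // only skip escaping when the swagger explicitly asks for it
  return !!(schema.extensions && schema.extensions['x-ms-skip-url-encoding']);
}
// -----------------------------------------------------------------------------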
'' : 'global::System.Uri.EscapeDataString'; } get declaration(): string { @@ -90,7 +92,7 @@ export class DateTime extends Primitive { } return (`/* serializeToContainerMember doesn't support '${mediaType}' ${__filename}*/`); } - constructor(schema: Schema, public isRequired: boolean) { + constructor(schema: DateTimeSchema | DateSchema, public isRequired: boolean) { super(schema); } // public static string DateFormat = "yyyy-MM-dd"; @@ -105,21 +107,23 @@ export class DateTime extends Primitive { } } + + export class DateTime1123 extends DateTime { public DateTimeFormat = new StringExpression('R'); - constructor(schema: Schema, isRequired: boolean) { + constructor(schema: DateTimeSchema, isRequired: boolean) { super(schema, isRequired); } } -export class UnixTime extends Primitive { +export class UnixTime extends NewPrimitive { public isXmlAttribute = false; public jsonType = ClientRuntime.JsonNumber; private EpochDate = System.DateTime.new('1970', '1', '1', '0', '0', '0', System.DateTimeKind.Utc); get encode(): string { - return this.schema.extensions['x-ms-skip-url-encoding'] ? '' : 'global::System.Uri.EscapeDataString'; + return (this.schema.extensions && this.schema.extensions['x-ms-skip-url-encoding']) ? '' : 'global::System.Uri.EscapeDataString'; } @@ -149,7 +153,7 @@ export class UnixTime extends Primitive { return toExpression(`(null == ${value} ? ${System.String.Empty} : "${serializedName}=" + ${this.encode}(${value}.ToString()))`); } - // return toExpression(`if (${value} != null) { queryParameters.Add($"${value}={${value}}"); }`); + // return toExpression(`if (${value} != null) { queryParameters.Add($"${value}={${value}}"); }`); case KnownMediaType.Cookie: case KnownMediaType.Header: @@ -177,7 +181,7 @@ export class UnixTime extends Primitive { } - constructor(schema: Schema, public isRequired: boolean) { + constructor(schema: UnixTimeSchema, public isRequired: boolean) { super(schema); } @@ -188,4 +192,4 @@ export class UnixTime extends Primitive { get declaration(): string { return `global::System.DateTime${this.isRequired ? 
'' : '?'}`; } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/date.ts b/powershell/llcsharp/schema/date.ts index dbfeb5f6bf1..c4a48751abe 100644 --- a/powershell/llcsharp/schema/date.ts +++ b/powershell/llcsharp/schema/date.ts @@ -5,11 +5,13 @@ import { StringExpression } from '@azure-tools/codegen-csharp'; import { Schema } from '../code-model'; +import { DateSchema } from '@azure-tools/codemodel'; import { DateTime } from './date-time'; + export class Date extends DateTime { public DateTimeFormat = new StringExpression('yyyy-MM-dd'); - constructor(schema: Schema, isRequired: boolean) { + constructor(schema: DateSchema, isRequired: boolean) { super(schema, isRequired); } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/duration.ts b/powershell/llcsharp/schema/duration.ts index db32fa8e191..711398f9a55 100644 --- a/powershell/llcsharp/schema/duration.ts +++ b/powershell/llcsharp/schema/duration.ts @@ -8,19 +8,21 @@ import { Expression, ExpressionOrLiteral, toExpression, System, valueOf } from ' import { OneOrMoreStatements } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { Schema } from '../code-model'; -import { Primitive } from './primitive'; +import { Schema as NewSchema, DurationSchema } from '@azure-tools/codemodel'; +import { NewPrimitive } from './primitive'; import { ClientRuntime } from '../clientruntime'; -export class Duration extends Primitive { + +export class Duration extends NewPrimitive { public isXmlAttribute = false; public jsonType = ClientRuntime.JsonString; - constructor(public schema: Schema, public isRequired: boolean) { + constructor(public schema: DurationSchema, public isRequired: boolean) { super(schema); } get encode(): string { - return this.schema.extensions['x-ms-skip-url-encoding'] ? '' : 'global::System.Uri.EscapeDataString'; + return (this.schema.extensions && this.schema.extensions['x-ms-skip-url-encoding']) ? '' : 'global::System.Uri.EscapeDataString'; } get declaration(): string { @@ -68,4 +70,4 @@ export class Duration extends Primitive { public validatePresence(eventListener: Variable, property: Variable): string { return ''; } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/enum.ts b/powershell/llcsharp/schema/enum.ts index 258b96b42b7..798329dbbe0 100644 --- a/powershell/llcsharp/schema/enum.ts +++ b/powershell/llcsharp/schema/enum.ts @@ -4,6 +4,7 @@ *--------------------------------------------------------------------------------------------*/ import { Schema } from '../code-model'; +import { Schema as NewSchema } from '@azure-tools/codemodel'; import { String } from './string'; import { dotnet, toExpression } from '@azure-tools/codegen-csharp'; @@ -13,18 +14,19 @@ export class EnumImplementation extends String { return !this.isRequired; } - constructor(schema: Schema, isRequired: boolean) { + constructor(schema: NewSchema, isRequired: boolean) { + super(schema, isRequired); } get defaultOfType() { - return this.isRequired ? toExpression(`((${this.schema.details.csharp.namespace}.${this.schema.details.csharp.name}${this.isRequired ? '' : '?'})"")`) : dotnet.Null; + return this.isRequired ? toExpression(`((${this.schema.language.csharp?.namespace}.${this.schema.language.csharp?.name}${this.isRequired ? 
'' : '?'})"")`) : dotnet.Null; } get convertObjectMethod() { - return `${this.schema.details.csharp.namespace}.${this.schema.details.csharp.name}.CreateFrom`; + return `${this.schema.language.csharp?.namespace}.${this.schema.language.csharp?.name}.CreateFrom`; } - get declaration(): string { return `${this.schema.details.csharp.namespace}.${this.schema.details.csharp.name}${this.isRequired ? '' : '?'}`; } + get declaration(): string { return `${this.schema.language.csharp?.namespace}.${this.schema.language.csharp?.name}${this.isRequired ? '' : '?'}`; } } diff --git a/powershell/llcsharp/schema/extended-type-declaration.ts b/powershell/llcsharp/schema/extended-type-declaration.ts index b5a29aa1329..b08fd1ab2c7 100644 --- a/powershell/llcsharp/schema/extended-type-declaration.ts +++ b/powershell/llcsharp/schema/extended-type-declaration.ts @@ -9,8 +9,10 @@ import { OneOrMoreStatements } from '@azure-tools/codegen-csharp'; import { TypeDeclaration } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { Schema } from '../code-model'; +import { Schema as NewSchema } from '@azure-tools/codemodel'; /** A TypeDeclaration that can assist in generating code for a variety of serialization, validation and other common use cases */ + export interface EnhancedTypeDeclaration extends TypeDeclaration { /** emits an expression to deserialize a property from a member inside a container */ deserializeFromContainerMember(mediaType: KnownMediaType, container: ExpressionOrLiteral, serializedName: string, defaultValue: Expression): Expression; @@ -48,7 +50,7 @@ export interface EnhancedTypeDeclaration extends TypeDeclaration { isXmlAttribute: boolean; /** the underlying schema for this type declarartion. */ - schema: Schema; + schema: NewSchema; isNullable: boolean; @@ -57,4 +59,4 @@ export interface EnhancedTypeDeclaration extends TypeDeclaration { /** emits the code required to validate that this has a permissable value */ validateValue(eventListener: Variable, property: Variable): OneOrMoreStatements; -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/integer.ts b/powershell/llcsharp/schema/integer.ts index d293093ab3a..0d39024b097 100644 --- a/powershell/llcsharp/schema/integer.ts +++ b/powershell/llcsharp/schema/integer.ts @@ -7,13 +7,15 @@ import { nameof } from '@azure-tools/codegen'; import { Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; -import { Primitive } from './primitive'; +import { NumberSchema } from '@azure-tools/codemodel'; +import { NewPrimitive } from './primitive'; -export class Numeric extends Primitive { + +export class Numeric extends NewPrimitive { public isXmlAttribute = false; public jsonType = ClientRuntime.JsonNumber; - constructor(schema: Schema, public isRequired: boolean, protected numericType: string) { + constructor(schema: NumberSchema, public isRequired: boolean, protected numericType: string) { super(schema); } get declaration(): string { @@ -29,19 +31,19 @@ ${this.validateMultipleOf(eventListener, property)} `.trim(); } protected validateMinimum(eventListener: Variable, property: Variable): string { - return this.schema.minimum && !this.schema.exclusiveMinimum ? `await ${eventListener}.AssertIsGreaterThanOrEqual(${nameof(property.value)},${property},${this.schema.minimum});` : ''; + return (this.schema).minimum && !(this.schema).exclusiveMinimum ? 
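// ---- editorial sketch (not part of the diff) -------------------------------
// The validators above read range facets (minimum, maximum, multipleOf, ...)
// straight off the schema. Assuming the bare `(this.schema)` expressions in
// this hunk are NumberSchema type assertions, the inclusive-minimum check
// reduces to the following simplified sketch; `minimumAssertion` is an
// illustrative name and the emitted C# call is abbreviated:
import { NumberSchema } from '@azure-tools/codemodel';

function minimumAssertion(schema: NumberSchema, property: string): string {
  // emit an inclusive lower-bound assertion only when the facet is present
  return schema.minimum !== undefined && !schema.exclusiveMinimum
    ? `await eventListener.AssertIsGreaterThanOrEqual("${property}",${property},${schema.minimum});`
    : '';
}
// -----------------------------------------------------------------------------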
`await ${eventListener}.AssertIsGreaterThanOrEqual(${nameof(property.value)},${property},${(this.schema).minimum});` : ''; } protected validateMaximum(eventListener: Variable, property: Variable): string { - return this.schema.maximum && !this.schema.exclusiveMaximum ? `await ${eventListener}.AssertIsLessThanOrEqual(${nameof(property.value)},${property},${this.schema.maximum});` : ''; + return (this.schema).maximum && !(this.schema).exclusiveMaximum ? `await ${eventListener}.AssertIsLessThanOrEqual(${nameof(property.value)},${property},${(this.schema).maximum});` : ''; } protected validateExclusiveMinimum(eventListener: Variable, property: Variable): string { - return this.schema.minimum && this.schema.exclusiveMinimum ? `await ${eventListener}.AssertIsGreaterThan(${nameof(property.value)},${property},${this.schema.minimum});` : ''; + return (this.schema).minimum && (this.schema).exclusiveMinimum ? `await ${eventListener}.AssertIsGreaterThan(${nameof(property.value)},${property},${(this.schema).minimum});` : ''; } protected validateExclusiveMaximum(eventListener: Variable, property: Variable): string { - return this.schema.maximum && this.schema.exclusiveMaximum ? `await ${eventListener}.AssertIsLessThan(${nameof(property.value)},${property},${this.schema.maximum});` : ''; + return (this.schema).maximum && (this.schema).exclusiveMaximum ? `await ${eventListener}.AssertIsLessThan(${nameof(property.value)},${property},${(this.schema).maximum});` : ''; } protected validateMultipleOf(eventListener: Variable, property: Variable): string { - return this.schema.multipleOf ? `await ${eventListener}.AssertIsMultipleOf(${nameof(property.value)},${property},${this.schema.multipleOf});` : ''; + return (this.schema).multipleOf ? `await ${eventListener}.AssertIsMultipleOf(${nameof(property.value)},${property},${(this.schema).multipleOf});` : ''; } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/object.ts b/powershell/llcsharp/schema/object.ts index 376203c3197..cf6a138e813 100644 --- a/powershell/llcsharp/schema/object.ts +++ b/powershell/llcsharp/schema/object.ts @@ -14,9 +14,11 @@ import { Ternery } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; +import { Schema as NewSchema, ObjectSchema } from '@azure-tools/codemodel'; import { popTempVar, pushTempVar } from './primitive'; import { EnhancedTypeDeclaration } from './extended-type-declaration'; + export class ObjectImplementation implements EnhancedTypeDeclaration { public isXmlAttribute = false; @@ -29,7 +31,7 @@ export class ObjectImplementation implements EnhancedTypeDeclaration { } get convertObjectMethod() { - return `${this.schema.details.csharp.fullname}TypeConverter.ConvertFrom`; + return `${this.schema.language.csharp?.fullname}TypeConverter.ConvertFrom`; } deserializeFromContainerMember(mediaType: KnownMediaType, container: ExpressionOrLiteral, serializedName: string, defaultValue: Expression): Expression { @@ -44,7 +46,7 @@ export class ObjectImplementation implements EnhancedTypeDeclaration { // XElement/XElement or XElement/XAttribute const tmp = `__${camelCase(['xml', ...deconstruct(serializedName)])}`; // prefer specified XML name if available - return toExpression(`If( ${valueOf(container)}?.Element("${this.schema.xml ? this.schema.xml.name || serializedName : serializedName}"), out var ${tmp}) ? 
${this.classDeclaration}.FromXml(${tmp}) : ${defaultValue}`); + return toExpression(`If( ${valueOf(container)}?.Element("${this.schema.serialization?.xml ? this.schema.serialization.xml.name || serializedName : serializedName}"), out var ${tmp}) ? ${this.classDeclaration}.FromXml(${tmp}) : ${defaultValue}`); } } return toExpression(`${defaultValue} /* deserializeFromContainerMember doesn't support '${mediaType}' ${__filename} */`); @@ -101,20 +103,21 @@ export class ObjectImplementation implements EnhancedTypeDeclaration { System.String.Empty), System.Text.Encoding.UTF8); } - case KnownMediaType.Multipart: { - let contents = ''; - for (const p of values(this.schema.properties)) { - // to do -- add in a potential support for the filename too. - contents = `${contents}${EOL} bodyContent.Add( ${System.Net.Http.StreamContent.new(`${value}.${p.details.csharp.name}`)},"${p.serializedName}");`; - } - // bodyContent.Add(new _ystem.Net.Http.StreamContent(body.AudioFile), "audioFile"); - return toExpression(`new ${System.Func(System.Net.Http.MultipartFormDataContent)}(() => -{ - var bodyContent = ${System.Net.Http.MultipartFormDataContent.new()}; - ${contents} - return bodyContent; -})()`); - } + //skip-for-time-being + // case KnownMediaType.Multipart: { + // let contents = ''; + // for (const p of values(this.schema.properties)) { + // // to do -- add in a potential support for the filename too. + // contents = `${contents}${EOL} bodyContent.Add( ${System.Net.Http.StreamContent.new(`${value}.${p.details.csharp.name}`)},"${p.serializedName}");`; + // } + // // bodyContent.Add(new _ystem.Net.Http.StreamContent(body.AudioFile), "audioFile"); + // return toExpression(`new ${System.Func(System.Net.Http.MultipartFormDataContent)}(() => + // { + // var bodyContent = ${System.Net.Http.MultipartFormDataContent.new()}; + // ${contents} + // return bodyContent; + // })()`); + // } } return toExpression(`null /* serializeToContent doesn't support '${mediaType}' ${__filename}*/`); } @@ -136,7 +139,7 @@ export class ObjectImplementation implements EnhancedTypeDeclaration { deserializeFromResponse(mediaType: KnownMediaType, content: ExpressionOrLiteral, defaultValue: Expression): Expression | undefined { switch (mediaType) { case KnownMediaType.Json: { - if (this.schema.details.csharp.hasHeaders) { + if (this.schema.language.csharp?.hasHeaders) { return toExpression(`${content}.Content.ReadAsStringAsync().ContinueWith( body => ${this.deserializeFromString(mediaType, 'body.Result', defaultValue)}.ReadHeaders(_response.Headers))`); } return toExpression(`${content}.Content.ReadAsStringAsync().ContinueWith( body => ${this.deserializeFromString(mediaType, 'body.Result', defaultValue)})`); @@ -157,7 +160,7 @@ export class ObjectImplementation implements EnhancedTypeDeclaration { case KnownMediaType.Xml: // prefer specified XML name if available - return `AddIf( null != ${value} ? ${value}.ToXml(new ${System.Xml.Linq.XElement}("${this.schema.xml ? this.schema.xml.name || serializedName : serializedName}")) : null, ${container}.Add );`; + return `AddIf( null != ${value} ? ${value}.ToXml(new ${System.Xml.Linq.XElement}("${this.schema.serialization?.xml ? 
this.schema.serialization.xml.name || serializedName : serializedName}")) : null, ${container}.Add );`; } return `/* serializeToContainerMember doesn't support '${mediaType}' ${__filename}*/`; @@ -165,7 +168,7 @@ export class ObjectImplementation implements EnhancedTypeDeclaration { isRequired = false; - constructor(public schema: Schema) { + constructor(public schema: ObjectSchema) { } public validatePresence(eventListener: Variable, property: Variable): OneOrMoreStatements { @@ -175,7 +178,7 @@ export class ObjectImplementation implements EnhancedTypeDeclaration { return `await ${eventListener}.AssertObjectIsValid(${nameof(property.value)}, ${property}); `; } - get declaration(): string { return `${this.schema.details.csharp.namespace}.${this.schema.details.csharp.interfaceName}`; } - get classDeclaration(): string { return `${this.schema.details.csharp.namespace}.${this.schema.details.csharp.name}`; } + get declaration(): string { return `${this.schema.language.csharp?.namespace}.${this.schema.language.csharp?.interfaceName}`; } + get classDeclaration(): string { return `${this.schema.language.csharp?.namespace}.${this.schema.language.csharp?.name}`; } } diff --git a/powershell/llcsharp/schema/primitive.ts b/powershell/llcsharp/schema/primitive.ts index 393f0f6a72b..2264da29511 100644 --- a/powershell/llcsharp/schema/primitive.ts +++ b/powershell/llcsharp/schema/primitive.ts @@ -14,6 +14,7 @@ import { Ternery } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; +import { Schema as NewSchema, PrimitiveSchema } from '@azure-tools/codemodel'; import { EnhancedTypeDeclaration } from './extended-type-declaration'; let tmpVar: number | undefined; @@ -48,7 +49,7 @@ export function popTempVar() { } } -export abstract class Primitive implements EnhancedTypeDeclaration { +export abstract class NewPrimitive implements EnhancedTypeDeclaration { abstract isRequired: boolean; abstract isXmlAttribute: boolean; abstract declaration: string; @@ -58,7 +59,7 @@ export abstract class Primitive implements EnhancedTypeDeclaration { } get encode(): string { - return this.schema.extensions['x-ms-skip-url-encoding'] ? '' : 'global::System.Uri.EscapeDataString'; + return (this.schema.extensions && this.schema.extensions['x-ms-skip-url-encoding']) ? '' : 'global::System.Uri.EscapeDataString'; } get defaultOfType() { @@ -73,7 +74,7 @@ export abstract class Primitive implements EnhancedTypeDeclaration { return result; } - constructor(public schema: Schema) { + constructor(public schema: PrimitiveSchema) { } /** validatePresence on primitives is generally not required; the nullability determines requiredness... */ public validatePresence(eventListener: Variable, property: Variable): string { @@ -186,7 +187,7 @@ export abstract class Primitive implements EnhancedTypeDeclaration { return toExpression(`(null == ${value} ? 
${System.String.Empty} : "${serializedName}=" + ${this.encode}(${value}.ToString()))`); } - // return toExpression(`if (${value} != null) { queryParameters.Add($"${value}={${value}}"); }`); + // return toExpression(`if (${value} != null) { queryParameters.Add($"${value}={${value}}"); }`); case KnownMediaType.Cookie: case KnownMediaType.Header: @@ -229,4 +230,4 @@ export abstract class Primitive implements EnhancedTypeDeclaration { } return (`/* serializeToContainerMember doesn't support '${mediaType}' ${__filename}*/`); } -} +} \ No newline at end of file diff --git a/powershell/llcsharp/schema/schema-resolver.ts b/powershell/llcsharp/schema/schema-resolver.ts index 8ab8a58edb0..1e5c2a4c3fa 100644 --- a/powershell/llcsharp/schema/schema-resolver.ts +++ b/powershell/llcsharp/schema/schema-resolver.ts @@ -3,7 +3,9 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { ModelState, codemodel, IntegerFormat, NumberFormat, StringFormat, JsonType } from '@azure-tools/codemodel-v3'; +import { codeModelSchema, ArraySchema, UnixTimeSchema, CodeModel, Schema as NewSchema, StringSchema, BooleanSchema, NumberSchema, ByteArraySchema, DateTimeSchema, ObjectSchema, GroupSchema, isObjectSchema, SchemaType, GroupProperty, ParameterLocation, Operation, Parameter, VirtualParameter, getAllProperties, ImplementationLocation, OperationGroup, Request, SchemaContext, ConstantSchema, ChoiceSchema, DurationSchema, BinarySchema, DateSchema } from '@azure-tools/codemodel'; + +import { codemodel, IntegerFormat, NumberFormat, StringFormat, JsonType } from '@azure-tools/codemodel-v3'; import { Schema } from '../code-model'; import * as message from '../messages'; import { ArrayOf } from './array'; @@ -20,120 +22,109 @@ import { ObjectImplementation } from './object'; import { String } from './string'; import { Uuid } from './Uuid'; import { EnhancedTypeDeclaration } from './extended-type-declaration'; +import { PwshModel } from '../../utils/PwshModel'; +import { ModelState } from '../../utils/model-state'; +import { Channel, Host, Session, startSession } from '@azure-tools/autorest-extension-base'; +import { schemaHasEnum } from '../validations'; export class SchemaDefinitionResolver { private readonly cache = new Map(); - private add(schema: Schema, value: EnhancedTypeDeclaration): EnhancedTypeDeclaration { - this.cache.set(schema.details.csharp.fullname || '', value); + private add(schema: NewSchema, value: EnhancedTypeDeclaration): EnhancedTypeDeclaration { + this.cache.set(schema.language?.csharp?.fullname || '', value); return value; } - resolveTypeDeclaration(schema: Schema | undefined, required: boolean, state: ModelState): EnhancedTypeDeclaration { + resolveTypeDeclaration(schema: NewSchema | undefined, required: boolean, state: ModelState): EnhancedTypeDeclaration { if (!schema) { throw new Error('SCHEMA MISSING?'); } // determine if we need a new model class for the type or just a known type object switch (schema.type) { - case JsonType.Array: { + case SchemaType.Array: { // can be recursive! // handle boolean arrays as booleans (powershell will try to turn it into switches!) - const elementType = (schema.items && schema.items.type === JsonType.Boolean) ? 
new Boolean(schema, true) : this.resolveTypeDeclaration(schema.items, true, state.path('items')); - return new ArrayOf(schema, required, elementType, schema.minItems, schema.maxItems, schema.uniqueItems); + const ar = schema; + const elementType = (ar.elementType.type === SchemaType.Boolean) ? new Boolean(schema, true) : this.resolveTypeDeclaration(ar.elementType, true, state.path('items')); + return new ArrayOf(schema, required, elementType, ar.minItems, ar.maxItems, ar.uniqueItems); } - case JsonType.Object: { - const result = schema.details.csharp && this.cache.get(schema.details.csharp.fullname || ''); + case SchemaType.Any: + case SchemaType.Dictionary: + case SchemaType.Object: { + const result = schema.language.csharp && this.cache.get(schema.language.csharp.fullname || ''); if (result) { return result; } - return this.add(schema, new ObjectImplementation(schema)); + return this.add(schema, new ObjectImplementation(schema)); } - case JsonType.String: - switch (schema.format) { - case StringFormat.Base64Url: - case StringFormat.Byte: - // member should be byte array - // on wire format should be base64url - return new ByteArray(schema, required); - - case StringFormat.Binary: - // represent as a stream - // wire format is stream of bytes - return new Binary(schema, required); - - case StringFormat.Char: - // a single character - return new Char(schema, required); - - case StringFormat.Date: - return new Date(schema, required); - - case StringFormat.DateTime: - return new DateTime(schema, required); - - case StringFormat.DateTimeRfc1123: - return new DateTime1123(schema, required); - - case StringFormat.Duration: - return new Duration(schema, required); - - case StringFormat.Uuid: - return new Uuid(schema, required); + case SchemaType.Time: + case SchemaType.Credential: + case SchemaType.String: { + return new String(schema, required); - case StringFormat.Url: - case StringFormat.Password: - case StringFormat.None: - case undefined: - case null: - if (schema.extensions && schema.extensions['x-ms-enum']) { - return new EnumImplementation(schema, required); - } - /* - if(schema.extensions && schema.extensions['x-ms-header-collection-prefix']) { - return new Wildcard(schema, new String({}, required)); - } - */ - // just a regular old string. - return new String(schema, required); - - default: - state.warning(`Schema with type:'${schema.type} and 'format:'${schema.format}' is not recognized.`, message.DoesNotSupportEnum); - return new String(schema, required); + } + case SchemaType.Binary: + return new Binary(schema, required); + case SchemaType.Duration: + return new Duration(schema, required); + case SchemaType.Uuid: + return new Uuid(schema, required); + case SchemaType.DateTime: + if ((schema).format === StringFormat.DateTimeRfc1123) { + return new DateTime1123(schema, required); } - - case JsonType.Boolean: - return new Boolean(schema, required); - - case JsonType.Integer: - switch (schema.format) { - case IntegerFormat.Int64: - case IntegerFormat.None: - return new Numeric(schema, required, required ? 'long' : 'long?'); - case IntegerFormat.UnixTime: - return new UnixTime(schema, required); - case IntegerFormat.Int32: - return new Numeric(schema, required, required ? 
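// ---- editorial sketch (not part of the diff) -------------------------------
// The resolver above maps NumberSchema.precision to C# numeric types instead
// of the old IntegerFormat/NumberFormat strings. A condensed sketch of that
// mapping, following the cases in this hunk (16/32-bit integers -> int,
// 64-bit -> long, 64-bit numbers -> double, 128 -> decimal, otherwise float);
// `csharpNumericType` is an illustrative name:
import { NumberSchema, SchemaType } from '@azure-tools/codemodel';

function csharpNumericType(schema: NumberSchema, required: boolean): string {
  const suffix = required ? '' : '?';
  if (schema.type === SchemaType.Integer) {
    // fall back to int when the precision isn't recognized
    return (schema.precision === 64 ? 'long' : 'int') + suffix;
  }
  switch (schema.precision) {
    case 64: return 'double' + suffix;
    case 128: return 'decimal' + suffix;
    default: return 'float' + suffix; // 32-bit or unrecognized
  }
}
// -----------------------------------------------------------------------------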
'int' : 'int?'); + return new DateTime(schema, required); + case SchemaType.Date: + return new Date(schema, required); + case SchemaType.ByteArray: + return new ByteArray(schema, required); + case SchemaType.Boolean: + return new Boolean(schema, required); + + case SchemaType.Integer: + switch ((schema).precision) { + case 64: + return new Numeric(schema, required, required ? 'long' : 'long?'); + // skip-for-time-being + // case IntegerFormat.UnixTime: + // return new UnixTime(schema, required); + case 16: + case 32: + return new Numeric(schema, required, required ? 'int' : 'int?'); } // fallback to int if the format isn't recognized - return new Numeric(schema, required, required ? 'int' : 'int?'); - - case JsonType.Number: - switch (schema.format) { - case NumberFormat.None: - case NumberFormat.Double: - return new Numeric(schema, required, required ? 'double' : 'double?'); - case NumberFormat.Float: - return new Numeric(schema, required, required ? 'float' : 'float?'); - case NumberFormat.Decimal: - return new Numeric(schema, required, required ? 'decimal' : 'decimal?'); + return new Numeric(schema, required, required ? 'int' : 'int?'); + + case SchemaType.UnixTime: + return new UnixTime(schema, required); + + case SchemaType.Number: + switch ((schema).precision) { + case 64: + return new Numeric(schema, required, required ? 'double' : 'double?'); + case 32: + return new Numeric(schema, required, required ? 'float' : 'float?'); + case 128: + return new Numeric(schema, required, required ? 'decimal' : 'decimal?'); } // fallback to float if the format isn't recognized - return new Numeric(schema, required, required ? 'float' : 'float?'); + return new Numeric(schema, required, required ? 'float' : 'float?'); + + case SchemaType.Constant: + return this.resolveTypeDeclaration((schema).valueType, required, state); + case SchemaType.Choice: { + return this.resolveTypeDeclaration((schema).choiceType, required, state); + } + case SchemaType.SealedChoice: + if (schema.language.default.skip === true) { + return new String(schema, required); + } + return new EnumImplementation(schema, required); case undefined: if (schema.extensions && schema.extensions['x-ms-enum']) { - return new EnumImplementation(schema, required); + return new EnumImplementation(schema, required); } // "any" case @@ -141,7 +132,7 @@ export class SchemaDefinitionResolver { break; } - state.error(`Schema '${schema.details.csharp.name}' is declared with invalid type '${schema.type}'`, message.UnknownJsonType); + state.error(`Schema '${schema.language.csharp?.name}' is declared with invalid type '${schema.type}'`, message.UnknownJsonType); throw new Error('Unknown Model. Fatal.'); } } diff --git a/powershell/llcsharp/schema/string.ts b/powershell/llcsharp/schema/string.ts index 078bf26bbfb..19ae3320108 100644 --- a/powershell/llcsharp/schema/string.ts +++ b/powershell/llcsharp/schema/string.ts @@ -12,10 +12,13 @@ import { OneOrMoreStatements } from '@azure-tools/codegen-csharp'; import { Variable } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../clientruntime'; import { Schema } from '../code-model'; +import { ChoiceSchema, Schema as NewSchema, SchemaType, SealedChoiceSchema, StringSchema } from '@azure-tools/codemodel'; import { popTempVar, pushTempVar } from './primitive'; import { EnhancedTypeDeclaration } from './extended-type-declaration'; import { length } from '@azure-tools/linq'; + + /** A ETD for the c# string type. 
*/ export class String implements EnhancedTypeDeclaration { public isXmlAttribute = false; @@ -161,7 +164,8 @@ export class String implements EnhancedTypeDeclaration { return (`/* serializeToContainerMember doesn't support '${mediaType}' ${__filename}*/`); } - constructor(public schema: Schema, public isRequired: boolean) { + constructor(public schema: NewSchema, public isRequired: boolean) { + } get declaration(): string { @@ -183,27 +187,31 @@ ${this.validateEnum(eventListener, property)} } private validateMinLength(eventListener: Variable, property: Variable): string { - if (!this.schema.minLength) { + const len = (this.schema).minLength; + if (!len) { return ''; } - return `await ${eventListener}.AssertMinimumLength(${nameof(property.value)},${property},${this.schema.minLength});`; + return `await ${eventListener}.AssertMinimumLength(${nameof(property.value)},${property},${len});`; } private validateMaxLength(eventListener: Variable, property: Variable): string { - if (!this.schema.maxLength) { + const len = (this.schema).maxLength; + if (!len) { return ''; } - return `await ${eventListener}.AssertMaximumLength(${nameof(property.value)},${property},${this.schema.maxLength});`; + return `await ${eventListener}.AssertMaximumLength(${nameof(property.value)},${property},${len});`; } private validateRegex(eventListener: Variable, property: Variable): string { - if (!this.schema.pattern) { + const pattern = (this.schema).pattern; + if (!pattern) { return ''; } - return `await ${eventListener}.AssertRegEx(${nameof(property.value)},${property},@"${this.schema.pattern}");`; + return `await ${eventListener}.AssertRegEx(${nameof(property.value)},${property},@"${pattern}");`; } private validateEnum(eventListener: Variable, property: Variable): string { - if (!this.schema.enum || length(this.schema.enum) === 0) { + if (this.schema.type !== SchemaType.SealedChoice && this.schema.type != SchemaType.Choice) { return ''; } - return `await ${eventListener}.AssertEnum(${nameof(property.value)},${property},${this.schema.enum.joinWith((v) => `@"${v}"`)});`; + const choiceValues = (this.schema).choices.map((c) => c.value); + return `await ${eventListener}.AssertEnum(${nameof(property.value)},${property},${choiceValues.joinWith((v) => `@"${v}"`)});`; } } diff --git a/powershell/main.ts b/powershell/main.ts index 155741913ce..9eebe7b6292 100644 --- a/powershell/main.ts +++ b/powershell/main.ts @@ -4,25 +4,35 @@ *--------------------------------------------------------------------------------------------*/ import { AutoRestExtension, } from '@azure-tools/autorest-extension-base'; -import { applyModifiers } from './plugins/modifiers'; -import { createCommands } from './plugins/create-commands'; -import { namer } from './plugins/ps-namer'; -import { powershell } from './plugins/powershell'; -import { addCompleter } from './plugins/add-azure-completers'; -import { csnamer } from './plugins/cs-namer'; -import { llcsharp } from './plugins/llcsharp'; +import { createInlinedPropertiesPlugin } from './plugins/plugin-create-inline-properties'; +import { tweakModelPlugin } from './plugins/plugin-tweak-model'; +import { tweakModelAzurePluginV2 } from './plugins/plugin-tweak-model-azure-v2'; +import { createCommandsV2 } from './plugins/create-commands-v2'; +import { csnamerV2 } from './plugins/cs-namer-v2'; +import { namerV2 } from './plugins/ps-namer-v2'; +import { llcsharpV2 } from './plugins/llcsharp-v2'; +import { powershellV2 } from './plugins/powershell-v2'; +import { addCompleterV2 } from 
'./plugins/add-azure-completers-v2'; +import { applyModifiersV2 } from './plugins/modifiers-v2'; +import { tweakM4ModelPlugin } from './plugins/plugin-tweak-m4-model'; require('source-map-support').install(); export async function main() { const pluginHost = new AutoRestExtension(); - pluginHost.Add('powershell', powershell); - pluginHost.Add('create-commands', createCommands); - pluginHost.Add('psnamer', namer); - pluginHost.Add('modifiers', applyModifiers); - pluginHost.Add('add-azure-completers', addCompleter); - pluginHost.Add('csnamer', csnamer); - pluginHost.Add('llcsharp', llcsharp); + // Following are plugins moved from remodeler + pluginHost.Add('tweakm4codemodel', tweakM4ModelPlugin); + pluginHost.Add('tweakcodemodel-v2', tweakModelPlugin); + pluginHost.Add('tweakcodemodelazure-v2', tweakModelAzurePluginV2); + pluginHost.Add('create-virtual-properties-v2', createInlinedPropertiesPlugin); + pluginHost.Add('create-commands-v2', createCommandsV2); + pluginHost.Add('csnamer-v2', csnamerV2); + pluginHost.Add('psnamer-v2', namerV2); + pluginHost.Add('modifiers-v2', applyModifiersV2); + pluginHost.Add('add-azure-completers-v2', addCompleterV2); + pluginHost.Add('llcsharp-v2', llcsharpV2); + pluginHost.Add('powershell-v2', powershellV2); + await pluginHost.Run(); } diff --git a/powershell/models/model-extensions.ts b/powershell/models/model-extensions.ts index 24e90893091..8024ec3f3a3 100644 --- a/powershell/models/model-extensions.ts +++ b/powershell/models/model-extensions.ts @@ -2,9 +2,10 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +import { DictionarySchema, ObjectSchema, Schema as NewSchema, SchemaType } from '@azure-tools/codemodel'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; import { Catch, Try, Else, ElseIf, If, Interface, Attribute, Parameter, Modifier, dotnet, Class, LambdaMethod, LiteralExpression, Method, Namespace, System, Return, LocalVariable, Constructor, IsAssignableFrom, ImportDirective, Property, Access, InterfaceProperty } from '@azure-tools/codegen-csharp'; -import { Schema, ClientRuntime, SchemaDefinitionResolver, ObjectImplementation, DeserializerPartialClass } from '../llcsharp/exports'; +import { Schema, ClientRuntime, ObjectImplementation, SchemaDefinitionResolver, DeserializerPartialClass } from '../llcsharp/exports'; import { State } from '../internal/state'; import { PSObject, PSTypeConverter, TypeConverterAttribute } from '../internal/powershell-declarations'; import { join } from 'path'; @@ -35,220 +36,222 @@ export class ModelExtensionsNamespace extends Namespace { } resolver = new SchemaDefinitionResolver(); - constructor(parent: Namespace, private schemas: Dictionary, private state: State, objectInitializer?: DeepPartial) { + constructor(parent: Namespace, private schemas: Dictionary>, private state: State, objectInitializer?: DeepPartial) { super('Models', parent); this.apply(objectInitializer); this.add(new ImportDirective(`${ClientRuntime.name}.PowerShell`)); this.subNamespaces[this.fullName] = this; const $this = this; - const resolver = (s: Schema, req: boolean) => this.resolver.resolveTypeDeclaration(s, req, state); + const resolver = (s: NewSchema, req: boolean) => this.resolver.resolveTypeDeclaration(s, req, state); // Add typeconverters to model classes (partial) - for (const schema of values(schemas)) { - 
if (!schema || schema.details.csharp.skip) { - continue; - } + for (const schemaGroup of values(schemas)) { + for (const schema of values(schemaGroup)) { + if (!schema || (schema.language.csharp && schema.language.csharp.skip)) { + continue; + } - const td = this.resolver.resolveTypeDeclaration(schema, true, state); - if (td instanceof ObjectImplementation) { + const td = this.resolver.resolveTypeDeclaration(schema, true, state); + if (td instanceof ObjectImplementation) { - // it's a class object. - const className = td.schema.details.csharp.name; - const interfaceName = td.schema.details.csharp.interfaceName || ''; - const converterClass = `${className}TypeConverter`; + // it's a class object. + const className = td.schema.language.csharp?.name || ''; + const interfaceName = td.schema.language.csharp?.interfaceName || ''; + const converterClass = `${className}TypeConverter`; - if (this.findClassByName(className).length > 0) { - continue; - } + if (this.findClassByName(className).length > 0) { + continue; + } + + // get the actual full namespace for the schema + const fullname = schema.language.csharp?.namespace || this.fullName; + const ns = this.subNamespaces[fullname] || this.add(new ApiVersionModelExtensionsNamespace(this.outputFolder, fullname)); - // get the actual full namespace for the schema - const fullname = schema.details.csharp.namespace || this.fullName; - const ns = this.subNamespaces[fullname] || this.add(new ApiVersionModelExtensionsNamespace(this.outputFolder, fullname)); - - // create the model extensions for each object model - // 2. A partial interface with the type converter attribute - const modelInterface = new Interface(ns, interfaceName, { - partial: true, - description: td.schema.details.csharp.description, - fileName: `${interfaceName}.PowerShell` // make sure that the interface ends up in the same file as the class. - }); - modelInterface.add(new Attribute(TypeConverterAttribute, { parameters: [new LiteralExpression(`typeof(${converterClass})`)] })); - - // 1. A partial class with the type converter attribute - const model = new Class(ns, className, undefined, { - partial: true, - description: td.schema.details.csharp.description, - fileName: `${className}.PowerShell` - }); - - // if the model is supposed to be use 'by-reference' we should create an I*Reference interface for that - // and add that interface to the extension class - if (schema.details.csharp.byReference) { - const refInterface = `${interfaceName}_Reference`; - schema.details.csharp.referenceInterface = `${ns.fullName}.${refInterface}`; - - const referenceInterface = new Interface(ns, refInterface, { + // create the model extensions for each object model + // 2. A partial interface with the type converter attribute + const modelInterface = new Interface(ns, interfaceName, { partial: true, - description: `Reference for model ${fullname}`, + description: td.schema.language.csharp?.description, fileName: `${interfaceName}.PowerShell` // make sure that the interface ends up in the same file as the class. }); - referenceInterface.add(new Attribute(TypeConverterAttribute, { parameters: [new LiteralExpression(`typeof(${converterClass})`)] })); - referenceInterface.add(new InterfaceProperty('Id', dotnet.String, { setAccess: Access.Internal })); - model.interfaces.push(referenceInterface); + modelInterface.add(new Attribute(TypeConverterAttribute, { parameters: [new LiteralExpression(`typeof(${converterClass})`)] })); - // add it to the generic reference type. 
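// ---- editorial sketch (not part of the diff) -------------------------------
// With the v4 model this constructor receives schemas grouped per API version,
// so the rewritten loop above walks a dictionary of schema groups and honours
// the skip flag on language.csharp. The shape of that walk as a standalone
// sketch, assuming the container is a linq Dictionary of Schema arrays;
// `eachUsableSchema` is an illustrative name:
import { Schema } from '@azure-tools/codemodel';
import { Dictionary, values } from '@azure-tools/linq';

function eachUsableSchema(schemas: Dictionary<Array<Schema>>, visit: (schema: Schema) => void): void {
  for (const group of values(schemas)) {
    for (const schema of values(group)) {
      // skip placeholders and schemas the namers marked to be skipped
      if (!schema || schema.language.csharp?.skip) {
        continue;
      }
      visit(schema);
    }
  }
}
// -----------------------------------------------------------------------------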
- // referenceType = referenceType || this.CreateReferenceType(); - // referenceType.interfaces.push(referenceInterface); - } + // 1. A partial class with the type converter attribute + const model = new Class(ns, className, undefined, { + partial: true, + description: td.schema.language.csharp?.description, + fileName: `${className}.PowerShell` + }); + + // if the model is supposed to be use 'by-reference' we should create an I*Reference interface for that + // and add that interface to the extension class + if (schema.language.csharp?.byReference) { + const refInterface = `${interfaceName}_Reference`; + schema.language.csharp.referenceInterface = `${ns.fullName}.${refInterface}`; + const referenceInterface = new Interface(ns, refInterface, { + partial: true, + description: `Reference for model ${fullname}`, + fileName: `${interfaceName}.PowerShell` // make sure that the interface ends up in the same file as the class. + }); + referenceInterface.add(new Attribute(TypeConverterAttribute, { parameters: [new LiteralExpression(`typeof(${converterClass})`)] })); + referenceInterface.add(new InterfaceProperty('Id', dotnet.String, { setAccess: Access.Internal })); + model.interfaces.push(referenceInterface); - model.add(new Attribute(TypeConverterAttribute, { parameters: [new LiteralExpression(`typeof(${converterClass})`)] })); - model.add(new LambdaMethod('FromJsonString', modelInterface, new LiteralExpression(`FromJson(${ClientRuntime.JsonNode.declaration}.Parse(jsonText))`), { - static: Modifier.Static, - parameters: [new Parameter('jsonText', dotnet.String, { description: 'a string containing a JSON serialized instance of this model.' })], - description: `Creates a new instance of , deserializing the content from a json string.`, - returnsDescription: 'an instance of the model class.' - })); - - model.add(new LambdaMethod('ToJsonString', dotnet.String, new LiteralExpression(`ToJson(${dotnet.Null}, ${ClientRuntime.SerializationMode.IncludeAll})?.ToString()`), { - description: 'Serializes this instance to a json string.', - returnsDescription: 'a containing this model serialized to JSON text.' - })); - - const hashDeseralizer = new DeserializerPartialClass(model, modelInterface, System.Collections.IDictionary, 'Dictionary', schema, resolver).init(); - const psDeseralizer = new DeserializerPartialClass(model, modelInterface, PSObject, 'PSObject', schema, resolver).init(); - - // + static FromJsonString(string json); - // + string ToJsonString() - - // 3. 
A TypeConverter class - const typeConverter = new Class(ns, converterClass, PSTypeConverter, { - description: `A PowerShell PSTypeConverter to support converting to an instance of `, - fileName: `${className}.TypeConverter` - }); - typeConverter.add(new LambdaMethod('CanConvertTo', dotnet.Bool, dotnet.False, { - override: Modifier.Override, - parameters: [ - new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), - new Parameter('destinationType', System.Type, { description: 'the to convert to' }) - ], - description: 'Determines if the parameter can be converted to the parameter', - returnsDescription: 'true if the converter can convert the parameter to the parameter, otherwise false', - })); - typeConverter.add(new LambdaMethod('ConvertTo', dotnet.Object, dotnet.Null, { - override: Modifier.Override, - parameters: [ - new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), - new Parameter('destinationType', System.Type, { description: 'the to convert to' }), - new Parameter('formatProvider', System.IFormatProvider, { description: 'not used by this TypeConverter.' }), - new Parameter('ignoreCase', dotnet.Bool, { description: 'when set to true, will ignore the case when converting.' }), - ], description: 'NotImplemented -- this will return null', - returnsDescription: 'will always return null.' - })); - typeConverter.add(new LambdaMethod('CanConvertFrom', dotnet.Bool, new LiteralExpression('CanConvertFrom(sourceValue)'), { - override: Modifier.Override, - parameters: [ - new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), - new Parameter('destinationType', System.Type, { description: 'the to convert to' }) - ], - description: 'Determines if the converter can convert the parameter to the parameter.', - returnsDescription: 'true if the converter can convert the parameter to the parameter, otherwise false.', - })); - typeConverter.add(new LambdaMethod('ConvertFrom', dotnet.Object, new LiteralExpression('ConvertFrom(sourceValue)'), { - override: Modifier.Override, - parameters: [ - new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), - new Parameter('destinationType', System.Type, { description: 'the to convert to' }), - new Parameter('formatProvider', System.IFormatProvider, { description: 'not used by this TypeConverter.' }), - new Parameter('ignoreCase', dotnet.Bool, { description: 'when set to true, will ignore the case when converting.' 
}), - ], - description: 'Converts the parameter to the parameter using and ', - returnsDescription: `an instance of , or null if there is no suitable conversion.` - })); - - typeConverter.add(new Method('CanConvertFrom', dotnet.Bool, { - static: Modifier.Static, - parameters: [ - new Parameter('sourceValue', dotnet.Dynamic, { description: `the instance to check if it can be converted to the type.` }), - ], - description: 'Determines if the converter can convert the parameter to the parameter.', - returnsDescription: `true if the instance could be converted to a type, otherwise false ` - })).add(function* () { - yield If('null == sourceValue', Return(dotnet.True)); - - const t = new LocalVariable('type', System.Type, { initializer: 'sourceValue.GetType()' }); - yield t.declarationStatement; - - if (schema.details.default.uid === 'universal-parameter-type' || schema.details.csharp.byReference) { - yield '// we allow string conversion too.'; - yield If(`${t.value} == typeof(${System.String})`, Return(dotnet.True)); + // add it to the generic reference type. + // referenceType = referenceType || this.CreateReferenceType(); + // referenceType.interfaces.push(referenceInterface); } - yield If(IsAssignableFrom(PSObject, t), function* () { - yield '// we say yest to PSObjects'; - yield Return(dotnet.True); - }); - yield If(IsAssignableFrom(System.Collections.IDictionary, t), function* () { - yield '// we say yest to Hashtables/dictionaries'; - yield Return(dotnet.True); - }); - yield Try(If('null != sourceValue.ToJsonString()', Return(dotnet.True))); - yield Catch(undefined, '// Not one of our objects'); + model.add(new Attribute(TypeConverterAttribute, { parameters: [new LiteralExpression(`typeof(${converterClass})`)] })); + model.add(new LambdaMethod('FromJsonString', modelInterface, new LiteralExpression(`FromJson(${ClientRuntime.JsonNode.declaration}.Parse(jsonText))`), { + static: Modifier.Static, + parameters: [new Parameter('jsonText', dotnet.String, { description: 'a string containing a JSON serialized instance of this model.' })], + description: `Creates a new instance of , deserializing the content from a json string.`, + returnsDescription: 'an instance of the model class.' + })); - yield Try(function* () { - const t = new LocalVariable('text', dotnet.String, { initializer: 'sourceValue.ToString()?.Trim()' }); - yield t.declarationStatement; - yield Return(`${dotnet.True} == ${t.value}?.StartsWith("{") && ${dotnet.True} == ${t.value}?.EndsWith("}") && ${ClientRuntime.JsonNode.Parse(t)}.Type == ${ClientRuntime.JsonType.Object}`); + model.add(new LambdaMethod('ToJsonString', dotnet.String, new LiteralExpression(`ToJson(${dotnet.Null}, ${ClientRuntime.SerializationMode.IncludeAll})?.ToString()`), { + description: 'Serializes this instance to a json string.', + returnsDescription: 'a containing this model serialized to JSON text.' + })); + + const hashDeseralizer = new DeserializerPartialClass(model, modelInterface, System.Collections.IDictionary, 'Dictionary', schema, resolver).init(); + const psDeseralizer = new DeserializerPartialClass(model, modelInterface, PSObject, 'PSObject', schema, resolver).init(); + + // + static FromJsonString(string json); + // + string ToJsonString() + + // 3. 
A TypeConverter class + const typeConverter = new Class(ns, converterClass, PSTypeConverter, { + description: `A PowerShell PSTypeConverter to support converting to an instance of `, + fileName: `${className}.TypeConverter` }); - yield Catch(undefined, '// Doesn\'t look like it can be treated as JSON'); - - yield Return(dotnet.False); - }); - - typeConverter.add(new Method('ConvertFrom', modelInterface, { - static: Modifier.Static, - parameters: [ - new Parameter('sourceValue', dotnet.Dynamic, { - description: `the value to convert into an instance of .` - }), - ], - description: 'Converts the parameter to the parameter using and ', - returnsDescription: `an instance of , or null if there is no suitable conversion.` - })).add(function* () { - // null begets null - yield If('null == sourceValue', Return(dotnet.Null)); - - const t = new LocalVariable('type', System.Type, { initializer: 'sourceValue.GetType()' }); - yield t.declarationStatement; - - if (($this.state.project.azure && schema.details.default.uid === 'universal-parameter-type') || schema.details.csharp.byReference) { - yield '// support direct string to id type conversion.'; - yield If(`${t.value} == typeof(${System.String})`, function* () { - yield Return(`new ${className} { Id = sourceValue }`); + typeConverter.add(new LambdaMethod('CanConvertTo', dotnet.Bool, dotnet.False, { + override: Modifier.Override, + parameters: [ + new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), + new Parameter('destinationType', System.Type, { description: 'the to convert to' }) + ], + description: 'Determines if the parameter can be converted to the parameter', + returnsDescription: 'true if the converter can convert the parameter to the parameter, otherwise false', + })); + typeConverter.add(new LambdaMethod('ConvertTo', dotnet.Object, dotnet.Null, { + override: Modifier.Override, + parameters: [ + new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), + new Parameter('destinationType', System.Type, { description: 'the to convert to' }), + new Parameter('formatProvider', System.IFormatProvider, { description: 'not used by this TypeConverter.' }), + new Parameter('ignoreCase', dotnet.Bool, { description: 'when set to true, will ignore the case when converting.' }), + ], description: 'NotImplemented -- this will return null', + returnsDescription: 'will always return null.' + })); + typeConverter.add(new LambdaMethod('CanConvertFrom', dotnet.Bool, new LiteralExpression('CanConvertFrom(sourceValue)'), { + override: Modifier.Override, + parameters: [ + new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), + new Parameter('destinationType', System.Type, { description: 'the to convert to' }) + ], + description: 'Determines if the converter can convert the parameter to the parameter.', + returnsDescription: 'true if the converter can convert the parameter to the parameter, otherwise false.', + })); + typeConverter.add(new LambdaMethod('ConvertFrom', dotnet.Object, new LiteralExpression('ConvertFrom(sourceValue)'), { + override: Modifier.Override, + parameters: [ + new Parameter('sourceValue', dotnet.Object, { description: 'the to convert from' }), + new Parameter('destinationType', System.Type, { description: 'the to convert to' }), + new Parameter('formatProvider', System.IFormatProvider, { description: 'not used by this TypeConverter.' }), + new Parameter('ignoreCase', dotnet.Bool, { description: 'when set to true, will ignore the case when converting.' 
}), + ], + description: 'Converts the parameter to the parameter using and ', + returnsDescription: `an instance of , or null if there is no suitable conversion.` + })); + + typeConverter.add(new Method('CanConvertFrom', dotnet.Bool, { + static: Modifier.Static, + parameters: [ + new Parameter('sourceValue', dotnet.Dynamic, { description: `the instance to check if it can be converted to the type.` }), + ], + description: 'Determines if the converter can convert the parameter to the parameter.', + returnsDescription: `true if the instance could be converted to a type, otherwise false ` + })).add(function* () { + yield If('null == sourceValue', Return(dotnet.True)); + + const t = new LocalVariable('type', System.Type, { initializer: 'sourceValue.GetType()' }); + yield t.declarationStatement; + + if (schema.language.default.uid || schema.language.csharp?.byReference) { + yield '// we allow string conversion too.'; + yield If(`${t.value} == typeof(${System.String})`, Return(dotnet.True)); + } + + yield If(IsAssignableFrom(PSObject, t), function* () { + yield '// we say yest to PSObjects'; + yield Return(dotnet.True); + }); + yield If(IsAssignableFrom(System.Collections.IDictionary, t), function* () { + yield '// we say yest to Hashtables/dictionaries'; + yield Return(dotnet.True); }); - } - if (schema.details.csharp.byReference) { - yield '// if Id is present with by-reference schemas, just return the type with Id '; - yield Try(Return(`new ${className} { Id = sourceValue.Id }`)); - yield Catch(undefined, '// Not an Id reference parameter'); - } + yield Try(If('null != sourceValue.ToJsonString()', Return(dotnet.True))); + yield Catch(undefined, '// Not one of our objects'); - // if the type can be assigned directly, do that - yield If(IsAssignableFrom(td, t), Return('sourceValue')); + yield Try(function* () { + const t = new LocalVariable('text', dotnet.String, { initializer: 'sourceValue.ToString()?.Trim()' }); + yield t.declarationStatement; + yield Return(`${dotnet.True} == ${t.value}?.StartsWith("{") && ${dotnet.True} == ${t.value}?.EndsWith("}") && ${ClientRuntime.JsonNode.Parse(t)}.Type == ${ClientRuntime.JsonType.Object}`); + }); + yield Catch(undefined, '// Doesn\'t look like it can be treated as JSON'); - // try using json first (either from string or toJsonString()) - yield Try(Return(`${className}.FromJsonString(typeof(string) == sourceValue.GetType() ? 
sourceValue : sourceValue.ToJsonString());`)); - yield Catch(undefined, '// Unable to use JSON pattern'); + yield Return(dotnet.False); + }); - yield If(IsAssignableFrom(PSObject, t), Return(`${className}.DeserializeFromPSObject(sourceValue)`)); - yield If(IsAssignableFrom(System.Collections.IDictionary, t), Return(`${className}.DeserializeFromDictionary(sourceValue)`)); + typeConverter.add(new Method('ConvertFrom', modelInterface, { + static: Modifier.Static, + parameters: [ + new Parameter('sourceValue', dotnet.Dynamic, { + description: `the value to convert into an instance of .` + }), + ], + description: 'Converts the parameter to the parameter using and ', + returnsDescription: `an instance of , or null if there is no suitable conversion.` + })).add(function* () { + // null begets null + yield If('null == sourceValue', Return(dotnet.Null)); + + const t = new LocalVariable('type', System.Type, { initializer: 'sourceValue.GetType()' }); + yield t.declarationStatement; + + if (($this.state.project.azure && schema.language.default.uid === 'universal-parameter-type') || schema.language.csharp?.byReference) { + yield '// support direct string to id type conversion.'; + yield If(`${t.value} == typeof(${System.String})`, function* () { + yield Return(`new ${className} { Id = sourceValue }`); + }); + } + + if (schema.language.csharp?.byReference) { + yield '// if Id is present with by-reference schemas, just return the type with Id '; + yield Try(Return(`new ${className} { Id = sourceValue.Id }`)); + yield Catch(undefined, '// Not an Id reference parameter'); + } + + // if the type can be assigned directly, do that + yield If(IsAssignableFrom(td, t), Return('sourceValue')); - // null if not successful - yield Return(dotnet.Null); - }); + // try using json first (either from string or toJsonString()) + yield Try(Return(`${className}.FromJsonString(typeof(string) == sourceValue.GetType() ? sourceValue : sourceValue.ToJsonString());`)); + yield Catch(undefined, '// Unable to use JSON pattern'); + + yield If(IsAssignableFrom(PSObject, t), Return(`${className}.DeserializeFromPSObject(sourceValue)`)); + yield If(IsAssignableFrom(System.Collections.IDictionary, t), Return(`${className}.DeserializeFromDictionary(sourceValue)`)); + + // null if not successful + yield Return(dotnet.Null); + }); + } } } } diff --git a/powershell/module/module-class.ts b/powershell/module/module-class.ts index 18d20a5f45d..8af0a190f5b 100644 --- a/powershell/module/module-class.ts +++ b/powershell/module/module-class.ts @@ -3,14 +3,15 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { Access, Alias, Class, ClassType, Constructor, dotnet, Field, LambdaMethod, LambdaProperty, LazyProperty, LiteralExpression, LocalVariable, MemberVariable, Method, Modifier, Namespace, Parameter, ParameterModifier, PartialMethod, Property, Return, Statements, StringExpression, System, TypeDeclaration, Using, valueOf, Variable, If } from '@azure-tools/codegen-csharp'; + +import { Access, Alias, Class, ClassType, Constructor, dotnet, Field, If, LambdaMethod, LambdaProperty, LazyProperty, LiteralExpression, LocalVariable, MemberVariable, Method, Modifier, Namespace, Parameter, ParameterModifier, PartialMethod, Property, Return, Statements, StringExpression, System, TypeDeclaration, Using, valueOf, Variable } from '@azure-tools/codegen-csharp'; import { InvocationInfo, PSCredential, IArgumentCompleter, CompletionResult, CommandAst, CompletionResultType, } from '../internal/powershell-declarations'; import { State } from '../internal/state'; import { ClientRuntime } from '../llcsharp/exports'; import { DeepPartial } from '@azure-tools/codegen'; -export class ModuleClass extends Class { +export class NewModuleClass extends Class { // get the name of the client API class TaskOfHttpResponseMessage = System.Threading.Tasks.Task(System.Net.Http.HttpResponseMessage); @@ -71,7 +72,7 @@ export class ModuleClass extends Class { fHandler = this.add(new Field('_handler', System.Net.Http.HttpClientHandler, { initialValue: System.Net.Http.HttpClientHandler.new() })); fWebProxy = this.add(new Field('_webProxy', System.Net.WebProxy, { initialValue: System.Net.WebProxy.new() })); - constructor(namespace: Namespace, private readonly state: State, objectInitializer?: DeepPartial) { + constructor(namespace: Namespace, private readonly state: State, objectInitializer?: DeepPartial) { super(namespace, 'Module'); this.apply(objectInitializer); this.partial = true; @@ -86,7 +87,7 @@ export class ModuleClass extends Class { description: 'the singleton of this module class' })); - const clientAPI = new ClassType(this.state.model.details.csharp.namespace, this.state.model.details.csharp.name); + const clientAPI = new ClassType(this.state.model.language.csharp?.namespace, this.state.model.language.csharp?.name || ''); const clientProperty = this.add(new Property('ClientAPI', clientAPI, { description: 'The instance of the Client API' })); if (this.state.project.azure) { diff --git a/powershell/module/module-namespace.ts b/powershell/module/module-namespace.ts index cefbdae32de..f6afc90087e 100644 --- a/powershell/module/module-namespace.ts +++ b/powershell/module/module-namespace.ts @@ -5,22 +5,22 @@ import { ImportDirective, Namespace } from '@azure-tools/codegen-csharp'; import { ClientRuntime } from '../llcsharp/exports'; import { State } from '../internal/state'; -import { ModuleClass } from './module-class'; +import { NewModuleClass } from './module-class'; import { DeepPartial } from '@azure-tools/codegen'; export class ModuleNamespace extends Namespace { - public moduleClass: ModuleClass; + public moduleClass: NewModuleClass; public get outputFolder(): string { return this.state.project.moduleFolder; } constructor(public state: State, objectInitializer?: DeepPartial) { - super(state.model.details.csharp.namespace || 'INVALID.NAMESPACE', state.project); + super(state.model.language.csharp?.namespace || 'INVALID.NAMESPACE', state.project); this.apply(objectInitializer); this.add(new ImportDirective(`static 
${ClientRuntime.Extensions}`)); // module class - this.moduleClass = new ModuleClass(this, state); + this.moduleClass = new NewModuleClass(this, state); } } \ No newline at end of file diff --git a/powershell/package.json b/powershell/package.json index 9cb887fd879..cfaae555cd8 100644 --- a/powershell/package.json +++ b/powershell/package.json @@ -1,6 +1,6 @@ { "name": "@autorest/powershell", - "version": "2.1.0", + "version": "3.0.0", "description": "AutoRest PowerShell Cmdlet Generator", "main": "dist/exports.js", "typings": "dist/exports.d.ts", @@ -54,9 +54,11 @@ "eslint": "~6.2.2" }, "dependencies": { - "@azure-tools/codegen": "~2.1.0", + "js-yaml": "3.13.1", + "@azure-tools/codegen": "^2.5.276", "@azure-tools/codegen-csharp": "~3.0.0", "@azure-tools/codemodel-v3": "~3.1.0", + "@azure-tools/codemodel": "~4.13.342", "@azure-tools/autorest-extension-base": "~3.1.0", "@azure-tools/linq": "~3.1.0", "@azure-tools/tasks": "~3.0.0", diff --git a/powershell/plugins/add-azure-completers.ts b/powershell/plugins/add-azure-completers-v2.ts similarity index 72% rename from powershell/plugins/add-azure-completers.ts rename to powershell/plugins/add-azure-completers-v2.ts index 3dd1a7f22cd..557ce432631 100644 --- a/powershell/plugins/add-azure-completers.ts +++ b/powershell/plugins/add-azure-completers-v2.ts @@ -3,11 +3,13 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { codemodel, processCodeModel, ModelState } from '@azure-tools/codemodel-v3'; import { values } from '@azure-tools/linq'; import { Host } from '@azure-tools/autorest-extension-base'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; +import { serialize } from '@azure-tools/codegen'; -type State = ModelState; +type State = ModelState; const resourceGroupNames = new Set([ 'resourcegroupname', @@ -20,7 +22,7 @@ const locationNames = new Set([ 'location', ]); -async function tweakModel(state: State): Promise { +async function tweakModel(state: State): Promise { const model = state.model; for (const operation of values(model.commands.operations)) { for (const parameter of values(operation.parameters)) { @@ -42,6 +44,7 @@ async function tweakModel(state: State): Promise { } -export async function addCompleter(service: Host) { - return processCodeModel(tweakModel, service, 'add-azure-completers'); +export async function addCompleterV2(service: Host) { + const state = await new ModelState(service).init(); + await service.WriteFile('code-model-v4-add-azure-completers-v2.yaml', serialize(await tweakModel(state)), undefined, 'code-model-v4'); } diff --git a/powershell/plugins/create-commands.ts b/powershell/plugins/create-commands-v2.ts similarity index 63% rename from powershell/plugins/create-commands.ts rename to powershell/plugins/create-commands-v2.ts index 1e9c2ee92e3..fc36205364f 100644 --- a/powershell/plugins/create-commands.ts +++ b/powershell/plugins/create-commands-v2.ts @@ -3,16 +3,21 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { JsonType, processCodeModel, codemodel, components, command, http, getAllProperties, ModelState, ParameterLocation, } from '@azure-tools/codemodel-v3'; -import { deconstruct, fixLeadingNumber, pascalCase, EnglishPluralizationService, fail, removeSequentialDuplicates } from '@azure-tools/codegen'; +import { HttpMethod, codeModelSchema, CodeModel, ObjectSchema, GroupSchema, isObjectSchema, SchemaType, GroupProperty, ParameterLocation, Operation, Parameter, VirtualParameter, getAllProperties, ImplementationLocation, OperationGroup, Request, SchemaContext } from '@azure-tools/codemodel'; +import { deconstruct, fixLeadingNumber, pascalCase, EnglishPluralizationService, fail, removeSequentialDuplicates, serialize } from '@azure-tools/codegen'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; import { Schema } from '../llcsharp/exports'; -import { Channel, Host } from '@azure-tools/autorest-extension-base'; +import { Channel, Host, Session, startSession } from '@azure-tools/autorest-extension-base'; import { Lazy } from '@azure-tools/tasks'; import { clone } from '@azure-tools/linq'; import { verbs } from '../internal/verbs'; +import { PwshModel } from '../utils/PwshModel'; +import { IParameter } from '../utils/components'; +import { ModelState } from '../utils/model-state'; +//import { Schema as SchemaV3 } from '../utils/schema'; +import { CommandOperation } from '../utils/command-operation'; -type State = ModelState; +type State = ModelState; // UNUSED: Moved to plugin-tweak-model.ts in remodeler @@ -111,17 +116,17 @@ export /* @internal */ class Inferrer { const model = this.state.model; this.state.message({ - Channel: Channel.Debug, Text: `[CMDLET-PREFIX] => '${model.details.default.prefix}'` + Channel: Channel.Debug, Text: `[CMDLET-PREFIX] => '${model.language.default.prefix}'` }); - model.details.default.serviceName = this.serviceName; + model.language.default.serviceName = this.serviceName; this.state.message({ - Channel: Channel.Debug, Text: `[SERVICE-NAME] => '${model.details.default.serviceName}'` + Channel: Channel.Debug, Text: `[SERVICE-NAME] => '${model.language.default.serviceName}'` }); - model.details.default.subjectPrefix = this.subjectPrefix; + model.language.default.subjectPrefix = this.subjectPrefix; this.state.message({ - Channel: Channel.Debug, Text: `[SUBJECT-PREFIX] => '${model.details.default.subjectPrefix}'` + Channel: Channel.Debug, Text: `[SUBJECT-PREFIX] => '${model.language.default.subjectPrefix}'` }); return this; @@ -135,13 +140,19 @@ export /* @internal */ class Inferrer { }; this.state.message({ Channel: Channel.Debug, Text: 'detecting high level commands...' 
}); - - for (const operation of values(model.http.operations)) { - for (const variant of await this.inferCommandNames(operation, this.state)) { - // no common parameters (standard variations) - await this.addVariants(operation.parameters, operation, variant, '', this.state); + for (const operationGroup of values(model.operationGroups)) { + for (const operation of values(operationGroup.operations)) { + for (const variant of await this.inferCommandNames(operation, operationGroup.$key, this.state)) { + await this.addVariants(operation.parameters, operation, variant, '', this.state); + } } } + // for (const operation of values(model.http.operations)) { + // for (const variant of await this.inferCommandNames(operation, this.state)) { + // // no common parameters (standard variations) + // await this.addVariants(operation.parameters, operation, variant, '', this.state); + // } + // } return model; } @@ -226,22 +237,22 @@ export /* @internal */ class Inferrer { return [this.createCommandVariant(operation[0], group ? [...deconstruct(group), ...operation.slice(1)] : operation.slice(1), [...suffix, ...operation.slice(1)], this.state.model)]; } - async inferCommandNames(op: http.HttpOperation, state: State): Promise> { - const operationId = op.operationId || ''; - - let [group, method] = operationId.split('_', 2); - if (!method) { - if (!group) { - // no operation id at all? - const path = op.path.replace(/{.*?}/g, '').replace(/\/+/g, '/').replace(/\/$/g, ''); - method = path.split('/').last; - } else { - // no group given, use string as method - method = group; - } - group = pascalCase(op.tags) || ''; + async inferCommandNames(op: Operation, group: string, state: State): Promise> { - } + const method = op.language.default.name; + // skip-for-time-being + // if (!method) { + // if (!group) { + // // no operation id at all? + // const path = op.path.replace(/{.*?}/g, '').replace(/\/+/g, '/').replace(/\/$/g, ''); + // method = path.split('/').last; + // } else { + // // no group given, use string as method + // method = group; + // } + // group = pascalCase(op.tags) || ''; + + // } const groupWords = deconstruct(group); groupWords[groupWords.length - 1] = pluralizationService.singularize(groupWords.last); @@ -258,16 +269,16 @@ export /* @internal */ class Inferrer { return this.inferCommand(operation, group); } - async addVariant(vname: string, body: http.RequestBody | undefined, bodyParameterName: string, parameters: Array, operation: http.HttpOperation, variant: CommandVariant, state: State) { + async addVariant(vname: string, body: Parameter | null, bodyParameterName: string, parameters: Array, operation: Operation, variant: CommandVariant, state: State) { const op = await this.addCommandOperation(vname, parameters, operation, variant, state); // if this has a body with it, let's add that parameter if (body && body.schema) { op.details.default.hasBody = true; - op.parameters.push(new components.IParameter(bodyParameterName, body.schema, { + op.parameters.push(new IParameter(bodyParameterName, body.schema, { details: { default: { - description: body.schema.details.default.description, + description: body.schema.language.default.description, name: pascalCase(bodyParameterName), isBodyParameter: true, } @@ -275,14 +286,14 @@ export /* @internal */ class Inferrer { })); // let's add a variant where it's expanded out. 
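// A minimal sketch (not part of the generator code in this diff) of the variant-splitting rule at
// work here: when the request body is an object-like schema (Object, Dictionary or Any), the
// inferrer registers the plain variant plus an `<name>Expanded` variant whose dropBodyParameter
// flag lets the body's properties surface as individual cmdlet parameters. planBodyVariants and
// BodyKind below are illustrative names only.
type BodyKind = 'object' | 'dictionary' | 'any' | 'other' | undefined;

function planBodyVariants(variantName: string, bodyKind: BodyKind): Array<{ name: string; dropBodyParameter: boolean }> {
  const variants = [{ name: variantName, dropBodyParameter: false }];
  if (bodyKind === 'object' || bodyKind === 'dictionary' || bodyKind === 'any') {
    // object-like bodies get a second, flattened variant
    variants.push({ name: `${variantName}Expanded`, dropBodyParameter: true });
  }
  return variants;
}

// planBodyVariants('CreateViaIdentity', 'object')
//   => [{ name: 'CreateViaIdentity', dropBodyParameter: false },
//       { name: 'CreateViaIdentityExpanded', dropBodyParameter: true }]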
- // *IF* the body is an object - if (body.schema.type === JsonType.Object) { + // *IF* the body is an object or dictionary + if (body.schema.type === SchemaType.Object || body.schema.type === SchemaType.Dictionary || body.schema.type === SchemaType.Any) { const opExpanded = await this.addCommandOperation(`${vname}Expanded`, parameters, operation, variant, state); opExpanded.details.default.dropBodyParameter = true; - opExpanded.parameters.push(new components.IParameter(`${bodyParameterName}Body`, body.schema, { + opExpanded.parameters.push(new IParameter(`${bodyParameterName}Body`, body.schema, { details: { default: { - description: body.schema.details.default.description, + description: body.schema.language.default.description, name: pascalCase(`${bodyParameterName}Body`), isBodyParameter: true, } @@ -292,30 +303,32 @@ export /* @internal */ class Inferrer { } } - - isNameConflict(model: codemodel.Model, variant: CommandVariant, vname: string) { - for (const each of values(model.commands.operations)) { - if (each.details.default.name === vname) { - return true; - } - } - return false; - } + // skip-for-time-being + // isNameConflict(model: codemodel.Model, variant: CommandVariant, vname: string) { + // for (const each of values(model.commands.operations)) { + // if (each.details.default.name === vname) { + // return true; + // } + // } + // return false; + // } // for tracking unique operation identities operationIdentities = new Set(); - async addCommandOperation(vname: string, parameters: Array, operation: http.HttpOperation, variant: CommandVariant, state: State): Promise { - let apiversion = ''; + async addCommandOperation(vname: string, parameters: Array, operation: Operation, variant: CommandVariant, state: State): Promise { + // skip-for-time-being following code seems redundant ----- + // let apiversion = ''; - for (const each of items(operation.responses)) { - for (const rsp of each.value) { - if (rsp.schema && rsp.schema.details && rsp.schema.details.default && rsp.schema.details.default.apiversion) { - apiversion = rsp.schema.details.default.apiversion; - break; - } - } - } + // for (const each of items(operation.responses)) { + // for (const rsp of items(each)) { + // if (rsp.schema && rsp.schema.details && rsp.schema.details.default && rsp.schema.details.default.apiversion) { + // apiversion = rsp.schema.details.default.apiversion; + // break; + // } + // } + // } + // ---------------------------------------------------------- // if vname is > 64 characters, let's trim it // after trimming it, make sure there aren't any other operation with a name that's exactly the same @@ -342,19 +355,19 @@ export /* @internal */ class Inferrer { variant.variant = vname; vname = pascalCase(vname); - const xmsMetadata = operation.pathExtensions ? operation.pathExtensions['x-ms-metadata'] ? clone(operation.pathExtensions['x-ms-metadata']) : {} : {}; + // skip-for-time-being x-ms-metadata looks not supported any more. + //const xmsMetadata = operation.pathExtensions ? operation.pathExtensions['x-ms-metadata'] ? clone(operation.pathExtensions['x-ms-metadata']) : {} : {}; - return state.model.commands.operations[`${length(state.model.commands.operations)}`] = new command.CommandOperation(operation.operationId, { - asjob: operation.details.default.asjob ? true : false, + return state.model.commands.operations[`${length(state.model.commands.operations)}`] = new CommandOperation(operation.language.default.name, { + asjob: operation.language.default.asjob ? 
true : false, extensions: { - ...operation.pathExtensions, - 'x-ms-metadata': xmsMetadata + }, ...variant, details: { - ...operation.details, + ...operation.language, default: { - ...operation.details.default, + ...operation.language.default, subject: variant.subject, subjectPrefix: variant.subjectPrefix, verb: variant.verb, @@ -362,50 +375,64 @@ export /* @internal */ class Inferrer { alias: variant.alias } }, - operationId: operation.operationId, + // operationId is not needed any more + operationId: '', parameters: parameters.map(httpParameter => { // make it's own copy of the parameter since after this, // the parameter can be altered for each operation individually. const each = clone(httpParameter, false, undefined, undefined, ['schema', 'origin']); + each.language.default = { + ...each.language.default, + name: pascalCase(each.language.default.name), + httpParameter + }; + each.details = {}; each.details.default = { - ...each.details.default, - name: pascalCase(each.details.default.name), + ...each.language.default, + name: pascalCase(each.language.default.name), httpParameter }; + each.name = each.language.default.serializedName; return each; }), + // skip-for-time-being, this callGraph is used in the header comments callGraph: [operation], }); } - async addVariants(parameters: Array, operation: http.HttpOperation, variant: CommandVariant, vname: string, state: State) { + async addVariants(parameters: Array | undefined, operation: Operation, variant: CommandVariant, vname: string, state: State) { // now synthesize parameter set variants multiplexed by the variants. - const [constants, requiredParameters] = values(parameters).bifurcate(parameter => parameter.details.default.constantValue || parameter.details.default.fromHost ? true : false); - const constantParameters = constants.map(each => `'${each.details.default.constantValue}'`); + const [constants, requiredParameters] = values(parameters).bifurcate(parameter => parameter.language.default.constantValue || parameter.language.default.fromHost ? true : false); + const constantParameters = constants.map(each => `'${each.language.default.constantValue}'`); // the body parameter - const body = operation.requestBody; - const bodyParameterName = (operation.requestBody && operation.requestBody.extensions) ? operation.requestBody.extensions['x-ms-requestBody-name'] || 'bodyParameter' : ''; + // xichen: How to handle if has multiple requests? + const body = operation.requests?.[0].parameters?.find((p) => !p.origin || p.origin.indexOf('modelerfour:synthesized') < 0) || null; + // skip-for-time-being, looks x-ms-requestBody-name is not supported any more + //const bodyParameterName = (operation.requestBody && operation.requestBody.extensions) ? operation.requestBody.extensions['x-ms-requestBody-name'] || 'bodyParameter' : ''; + const bodyParameterName = body ? body.language.default.name : ''; // all the properties in the body parameter - const bodyProperties = (body && body.schema) ? values(getAllProperties(body.schema)).where(property => !property.details.default.readOnly).toArray() : []; + const bodyProperties = (body && body.schema && isObjectSchema(body.schema)) ? 
values(getAllProperties(body.schema)).where(property => !property.language.default.readOnly).toArray() : []; // smash body property names together - const bodyPropertyNames = bodyProperties.joinWith(each => each.details.default.name); + const bodyPropertyNames = bodyProperties.joinWith(each => each.language.default.name); // for each polymorphic body, we should do a separate variant that takes the polymorphic body type instead of the base type - const polymorphicBodies = (body && body.schema && body.schema.details.default.polymorphicChildren && length(body.schema.details.default.polymorphicChildren)) ? (>body.schema.details.default.polymorphicChildren).joinWith(child => child.details.default.name) : ''; + // skip-for-time-being, this is for polymorphism + //const polymorphicBodies = (body && body.schema && body.schema.details.default.polymorphicChildren && length(body.schema.details.default.polymorphicChildren)) ? (>body.schema.details.default.polymorphicChildren).joinWith(child => child.details.default.name) : ''; // wait! "update" should be "set" if it's a POST - if (variant.verb === 'Update' && operation.method === http.HttpMethod.Put) { + if (variant.verb === 'Update' && operation.requests && operation.requests[0].protocol?.http?.method === HttpMethod.Put) { variant.verb = 'Set'; } // create variant - state.message({ Channel: Channel.Debug, Text: `${variant.verb}-${variant.subject} // ${operation.operationId} => ${JSON.stringify(variant)} taking ${requiredParameters.joinWith(each => each.name)}; ${constantParameters} ; ${bodyPropertyNames} ${polymorphicBodies ? `; Polymorphic bodies: ${polymorphicBodies} ` : ''}` }); + // skip-for-time-being, since operationId looks not included in m4. + //state.message({ Channel: Channel.Debug, Text: `${variant.verb}-${variant.subject} // ${operation.operationId} => ${JSON.stringify(variant)} taking ${requiredParameters.joinWith(each => each.name)}; ${constantParameters} ; ${bodyPropertyNames} ${polymorphicBodies ? `; Polymorphic bodies: ${polymorphicBodies} ` : ''}` }); await this.addVariant(pascalCase([variant.action, vname]), body, bodyParameterName, [...constants, ...requiredParameters], operation, variant, state); - const [pathParams, otherParams] = values(requiredParameters).bifurcate(each => each.in === ParameterLocation.Path); + const [pathParams, otherParams] = values(requiredParameters).bifurcate(each => each?.protocol?.http?.in === ParameterLocation.Path); const dvi = await state.getValue('disable-via-identity', false); if (!dvi && length(pathParams) > 0 && variant.action.toLowerCase() != 'list') { @@ -415,7 +442,7 @@ export /* @internal */ class Inferrer { } - createCommandVariant(action: string, subject: Array, variant: Array, model: codemodel.Model): CommandVariant { + createCommandVariant(action: string, subject: Array, variant: Array, model: PwshModel): CommandVariant { const verb = this.getPowerShellVerb(action); if (verb === 'Invoke') { // if the 'operation' name was "post" -- it's kindof redundant. 
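// A rough, stand-alone illustration (assumed helper, not part of the Inferrer) of how the pieces
// produced by createCommandVariant combine into a cmdlet name: an approved PowerShell verb, the
// configured subject prefix, and a singularized, de-duplicated subject. The real code gets the
// verb from getPowerShellVerb()/../internal/verbs and singularizes via the pluralization service.
function sketchCmdletName(verb: string, subjectPrefix: string, subjectWords: Array<string>): string {
  const singularize = (w: string) => w.replace(/ies$/i, 'y').replace(/s$/i, '');  // crude stand-in
  const subject = subjectWords
    .map(w => { const s = singularize(w); return s.charAt(0).toUpperCase() + s.slice(1); })
    .join('');
  return `${verb}-${subjectPrefix}${subject}`;
}

// sketchCmdletName('Get', 'Az', ['virtual', 'machines']) => 'Get-AzVirtualMachine'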
@@ -430,7 +457,7 @@ export /* @internal */ class Inferrer { subject: pascalCase([...removeSequentialDuplicates(subject.map(each => pluralizationService.singularize(each)))]), variant: pascalCase(variant), verb, - subjectPrefix: model.details.default.subjectPrefix, + subjectPrefix: model.language.default.subjectPrefix, action }; } @@ -446,9 +473,14 @@ export /* @internal */ class Inferrer { } -export async function createCommands(service: Host) { +export async function createCommandsV2(service: Host) { // return processCodeModel(commandCreator, service); - return processCodeModel(async (state) => { - return await (await new Inferrer(state).init()).createCommands(); - }, service, 'createCommands'); + //const session = await startSession(service, {}, codeModelSchema); + //const result = tweakModelV2(session); + const state = await new ModelState(service).init(); + await service.WriteFile('code-model-v4-createcommands-v2.yaml', serialize(await (await new Inferrer(state).init()).createCommands()), undefined, 'code-model-v4'); + + // return processCodeModel(async (state) => { + // return await (await new Inferrer(state).init()).createCommands(); + // }, service, 'createCommands-v2'); } \ No newline at end of file diff --git a/powershell/plugins/cs-namer-v2.ts b/powershell/plugins/cs-namer-v2.ts new file mode 100644 index 00000000000..be6a2e70d71 --- /dev/null +++ b/powershell/plugins/cs-namer-v2.ts @@ -0,0 +1,265 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { codeModelSchema, SchemaResponse, CodeModel, Schema, ObjectSchema, GroupSchema, isObjectSchema, SchemaType, GroupProperty, ParameterLocation, Operation, Parameter, VirtualParameter, getAllProperties, ImplementationLocation, OperationGroup, Request, SchemaContext, StringSchema, ChoiceSchema, SealedChoiceSchema } from '@azure-tools/codemodel'; +import { camelCase, deconstruct, excludeXDash, fixLeadingNumber, pascalCase, lowest, maximum, minimum, getPascalIdentifier, serialize } from '@azure-tools/codegen'; +import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; +import { System } from '@azure-tools/codegen-csharp'; + +import { Channel, Host, Session, startSession } from '@azure-tools/autorest-extension-base'; +import { SchemaDetails } from '../llcsharp/code-model'; +import { SchemaDefinitionResolver } from '../llcsharp/schema/schema-resolver'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; +import { SchemaDetails as NewSchemaDetails } from '../utils/schema'; + +type State = ModelState; + + +function setPropertyNames(schema: Schema) { + // name each property in this schema + // skip-for-time-being + if (!isObjectSchema(schema)) { + return; + } + for (const propertySchema of values(schema.properties)) { + const propertyDetails = propertySchema.language.default; + propertyDetails.required = propertySchema.required ?? false; + propertyDetails.readOnly = propertySchema.readOnly ?? 
false; + + const className = schema.language.csharp?.name; + + let pname = getPascalIdentifier(propertyDetails.name); + if (pname === className) { + pname = `${pname}Property`; + } + + if (pname === 'default') { + pname = '@default'; + } + + propertySchema.language.csharp = { + ...propertyDetails, + name: pname // and so are the propertyNames + }; + + if (propertyDetails.isNamedStream) { + propertySchema.language.csharp.namedStreamPropertyName = pascalCase(fixLeadingNumber([...deconstruct(propertyDetails.name), 'filename'])); + } + } + +} + + +function setSchemaNames(schemaGroups: Dictionary>, azure: boolean, serviceNamespace: string) { + const baseNamespace = new Set(); + const subNamespace = new Map>(); + // dolauli need to notice this -- schemas in the namespace of the lowest supported api version + // in Azure Mode, we want to always put schemas into the namespace of the lowest supported apiversion. + // otherwise, we just want to differentiate with a simple incremental numbering scheme. + + for (const group of values(schemaGroups)) { + for (const schema of group) { + let thisNamespace = baseNamespace; + let thisApiversion = ''; + + // create the namespace if required + if (azure) { + const versions = [...values(schema.apiVersions).select(v => v.version)]; + if (schema.language.default?.uid !== 'universal-parameter-type') { + if (versions && length(versions) > 0) { + thisApiversion = minimum(versions); + thisNamespace = subNamespace.get(thisApiversion) || new Set(); + subNamespace.set(thisApiversion, thisNamespace); + } + } + } + + + // for each schema, we're going to set the name + // to the suggested name, unless we have collisions + // at which point, we're going to add a number (for now?) + const details = schema.language.default; + let schemaName = getPascalIdentifier(details.name); + const apiName = (!thisApiversion) ? '' : getPascalIdentifier(`Api ${thisApiversion}`); + const ns = (!thisApiversion) ? [] : ['.', apiName]; + + + let n = 1; + while (thisNamespace.has(schemaName)) { + schemaName = getPascalIdentifier(`${details.name}_${n++}`); + } + thisNamespace.add(schemaName); + + // object types. + if (schema.type === SchemaType.Object || schema.type === SchemaType.Dictionary || schema.type === SchemaType.Any) { + schema.language.csharp = { + ...details, + apiversion: thisApiversion, + apiname: apiName, + interfaceName: pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName)])), // objects have an interfaceName + internalInterfaceName: pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName), 'Internal'])), // objects have an internal interfaceName for setting private members.
+ fullInternalInterfaceName: `${pascalCase([serviceNamespace, '.', 'Models', ...ns])}.${pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName), 'Internal']))}`, + name: getPascalIdentifier(schemaName), + namespace: pascalCase([serviceNamespace, '.', 'Models', ...ns]), // objects have a namespace + fullname: `${pascalCase([serviceNamespace, '.', 'Models', ...ns])}.${getPascalIdentifier(schemaName)}`, + }; + } else if (schema.type === SchemaType.Choice || schema.type === SchemaType.SealedChoice) { + // oh, it's an enum type + const choiceSchema = | SealedChoiceSchema>schema; + schema.language.csharp = { + ...details, + interfaceName: pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName)])), + name: getPascalIdentifier(schemaName), + namespace: pascalCase([serviceNamespace, '.', 'Support']), + fullname: `${pascalCase([serviceNamespace, '.', 'Support'])}.${getPascalIdentifier(schemaName)}`, + enum: { + ...schema.language.default.enum, + name: getPascalIdentifier(schema.language.default.name), + values: choiceSchema.choices.map(each => { + return { + ...each, + name: getPascalIdentifier(each.language.default.name), + description: each.language.default.description + }; + }) + } + }; + } else { + schema.language.csharp = { + ...details, + interfaceName: '', + internalInterfaceName: '', + name: schemaName, + namespace: '', + fullname: '' + }; + // xichen: for invalid namespace case, we won't create model class. So we do not need consider dup case + thisNamespace.delete(schemaName); + } + + // name each property in this schema + setPropertyNames(schema); + + // fix enum names + if (schema.type === SchemaType.Choice || schema.type === SchemaType.SealedChoice) { + schema.language.csharp.enum.name = getPascalIdentifier(schema.language.default.name); + + // and the value names themselves + for (const value of values(schema.language.csharp.enum.values)) { + // In m3, enum.name and enum.value are same. But in m4, enum.name is named by m4. + // To keep same action as m3, use enum.value here + (value).name = getPascalIdentifier((value).value); + } + } + } + } + +} + +async function setOperationNames(state: State, resolver: SchemaDefinitionResolver) { + // keep a list of operation names that we've assigned. + const operationNames = new Set(); + for (const operationGroup of values(state.model.operationGroups)) { + for (const operation of values(operationGroup.operations)) { + const details = operation.language.default; + + // come up with a name + const oName = getPascalIdentifier(operationGroup.$key + '_' + details.name); + let i = 1; + let operationName = oName; + while (operationNames.has(operationName)) { + // if we have used that name, try again. + operationName = `${oName}${i++}`; + } + operationNames.add(operationName); + + operation.language.csharp = { + ...details, // inherit + name: operationName, + }; + + // parameters are camelCased. + for (const parameter of values(operation.parameters)) { + const parameterDetails = parameter.language.default; + + let propName = camelCase(fixLeadingNumber(deconstruct(parameterDetails.serializedName))); + + if (propName === 'default') { + propName = '@default'; + } + + parameter.language.csharp = { + ...parameterDetails, + name: propName + }; + } + + const responses = [...values(operation.responses), ...values(operation.exceptions)]; + + for (const rsp of responses) { + // per responseCode + const response = rsp; + const responseTypeDefinition = response.schema ? 
resolver.resolveTypeDeclaration(response.schema, true, state) : undefined; + const headerSchema = response.language.default.headerSchema; + const headerTypeDefinition = headerSchema ? resolver.resolveTypeDeclaration(headerSchema, true, state.path('schemas', headerSchema.language.default.name)) : undefined; + let code = (System.Net.HttpStatusCode[response.protocol.http?.statusCodes[0]] ? System.Net.HttpStatusCode[response.protocol.http?.statusCodes[0]].value : response.protocol.http?.statusCodes[0]).replace('global::System.Net.HttpStatusCode', ''); + let rawValue = code.replace(/\./, ''); + if (response.protocol.http?.statusCodes[0] === 'default' || rawValue === 'default' || '') { + rawValue = 'any response code not handled elsewhere'; + code = 'default'; + response.language.default.isErrorResponse = true; + } + response.language.csharp = { + ...response.language.default, + responseType: responseTypeDefinition ? responseTypeDefinition.declaration : '', + headerType: headerTypeDefinition ? headerTypeDefinition.declaration : '', + name: (length(response.protocol.http?.mimeTypes) <= 1) ? + camelCase(fixLeadingNumber(deconstruct(`on ${code}`))) : // the common type (or the only one.) + camelCase(fixLeadingNumber(deconstruct(`on ${code} ${response.protocol.http?.mimeTypes[0]}`))), + description: (length(response.protocol.http?.mimeTypes) <= 1) ? + `a delegate that is called when the remote service returns ${response.protocol.http?.statusCodes[0]} (${rawValue}).` : + `a delegate that is called when the remote service returns ${response.protocol.http?.statusCodes[0]} (${rawValue}) with a Content-Type matching ${response.protocol.http?.mimeTypes.join(',')}.` + + }; + } + } + } +} + +async function nameStuffRight(state: State): Promise { + const resolver = new SchemaDefinitionResolver(); + const model = state.model; + + // set the namespace for the service + const serviceNamespace = await state.getValue('namespace', 'Sample.API'); + const azure = await state.getValue('azure', false) || await state.getValue('azure-arm', false); + const clientName = getPascalIdentifier(model.language.default.name); + + // dolauli see model.details.csharp for c# related staff + // set c# client details (name) + model.language.csharp = { + ...model.language.default, // copy everything by default + name: clientName, + namespace: serviceNamespace, + fullname: `${serviceNamespace}.${clientName}` + }; + + setSchemaNames(>>model.schemas, azure, serviceNamespace); + await setOperationNames(state, resolver); + + return model; +} + + +export async function csnamerV2(service: Host) { + // dolauli add names for http operations and schemas + //return processCodeModel(nameStuffRight, service, 'csnamer'); + //const session = await startSession(service, {}, codeModelSchema); + //const result = tweakModelV2(session); + const state = await new ModelState(service).init(); + await service.WriteFile('code-model-v4-csnamer-v2.yaml', serialize(await nameStuffRight(state)), undefined, 'code-model-v4'); +} + diff --git a/powershell/plugins/cs-namer.ts b/powershell/plugins/cs-namer.ts deleted file mode 100644 index 3c59d06d59e..00000000000 --- a/powershell/plugins/cs-namer.ts +++ /dev/null @@ -1,242 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
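// A stand-alone sketch (assumed helpers, not the plugin itself) of the two naming rules the namer
// applies: colliding schema names pick up an incrementing numeric suffix, and in azure mode a
// schema's models namespace is keyed off its lowest api-version.
function sketchUniqueSchemaName(requested: string, taken: Set<string>): string {
  let name = requested;
  let n = 1;
  while (taken.has(name)) {
    name = `${requested}_${n++}`;          // e.g. a second 'Resource' becomes 'Resource_1'
  }
  taken.add(name);
  return name;
}

function sketchModelsNamespace(serviceNamespace: string, apiVersions: Array<string>): string {
  if (apiVersions.length === 0) {
    return `${serviceNamespace}.Models`;
  }
  // the real code uses minimum() from @azure-tools/codegen; a lexical sort is close enough here
  const lowest = [...apiVersions].sort()[0];
  return `${serviceNamespace}.Models.Api${lowest.replace(/[^0-9A-Za-z]/g, '')}`;
}

// sketchModelsNamespace('Sample.API', ['2020-06-01', '2019-03-01']) => 'Sample.API.Models.Api20190301'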
- *--------------------------------------------------------------------------------------------*/ - -import { codemodel, JsonType, ModelState, processCodeModel, VirtualProperty } from '@azure-tools/codemodel-v3'; - -import { camelCase, deconstruct, excludeXDash, fixLeadingNumber, pascalCase, lowest, maximum, minimum, getPascalIdentifier } from '@azure-tools/codegen'; -import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; -import { System } from '@azure-tools/codegen-csharp'; - -import { Host } from '@azure-tools/autorest-extension-base'; -import { Schema, SchemaDetails } from '../llcsharp/code-model'; -import { SchemaDefinitionResolver } from '../llcsharp/schema/schema-resolver'; - -type State = ModelState; - -function setPropertyNames(schema: Schema) { - // name each property in this schema - for (const propertySchema of values(schema.properties)) { - const propertyDetails = propertySchema.details.default; - - const className = schema.details.csharp.name; - - let pname = getPascalIdentifier(propertyDetails.name); - if (pname === className) { - pname = `${pname}Property`; - } - - if (pname === 'default') { - pname = '@default'; - } - - propertySchema.details.csharp = { - ...propertyDetails, - name: pname // and so are the propertyNmaes - }; - - if (propertyDetails.isNamedStream) { - propertySchema.details.csharp.namedStreamPropertyName = pascalCase(fixLeadingNumber([...deconstruct(propertyDetails.name), 'filename'])); - } - } - -} - - -function setSchemaNames(schemas: Dictionary, azure: boolean, serviceNamespace: string) { - const baseNamespace = new Set(); - const subNamespace = new Map>(); - - // in Azure Mode, we want to always put schemas into the namespace of the lowest supported apiversion. - // otherwise, we just want to differientiate with a simple incremental numbering scheme. - - for (const schema of values(schemas)) { - let thisNamespace = baseNamespace; - let thisApiversion = ''; - - // create the namespace if required - if (azure) { - const metadata = schema.extensions && schema.extensions['x-ms-metadata']; - if (metadata) { - const apiVersions = | undefined>metadata.apiVersions; - if (apiVersions && length(apiVersions) > 0) { - thisApiversion = minimum(apiVersions); - thisNamespace = subNamespace.get(thisApiversion) || new Set(); - subNamespace.set(thisApiversion, thisNamespace); - } - } - } - - // for each schema, we're going to set the name - // to the suggested name, unless we have collisions - // at which point, we're going to add a number (for now?) - const details = schema.details.default; - let schemaName = getPascalIdentifier(details.name); - const apiName = (!thisApiversion) ? '' : getPascalIdentifier(`Api ${thisApiversion}`); - const ns = (!thisApiversion) ? [] : ['.', apiName]; - - - let n = 1; - while (thisNamespace.has(schemaName)) { - schemaName = getPascalIdentifier(`${details.name}_${n++}`); - } - thisNamespace.add(schemaName); - - // object types. - if (schema.type === JsonType.Object) { - schema.details.csharp = { - ...details, - apiversion: thisApiversion, - apiname: apiName, - interfaceName: pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName)])), // objects have an interfaceName - internalInterfaceName: pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName), 'Internal'])), // objects have an ineternal interfaceName for setting private members. 
- fullInternalInterfaceName: `${pascalCase([serviceNamespace, '.', 'Models', ...ns])}.${pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName), 'Internal']))}`, - name: getPascalIdentifier(schemaName), - namespace: pascalCase([serviceNamespace, '.', 'Models', ...ns]), // objects have a namespace - fullname: `${pascalCase([serviceNamespace, '.', 'Models', ...ns])}.${getPascalIdentifier(schemaName)}`, - }; - } else if (schema.type === JsonType.String && schema.details.default.enum) { - // oh, it's an enum type - schema.details.csharp = { - ...details, - interfaceName: pascalCase(fixLeadingNumber(['I', ...deconstruct(schemaName)])), - name: getPascalIdentifier(schema.details.default.enum.name), - namespace: pascalCase([serviceNamespace, '.', 'Support']), - fullname: `${pascalCase([serviceNamespace, '.', 'Support'])}.${getPascalIdentifier(schema.details.default.enum.name)}`, - enum: { - ...schema.details.default.enum, - name: getPascalIdentifier(schema.details.default.enum.name), - values: schema.details.default.enum.values.map(each => { - return { - ...each, - name: getPascalIdentifier(each.name), - description: each.description - }; - }) - } - }; - } else { - schema.details.csharp = { - ...details, - interfaceName: '', - internalInterfaceName: '', - name: schemaName, - namespace: '', - fullname: '' - }; - } - - // name each property in this schema - setPropertyNames(schema); - - // fix enum names - if (schema.details.default.enum) { - schema.details.csharp.enum = { - ...schema.details.default.enum, - name: getPascalIdentifier(schema.details.default.enum.name) - }; - - // and the value names themselves - for (const value of values(schema.details.csharp.enum.values)) { - value.name = getPascalIdentifier(value.name); - } - } - } - -} - -async function setOperationNames(state: State, resolver: SchemaDefinitionResolver) { - // keep a list of operation names that we've assigned. - const operationNames = new Set(); - - for (const operation of values(state.model.http.operations)) { - const details = operation.details.default; - - // come up with a name - const oName = getPascalIdentifier(details.name); - let i = 1; - let operationName = oName; - while (operationNames.has(operationName)) { - // if we have used that name, try again. - operationName = `${oName}${i++}`; - } - operationNames.add(operationName); - - operation.details.csharp = { - ...details, // inherit - name: operationName, - }; - - // parameters are camelCased. - for (const parameter of values(operation.parameters)) { - const parameterDetails = parameter.details.default; - - let propName = camelCase(fixLeadingNumber(deconstruct(parameterDetails.name))); - - if (propName === 'default') { - propName = '@default'; - } - - parameter.details.csharp = { - ...parameterDetails, - name: propName - }; - } - - for (const responses of values(operation.responses)) { - // per responseCode - for (const response of values(responses)) { - const responseTypeDefinition = response.schema ? resolver.resolveTypeDeclaration(response.schema, true, state.path('schemas', response.schema.details.default.name)) : undefined; - const headerTypeDefinition = response.headerSchema ? resolver.resolveTypeDeclaration(response.headerSchema, true, state.path('schemas', response.headerSchema.details.default.name)) : undefined; - let code = (System.Net.HttpStatusCode[response.responseCode] ? 
System.Net.HttpStatusCode[response.responseCode].value : response.responseCode).replace('global::System.Net.HttpStatusCode', ''); - let rawValue = code.replace(/\./, ''); - if (response.responseCode === 'default' || rawValue === 'default' || '') { - rawValue = 'any response code not handled elsewhere'; - code = 'default'; - } - response.details.csharp = { - ...response.details.default, - responseType: responseTypeDefinition ? responseTypeDefinition.declaration : '', - headerType: headerTypeDefinition ? headerTypeDefinition.declaration : '', - name: (length(responses) <= 1) ? - camelCase(fixLeadingNumber(deconstruct(`on ${code}`))) : // the common type (or the only one.) - camelCase(fixLeadingNumber(deconstruct(`on ${code} ${response.mimeTypes[0]}`))), - description: (length(responses) <= 1) ? - `a delegate that is called when the remote service returns ${response.responseCode} (${rawValue}).` : - `a delegate that is called when the remote service returns ${response.responseCode} (${rawValue}) with a Content-Type matching ${response.mimeTypes.join(',')}.` - - }; - } - } - } -} - -async function nameStuffRight(state: State): Promise { - const resolver = new SchemaDefinitionResolver(); - const model = state.model; - - // set the namespace for the service - const serviceNamespace = await state.getValue('namespace', 'Sample.API'); - const azure = await state.getValue('azure', false) || await state.getValue('azure-arm', false); - const clientName = getPascalIdentifier(model.details.default.name); - - // set c# client details (name) - model.details.csharp = { - ...model.details.default, // copy everything by default - name: clientName, - namespace: serviceNamespace, - fullname: `${serviceNamespace}.${clientName}` - }; - - setSchemaNames(>model.schemas, azure, serviceNamespace); - await setOperationNames(state, resolver); - - return model; -} - - -export async function csnamer(service: Host) { - return processCodeModel(nameStuffRight, service, 'csnamer'); -} - diff --git a/powershell/plugins/llcsharp.ts b/powershell/plugins/llcsharp-v2.ts similarity index 94% rename from powershell/plugins/llcsharp.ts rename to powershell/plugins/llcsharp-v2.ts index a166a31bc64..c90a8a2c61d 100644 --- a/powershell/plugins/llcsharp.ts +++ b/powershell/plugins/llcsharp-v2.ts @@ -2,16 +2,18 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { Host } from '@azure-tools/autorest-extension-base'; +import { Host, startSession } from '@azure-tools/autorest-extension-base'; +import { codeModelSchema } from '@azure-tools/codemodel'; import { applyOverrides, copyResources, deserialize, serialize, } from '@azure-tools/codegen'; import { join } from 'path'; import { Model } from '../llcsharp/code-model'; -import { State } from '../llcsharp/generator'; import { Project } from '../llcsharp/project'; +import { PwshModel } from '../utils/PwshModel'; +import { Dictionary } from '@azure-tools/linq'; const resources = `${__dirname}/../../resources`; -export async function llcsharp(service: Host) { +export async function llcsharpV2(service: Host) { try { const project = await new Project(service).init(); diff --git a/powershell/plugins/modifiers.ts b/powershell/plugins/modifiers-v2.ts similarity index 88% rename from powershell/plugins/modifiers.ts rename to powershell/plugins/modifiers-v2.ts index c929386a67d..007c698777e 100644 --- a/powershell/plugins/modifiers.ts +++ b/powershell/plugins/modifiers-v2.ts @@ -3,13 +3,18 @@ * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ -import { codemodel, processCodeModel, allVirtualParameters, allVirtualProperties, ModelState, command } from '@azure-tools/codemodel-v3'; import { Host, Channel } from '@azure-tools/autorest-extension-base'; -import { pascalCase } from '@azure-tools/codegen'; +import { pascalCase, serialize } from '@azure-tools/codegen'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; -import { CommandOperation } from '@azure-tools/codemodel-v3/dist/code-model/command-operation'; +import { stat } from 'fs'; +import { CommandOperation } from '../utils/command-operation'; +// import { CommandOperation } from '@azure-tools/codemodel-v3/dist/code-model/command-operation'; +import { ModelState } from '../utils/model-state'; +import { PwshModel } from '../utils/PwshModel'; +import { allVirtualParameters, allVirtualProperties } from '../utils/resolve-conflicts'; +import { EnumValue } from '../utils/schema'; -type State = ModelState; +type State = ModelState; let directives: Array = []; @@ -229,9 +234,10 @@ function isWhereEnumDirective(it: any): it is WhereEnumDirective { return false; } -async function tweakModel(state: State): Promise { +async function tweakModel(state: State): Promise { // only look at directives without the `transform` node. + // dolauli for directives with transform are implemented in autorest core for (const directive of directives.filter(each => !each.transform)) { const getPatternToMatch = (selector: string | undefined): RegExp | undefined => { return selector ? !hasSpecialChars(selector) ? new RegExp(`^${selector}$`, 'gi') : new RegExp(selector, 'gi') : undefined; @@ -423,38 +429,39 @@ async function tweakModel(state: State): Promise { const suppressFormat = directive.set['suppress-format']; // select all models - let models = values(state.model.schemas).toArray(); + let models = [...state.model.schemas.objects ?? 
[]]; + // let models = values(state.model.schemas).toArray(); if (modelNameRegex) { models = values(models) .where(model => - !!`${model.details.csharp.name}`.match(modelNameRegex)) + !!`${model.language.csharp?.name}`.match(modelNameRegex)) .toArray(); } if (modelFullNameRegex) { models = values(models) .where(model => - !!`${model.details.csharp.fullname}`.match(modelFullNameRegex)) + !!`${model.language.csharp?.fullname}`.match(modelFullNameRegex)) .toArray(); } if (modelNamespaceRegex) { models = values(models) .where(model => - !!`${model.details.csharp.namespace}`.match(modelNamespaceRegex)) + !!`${model.language.csharp?.namespace}`.match(modelNamespaceRegex)) .toArray(); } if (propertyNameRegex && selectType === 'model') { models = values(models) - .where(model => values(allVirtualProperties(model.details.csharp.virtualProperties)) + .where(model => values(allVirtualProperties(model.language.csharp?.virtualProperties)) .any(property => !!`${property.name}`.match(propertyNameRegex))) .toArray(); } if (propertyNameRegex && (selectType === undefined || selectType === 'property')) { const properties = values(models) - .selectMany(model => allVirtualProperties(model.details.csharp.virtualProperties)) + .selectMany(model => allVirtualProperties(model.language.csharp?.virtualProperties)) .where(property => !!`${property.name}`.match(propertyNameRegex)) .toArray(); for (const property of values(properties)) { @@ -475,12 +482,12 @@ async function tweakModel(state: State): Promise { } else if (models) { for (const model of values(models)) { - if (suppressFormat) { - model.details.csharp.suppressFormat = true; + if (suppressFormat && model.language.csharp) { + model.language.csharp.suppressFormat = true; } if (formatTable !== undefined && !suppressFormat) { - const properties = allVirtualProperties(model.details.csharp.virtualProperties); + const properties = allVirtualProperties(model.language.csharp?.virtualProperties); const propertiesToExclude = formatTable['exclude-properties']; const propertiesToInclude = formatTable.properties; const labels = formatTable.labels; @@ -547,10 +554,12 @@ async function tweakModel(state: State): Promise { } } - const prevName = model.details.csharp.name; - model.details.csharp.name = modelNameReplacer ? modelNameRegex ? model.details.csharp.name.replace(modelNameRegex, modelNameReplacer) : modelNameReplacer : model.details.csharp.name; + const prevName = model.language.csharp?.name; + if (model.language.csharp) { + model.language.csharp.name = modelNameReplacer ? modelNameRegex ? model.language.csharp.name.replace(modelNameRegex, modelNameReplacer) : modelNameReplacer : model.language.csharp.name; + } state.message({ - Channel: Channel.Debug, Text: `[DIRECTIVE] Changed model-name from ${prevName} to ${model.details.csharp.name}.` + Channel: Channel.Debug, Text: `[DIRECTIVE] Changed model-name from ${prevName} to ${model.language.csharp?.name}.` }); } } @@ -565,40 +574,43 @@ async function tweakModel(state: State): Promise { const enumNameReplacer = directive.set['enum-name']; const enumValueNameReplacer = directive.set['enum-value-name']; - let enums = values(state.model.schemas) - .where(each => each.details.csharp.enum !== undefined) - .toArray(); + let enums = [...state.model.schemas.sealedChoices ?? [], ...state.model.schemas.choices ?? 
[]]; + // let enums = values(state.model.schemas) + // .where(each => each.details.csharp.enum !== undefined) + // .toArray(); if (enumNameRegex) { enums = values(enums) - .where(each => !!`${each.details.csharp.name}`.match(enumNameRegex)) + .where(each => !!`${each.language.csharp?.name}`.match(enumNameRegex)) .toArray(); } if (enumValueNameRegex) { const enumsValues = values(enums) - .selectMany(each => each.details.csharp.enum ? each.details.csharp.enum.values : []) - .where(each => !!`${each.name}`.match(enumValueNameRegex)) + .selectMany(each => each.language.csharp?.enum ? each.language.csharp.enum.values : []) + .where(each => !!`${(each).name}`.match(enumValueNameRegex)) .toArray(); for (const enumValue of values(enumsValues)) { - const prevName = enumValue.name; - enumValue.name = enumValueNameReplacer ? enumNameRegex ? enumValue.name.replace(enumValueNameRegex, enumValueNameReplacer) : enumValueNameReplacer : prevName; + const prevName = (enumValue).name; + (enumValue).name = enumValueNameReplacer ? enumNameRegex ? (enumValue).name.replace(enumValueNameRegex, enumValueNameReplacer) : enumValueNameReplacer : prevName; if (enumValueNameRegex) { const enumNames = values(enums) - .select(each => each.details.csharp.name) + .select(each => each.language.csharp?.name) .toArray(); state.message({ - Channel: Channel.Debug, Text: `[DIRECTIVE] Changed enum-value-name from ${prevName} to ${enumValue.name}. Enum: ${JSON.stringify(enumNames, null, 2)}` + Channel: Channel.Debug, Text: `[DIRECTIVE] Changed enum-value-name from ${prevName} to ${(enumValue).name}. Enum: ${JSON.stringify(enumNames, null, 2)}` }); } } } else { for (const each of values(enums)) { - const prevName = each.details.csharp.name; - each.details.csharp.name = enumNameReplacer ? enumNameRegex ? each.details.csharp.name.replace(enumNameRegex, enumNameReplacer) : enumNameReplacer : prevName; - state.message({ - Channel: Channel.Debug, Text: `[DIRECTIVE] Changed enum-name from ${prevName} to ${each.details.csharp.name}.` - }); + const prevName = each.language.csharp?.name ?? ''; + if (each.language.csharp) { + each.language.csharp.name = enumNameReplacer ? enumNameRegex ? 
each.language.csharp.name.replace(enumNameRegex, enumNameReplacer) : enumNameReplacer : prevName; + state.message({ + Channel: Channel.Debug, Text: `[DIRECTIVE] Changed enum-name from ${prevName} to ${each.language.csharp?.name}.` + }); + } } } @@ -688,12 +700,16 @@ async function tweakModel(state: State): Promise { return state.model; } -export async function applyModifiers(service: Host) { +export async function applyModifiersV2(service: Host) { + // dolauli implement directives const allDirectives = await service.GetValue('directive'); directives = values(allDirectives) // .select(directive => directive) .where(directive => isWhereCommandDirective(directive) || isWhereModelDirective(directive) || isWhereEnumDirective(directive) || isRemoveCommandDirective(directive)) .toArray(); - return processCodeModel(tweakModel, service, 'modifiers'); + const state = await new ModelState(service).init(); + const result = await tweakModel(state); + + await service.WriteFile('code-model-v4-modifiers-v2.yaml', serialize(result), undefined, 'code-model-v4'); } diff --git a/powershell/plugins/plugin-create-inline-properties.ts b/powershell/plugins/plugin-create-inline-properties.ts new file mode 100644 index 00000000000..223d9c5f18a --- /dev/null +++ b/powershell/plugins/plugin-create-inline-properties.ts @@ -0,0 +1,403 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { codeModelSchema, CodeModel, ObjectSchema, ConstantSchema, GroupSchema, isObjectSchema, SchemaType, GroupProperty, ParameterLocation, Operation, Parameter, ImplementationLocation, OperationGroup, Request, SchemaContext } from '@azure-tools/codemodel'; +//import { VirtualParameter } from '@azure-tools/codemodel-v3'; +import { getPascalIdentifier, removeSequentialDuplicates, pascalCase, fixLeadingNumber, deconstruct, selectName, EnglishPluralizationService, serialize } from '@azure-tools/codegen'; +import { length, values, } from '@azure-tools/linq'; +import { Host, Session, startSession } from '@azure-tools/autorest-extension-base'; +//import { CommandOperation } from '@azure-tools/codemodel-v3/dist/code-model/command-operation'; +import { CommandOperation } from '../utils/command-operation'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; +import { VirtualParameter } from '../utils/command-operation'; +import { VirtualProperty, getAllProperties, getAllPublicVirtualProperties } from '../utils/schema'; +import { resolveParameterNames } from '../utils/resolve-conflicts'; + +function getPluralizationService(): EnglishPluralizationService { + const result = new EnglishPluralizationService(); + result.addWord('Database', 'Databases'); + result.addWord('database', 'databases'); + return result; +} + +type State = ModelState; + +export function singularize(word: string): string { + return getPluralizationService().singularize(word); +} + +function getNameOptions(typeName: string, components: Array) { + const result = new Set(); + + // add a variant for each incrementally inclusive parent naming scheme. 
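// A minimal re-statement of the candidate-name idea above (sketch only; the real helper relies on
// pascalCase/removeSequentialDuplicates/deconstruct from @azure-tools/codegen, so treat the exact
// strings below as an approximation rather than a contract).
function sketchNameOptions(typeName: string, components: Array<string>): Array<string> {
  const pascal = (parts: Array<string>) => parts.map(p => p.charAt(0).toUpperCase() + p.slice(1)).join('');
  const result = new Set<string>();
  // one candidate per trailing slice of the components (the full path when i === 0)
  for (let i = 0; i < components.length; i++) {
    result.add(pascal(components.slice(-1 * i, components.length)));
  }
  // last-ditch candidate: qualify with the containing type's name
  result.add(pascal([typeName, components[components.length - 1]]));
  return [...result];
}

// sketchNameOptions('ManagedClusterProperties', ['properties', 'dnsPrefix'])
//   => ['PropertiesDnsPrefix', 'DnsPrefix', 'ManagedClusterPropertiesDnsPrefix']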
+ for (let i = 0; i < length(components); i++) { + const subset = pascalCase([...removeSequentialDuplicates(components.slice(-1 * i, length(components)))]); + result.add(subset); + } + + // add a second-to-last-ditch option as . + result.add(pascalCase([...removeSequentialDuplicates([...fixLeadingNumber(deconstruct(typeName)), ...deconstruct(components.last)])])); + return [...result.values()]; +} + + +function createVirtualProperties(schema: ObjectSchema, stack: Array, threshold: number, conflicts: Array) { + // dolauli + // owned: all properties(obj & nonobj) in the schema, + // inherited: Properties from parents, + // inlined: for obj properties, flatten them to children, + // did we already inline this object + if (schema.language.default.inline === 'yes') { + return true; + } + + if (schema.language.default.inline === 'no') { + return false; + } + + // this is bad. This would happen when we have a circular reference in the tree. + // dolauli curious in which case this will happen, got it to use no-inline to skip inline and avoid circular reference + if (schema.language.default.inline === 'inprogress') { + let text = (`Note: during processing of '${schema.language.default.name}' a circular reference has been discovered.`); + text += '\n In order to proceed, you must add a directive to indicate which model you want to not inline.\n'; + text += '\ndirective:'; + text += '\n- no-inline: # choose ONE of these models to disable inlining'; + for (const each of stack) { + text += (`\n - ${each} `); + } + text += '\n'; + conflicts.push(text); + /* `directive: + - no-inline: + - MyModel + - YourModel + - HerModel + ` */ + + // `, and we're skipping inlining.\n ${stack.join(' => ')}`); + // mark it as 'not-inlining' + schema.language.default.inline = 'no'; + return false; + } + + // ok, set to in progress now. + schema.language.default.inline = 'inprogress'; + + // virutual property set. + const virtualProperties = schema.language.default.virtualProperties = { + owned: new Array(), + inherited: new Array(), + inlined: new Array(), + }; + + // First we should run thru the properties in parent classes and create inliners for each property they have. + // dolauli handle properties in parents + for (const parentSchema of values(schema.parents?.immediate)) { + // make sure that the parent is done. + + // Guess parent should always be an object. + if (!isObjectSchema(parentSchema)) + continue; + + createVirtualProperties(parentSchema, [...stack, `${schema.language.default.name}`], threshold, conflicts); + + const parentProperties = parentSchema.language.default.virtualProperties || { + owned: [], + inherited: [], + inlined: [], + }; + + // now we go thru the parent's virutal properties and create our own copies + for (const virtualProperty of [...parentProperties.inherited, ...parentProperties.inlined, ...parentProperties.owned]) { + // make sure that we have a list of shared owners of this property. + virtualProperty.sharedWith = virtualProperty.sharedWith || [virtualProperty]; + + // we are just copying over theirs to ours. 
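// Sketch of the three-state marker ('inprogress' | 'yes' | 'no') used above to survive circular
// references, reduced to a toy cycle detector; ToySchema and markInlinable are illustrative names,
// and the real plugin is more forgiving (it records the conflict and asks for a `no-inline`
// directive instead of failing the whole subtree).
interface ToySchema { name: string; inline?: 'yes' | 'no' | 'inprogress'; children: Array<ToySchema> }

function markInlinable(schema: ToySchema): boolean {
  if (schema.inline === 'yes') { return true; }
  if (schema.inline === 'no') { return false; }
  if (schema.inline === 'inprogress') {
    // we re-entered a schema that is still being walked: a circular reference
    schema.inline = 'no';
    return false;
  }
  schema.inline = 'inprogress';
  let sawCycle = false;
  for (const child of schema.children) {
    if (!markInlinable(child)) {
      sawCycle = true;
    }
  }
  schema.inline = sawCycle ? 'no' : 'yes';
  return !sawCycle;
}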
+ const inheritedProperty = { + name: virtualProperty.name, + property: virtualProperty.property, + private: virtualProperty.private, + nameComponents: virtualProperty.nameComponents, + nameOptions: virtualProperty.nameOptions, + accessViaProperty: virtualProperty, + accessViaMember: virtualProperty, + accessViaSchema: parentSchema, + originalContainingSchema: virtualProperty.originalContainingSchema, + description: virtualProperty.description, + alias: [], + required: virtualProperty.required || !!values(>virtualProperty.originalContainingSchema.properties).first(each => !!each && !!each.required && !!each.serializedName && each.serializedName.toLowerCase() === virtualProperty.property.language.default.name.toLowerCase()), + sharedWith: virtualProperty.sharedWith, + }; + // add it to the list of virtual properties that share this property. + virtualProperty.sharedWith.push(inheritedProperty); + + // add it to this class. + virtualProperties.inherited.push(inheritedProperty); + } + } + + // dolauli figure out object properties and non object properties in this class + const [objectProperties, nonObjectProperties] = values(schema.properties).bifurcate(each => + !schema.language.default['skip-inline'] && // if this schema is marked skip-inline, none can be inlined, treat them all as straight properties. + !each.schema.language.default['skip-inline'] && // if the property schema is marked skip-inline, then it should not be processed either. + each.schema.type === SchemaType.Object && // is it an object + getAllProperties(each.schema).length > 0 // does it have properties (or inherit properties) + ); + + // run thru the properties in this class. + // dolauli handle properties in this class + for (const property of objectProperties) { + const propertyName = property.language.default.name; + + // for each object member, make sure that it's inlined it's children that it can. + createVirtualProperties(property.schema, [...stack, `${schema.language.default.name}`], threshold, conflicts); + + // this happens if there is a circular reference. + // this means that this class should not attempt any inlining of that property at all . + // dolauli pay attention to the condition check + const isDict = property.schema.type === SchemaType.Dictionary || (property.schema).parents?.immediate?.find((s) => s.type === SchemaType.Dictionary); + const canInline = + (!property.schema.language.default['skip-inline']) && + (!property.schema.language.default.byReference) && + (!isDict) && + (property.schema).language.default.inline === 'yes'; + + // the target has properties that we can inline + const virtualChildProperties = property.schema.language.default.virtualProperties || { + owned: [], + inherited: [], + inlined: [], + }; + + const allNotRequired = values(getAllPublicVirtualProperties()).all(each => !each.property.language.default.required); + + const childCount = length(virtualChildProperties.owned) + length(virtualChildProperties.inherited) + length(virtualChildProperties.inlined); + if (canInline && (property.language.default.required || allNotRequired) && (childCount < threshold || propertyName === 'properties')) { + + + // if the child property is low enough (or it's 'properties'), let's create virtual properties for each one. + // create a private property for the inlined ones to use. 
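// The private property created below is the hidden backing member that the generated accessors read and
// write through; the inlined entries added afterwards are what actually surface on the parent type.
// Illustratively (hypothetical names): a 'tags' property whose schema has a 'name' member surfaces as
// 'TagName', while members under 'properties' or 'error' keep their own names, e.g. 'provisioningState'
// surfaces as 'ProvisioningState'.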
+ const privateProperty = { + name: getPascalIdentifier(propertyName), + propertySchema: schema, + property, + nameComponents: [getPascalIdentifier(propertyName)], + nameOptions: getNameOptions(schema.language.default.name, [propertyName]), + private: true, + description: property.summary || '', + originalContainingSchema: schema, + alias: [], + required: property.required || property.language.default.required, + }; + virtualProperties.owned.push(privateProperty); + + for (const inlinedProperty of [...virtualChildProperties.inherited, ...virtualChildProperties.owned]) { + // child properties are be inlined without prefixing the name with the property name + // unless there is a collision, in which case, we have to resolve + + // (scan back from the far right) + // deeper child properties should be inlined with their parent's name + // ie, this.[properties].owner.name should be this.ownerName + + const proposedName = getPascalIdentifier(`${propertyName === 'properties' || /*objectProperties.length === 1*/ propertyName === 'error' ? '' : pascalCase(fixLeadingNumber(deconstruct(propertyName)).map(each => singularize(each)))} ${inlinedProperty.name}`); + + const components = [...removeSequentialDuplicates([propertyName, ...inlinedProperty.nameComponents])]; + virtualProperties.inlined.push({ + name: proposedName, + property: inlinedProperty.property, + private: inlinedProperty.private, + nameComponents: components, + nameOptions: getNameOptions(inlinedProperty.property.schema.language.default.name, components), + accessViaProperty: privateProperty, + accessViaMember: inlinedProperty, + accessViaSchema: schema, + originalContainingSchema: schema, + description: inlinedProperty.description, + alias: [], + required: inlinedProperty.required && privateProperty.required, + }); + } + + + for (const inlinedProperty of [...virtualChildProperties.inlined]) { + // child properties are be inlined without prefixing the name with the property name + // unless there is a collision, in which case, we have to resolve + + // (scan back from the far right) + // deeper child properties should be inlined with their parent's name + // ie, this.[properties].owner.name should be this.ownerName + + + const proposedName = getPascalIdentifier(inlinedProperty.name); + const components = [...removeSequentialDuplicates([propertyName, ...inlinedProperty.nameComponents])]; + virtualProperties.inlined.push({ + name: proposedName, + property: inlinedProperty.property, + private: inlinedProperty.private, + nameComponents: components, + nameOptions: getNameOptions(inlinedProperty.property.schema.language.default.name, components), + accessViaProperty: privateProperty, + accessViaMember: inlinedProperty, + accessViaSchema: schema, + originalContainingSchema: schema, + description: inlinedProperty.description, + alias: [], + required: inlinedProperty.required && privateProperty.required + }); + } + } else { + // otherwise, we're not below the threshold, and we should treat this as a non-inlined property + nonObjectProperties.push(property); + } + } + + for (const property of nonObjectProperties) { + const name = getPascalIdentifier(property.language.default.name); + // this is not something that has properties, + // so we don't need to do any inlining + // however, we can add it to our list of virtual properties + // so that our consumers can get it. 
+ virtualProperties.owned.push({ + name, + property, + nameComponents: [name], + nameOptions: [name], + description: property.summary || '', + originalContainingSchema: schema, + alias: [], + required: property.required || property.language.default.required + }); + } + + // resolve name collisions. + const allProps = [...virtualProperties.owned, ...virtualProperties.inherited, ...virtualProperties.inlined]; + const inlined = new Map(); + + for (const each of allProps) { + // track number of instances of a given name. + inlined.set(each.name, (inlined.get(each.name) || 0) + 1); + } + + const usedNames = new Set(inlined.keys()); + for (const each of virtualProperties.inlined.sort((a, b) => length(a.nameOptions) - length(b.nameOptions))) { + const ct = inlined.get(each.name); + if (ct && ct > 1) { + // console.error(`Fixing collision on name ${each.name} #${ct} `); + each.name = selectName(each.nameOptions, usedNames); + } + } + schema.language.default.inline = 'yes'; + return true; +} + +function createVirtualParameters(operation: CommandOperation) { + // dolauli expand body parameter + // for virtual parameters, there are two keys, operation and body + const virtualParameters = { + operation: new Array(), + body: new Array() + }; + + const dropBodyParameter = !!operation.details.default.dropBodyParameter; + // loop thru the parameters of the command operation, and if there is a body parameter, expand it if necessary. + for (const parameter of values(operation.parameters)) { + if (parameter.details.default.constantValue) { + // this parameter has a constant value -- SKIP IT + continue; + } + // dolauli fromhost and apiversion are not exposed, this if block looks useless + if (parameter.details.default.fromHost || parameter.details.default.apiversion) { + // handled in the generator right now. Not exposed to the user directly. + continue; + } + + if (dropBodyParameter && parameter.details.default.isBodyParameter) { + // the client will make a hidden body parameter for this, and we're expected to fill it. + const vps = parameter.schema.language.default.virtualProperties; + if (vps) { + for (const virtualProperty of [...vps.inherited, ...vps.owned, ...vps.inlined]) { + // dolauli add virtual parameter for virtual property + if (virtualProperty.private || virtualProperty.property.readOnly || virtualProperty.property.language.default.constantValue !== undefined || virtualProperty.property.language.default.HeaderProperty === 'Header') { + // private or readonly properties aren't needed as parameters. + continue; + } + virtualParameters.body.push({ + name: virtualProperty.name, + description: virtualProperty.property.language.default.description, + nameOptions: virtualProperty.nameOptions, + required: virtualProperty.required, + schema: virtualProperty.property.schema, + origin: virtualProperty, + alias: [] + }); + } + } + } else { + // dolauli if not drop body or not body parameter add it to operation + virtualParameters.operation.push({ + name: parameter.details.default.name, + nameOptions: [parameter.details.default.name], + description: parameter.details.default.description, + required: parameter.details.default.isBodyParameter ? 
true : parameter.required, + schema: parameter.schema, + origin: parameter, + alias: [] + }); + } + } + + resolveParameterNames([], virtualParameters); + + // dolauli see operation.details.default.virtualParameters + operation.details.default.virtualParameters = virtualParameters; +} + + +async function createVirtuals(state: State): Promise { + /* + A model class should provide inlined properties for anything in a property called properties + + Classes that have $THRESHOLD number of properties should be inlined. + + Individual models can change the $THRESHOLD for generate + */ + const threshold = await state.getValue('inlining-threshold', 24); + const conflicts = new Array(); + + for (const schema of values(state.model.schemas.objects)) { + // did we already inline this objecct + if (schema.language.default.inlined) { + continue; + } + // we have an object, let's process it. + + createVirtualProperties(schema, new Array(), threshold, conflicts); + + } + if (length(conflicts) > 0) { + // dolauli need to figure out how inline-properties is used in readme.md + state.error('You have one or more circular references in your model, you must add configuration entries to specify which models won\'t be inlined.', ['inline-properties']); + for (const each of conflicts) { + state.error(each, ['circular reference']); + } + throw new Error('Circular references exists, must mark models as `no-inline`'); + } + //dolauli update operations under commands + for (const operation of values(state.model.commands.operations)) { + createVirtualParameters(operation); + } + + return state.model; +} + + +export async function createInlinedPropertiesPlugin(service: Host) { + //const session = await startSession(service, {}, codeModelSchema); + //const result = tweakModelV2(session); + const state = await new ModelState(service).init(); + await service.WriteFile('code-model-v4-create-virtual-properties-v2.yaml', serialize(await createVirtuals(state)), undefined, 'code-model-v4'); + //return processCodeModel(createVirtuals, service, 'create-virtual-properties-v2'); +} diff --git a/powershell/plugins/plugin-tweak-m4-model.ts b/powershell/plugins/plugin-tweak-m4-model.ts new file mode 100644 index 00000000000..858c6f5f0ea --- /dev/null +++ b/powershell/plugins/plugin-tweak-m4-model.ts @@ -0,0 +1,176 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ +import { ArraySchema, CodeModel, DictionarySchema, getAllProperties, HttpHeader, ObjectSchema, Property, Schema, SchemaType } from '@azure-tools/codemodel'; +import { serialize } from '@azure-tools/codegen'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; +import { StatusCodes } from '../utils/http-definitions'; +import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; + +import { Host } from '@azure-tools/autorest-extension-base'; + +type State = ModelState; + +let directives: Array = []; + +async function tweakModel(state: State): Promise { + const model = state.model; + + addResponseHeaderSchema(model); + + addDictionaryApiVersion(model); + + removeM4DefaultDescription(model); + + handleNoinlineDirective(state); + + return model; +} + +function handleNoinlineDirective(state: State) { + let inlineModels: string[] = []; + for (const directive of directives.filter(each => each['no-inline'])) { + inlineModels = inlineModels.concat(>values(directive['no-inline']).toArray()); + } + for (const model of state.model.schemas.objects || []) { + if (inlineModels.includes(model.language.default.name)) { + model.language.default['skip-inline'] = true; + } + } +} +function addResponseHeaderSchema(model: CodeModel) { + // In the remodeler, each operation's response headers had their own schema, and each header was a property of that schema. + // But in m4, if 'schema' is not explicitly defined, there won't be a schema for the headers even when 'headers' is specified. + // To keep backward compatibility, we create the headers schema here. + + model.operationGroups.forEach((group) => { + group.operations?.forEach((op) => { + if (!op.responses) { + return; + } + op.responses.forEach((resp) => { + if ((resp).schema) { + return; + } + + const headers = resp.protocol.http?.headers as Array; + if (headers === undefined) { + return; + } + + const responseCode = resp.protocol.http?.statusCodes?.[0]; + if (responseCode === undefined) { + return; + } + + // Follow naming pattern in m3 + const code = ((StatusCodes)[responseCode] || '') || responseCode; + const schemaName = `${group.language.default.name}_${op.language.default.name} ${code} ResponseHeaders`; + + const newSchema = model.schemas.objects?.find((schema) => schema.language.default.name === schemaName) || + new ObjectSchema(schemaName, ''); + newSchema.language.default.isHeaderModel = true; + + if (!model.schemas.objects) { + model.schemas.objects = []; + } + model.schemas.objects.push(newSchema); + + headers.forEach((head) => { + // We lost description and x-ms-client-name in m4.
So newProp's description is empty and use header as serializedName + const newProp = new Property(head.header, '', head.schema, { + readOnly: false, + required: false, + serializedName: head.header + }); + newProp.language.default.HeaderProperty = 'Header'; + + if (!newSchema.properties) { + newSchema.properties = []; + } + newSchema.properties.push(newProp); + }); + + // Set response header use new schema + resp.language.default.headerSchema = newSchema; + }); + }); + }); +} + +function addDictionaryApiVersion(model: CodeModel) { + + model.schemas.dictionaries?.forEach((schema) => { + if (schema.apiVersions) { + return; + } + if (schema.elementType && schema.elementType.apiVersions) { + schema.apiVersions = JSON.parse(JSON.stringify(schema.elementType.apiVersions)); + } + }) + + // If we cannot find api version from element type, try to get it from object schema who refers the dict or any. + + model.schemas.objects?.forEach((schema) => { + if (!schema.apiVersions) { + return; + } + + for (const prop of getAllProperties(schema)) { + if (prop.schema.type !== SchemaType.Dictionary || prop.schema.apiVersions) { + continue; + } + prop.schema.apiVersions = JSON.parse(JSON.stringify(schema.apiVersions)); + } + }) +} + +function removeM4DefaultDescription(model: CodeModel) { + // For dictionary and arrya schema and property, if there is no description assigned, m4 will set a default description like: Dictionary of or Array of + // To keep same action as m3, we will set it to empty string + + const visited = new Set(); + [...model.schemas.objects ?? [], ...model.schemas.dictionaries ?? [], ...model.schemas.arrays ?? []].forEach((schema) => { + recursiveRemoveM4DefaultDescription(schema, visited); + }) +} + +function recursiveRemoveM4DefaultDescription(schema: Schema, visited: Set) { + if (visited.has(schema) || (schema.type !== SchemaType.Object && schema.type !== SchemaType.Dictionary && schema.type !== SchemaType.Array)) { + return; + } + // Default description pattern in m4 + const defaultDictDescPattern = /Dictionary of <.?>$/; + const defaultArrayDescPattern = /Array of .?$/; + visited.add(schema); + if (schema.type === SchemaType.Dictionary) { + const dictSchema = schema as DictionarySchema; + recursiveRemoveM4DefaultDescription(dictSchema.elementType, visited); + if (defaultDictDescPattern.test(dictSchema.language.default.description)) { + dictSchema.language.default.description = ''; + } + } else if (schema.type === SchemaType.Array) { + const arrSchema = schema as ArraySchema; + recursiveRemoveM4DefaultDescription(arrSchema.elementType, visited); + if (defaultArrayDescPattern.test(schema.language.default.description)) { + schema.language.default.description = ''; + } + } else if (schema.type === SchemaType.Object) { + const objSchema = schema as ObjectSchema; + for (const prop of getAllProperties(objSchema)) { + recursiveRemoveM4DefaultDescription(prop.schema, visited); + if (prop.schema.type === SchemaType.Dictionary && (defaultDictDescPattern.test(prop.language.default.description) || defaultArrayDescPattern.test(prop.language.default.description))) { + prop.language.default.description = ''; + } + } + } +} + +export async function tweakM4ModelPlugin(service: Host) { + const allDirectives = await service.GetValue('directive'); + directives = values(allDirectives).toArray(); + const state = await new ModelState(service).init(); + service.WriteFile('code-model-v4-tweakm4codemodel.yaml', serialize(await tweakModel(state)), undefined, 'code-model-v4'); +} diff --git 
a/powershell/plugins/plugin-tweak-model-azure-v2.ts b/powershell/plugins/plugin-tweak-model-azure-v2.ts new file mode 100644 index 00000000000..50f2ab18828 --- /dev/null +++ b/powershell/plugins/plugin-tweak-model-azure-v2.ts @@ -0,0 +1,202 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + + +import { keys, length, values } from '@azure-tools/linq'; + +import { Channel, Host } from '@azure-tools/autorest-extension-base'; +import { ModelState } from '../utils/model-state'; +import { PwshModel } from '../utils/PwshModel'; +import { getAllProperties, ObjectSchema, Response, SchemaType, Schema } from '@azure-tools/codemodel'; +import { serialize } from '@azure-tools/codegen'; +type State = ModelState; + +const xmsPageable = 'x-ms-pageable'; + +async function tweakModel(state: State): Promise { + const model = state.model; + + // service.Message({ Channel: Channel.Debug, Text: "THIS IS THE AZURE TWEAKER" }); + + // TODO: + // look at models, and extract out any case that has an IRESOURCE, IPROXYRESOURCE, etc. + // and use the common versions of those models. + + // Is the result marked x-ms-pagable? + // identify the next link (null means just get the results as an array) + // if nextLinkName is null, then it won't actually page, but we'd like to unroll the contents anyway. + for (const group of values(model.operationGroups)) { + for (const operation of values(group.operations)) { + if (operation.extensions && operation.extensions[xmsPageable]) { + // it's marked pagable. + operation.language.default.pageable = { + responseType: 'pageable', + nextLinkName: operation.extensions[xmsPageable].nextLinkName || undefined, + itemName: operation.extensions[xmsPageable].itemName || 'value', + operationName: operation.extensions[xmsPageable].operationName || `${operation.language.default.name}Next`, + }; + continue; + } + + // let's just check to see if it looks like it's supposed to be a collection + for (const response of values(operation.responses)) { + // does the response have a schema? + // TODO: check schema + if (getSchema(response)) { + const schema = getSchema(response); + + // is this just an array response? + if (schema.type === SchemaType.Array) { + operation.language.default.pageable = { + responseType: 'array', + }; + continue; + } + + // if it returns an object, let's see what's inside... + if (schema.type === SchemaType.Object) { + const objSchema = schema as ObjectSchema; + + // does it have a single member that is an array (ie, value : [...]) + if (length(objSchema.properties) === 1 && !objSchema.parents) { + const property = objSchema.properties?.[0]; + if (property) { + if (property.schema.type === SchemaType.Array) { + // nested array! + operation.language.default.pageable = { + responseType: 'nested-array', + itemName: property.serializedName, + }; + } + continue; + } + } + + // xichen: If response schema has only 2 properties and one of it is nextLink, the other is array + + // does it kinda look like a x-ms-pagable (value/nextlink?) 
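// Illustratively (hypothetical model): a response schema shaped like { value: Widget[], nextLink: string }
// with no parents is treated below as pageable, with responseType 'nested-array', itemName taken from the
// array property's serializedName ('value'), and nextLinkName fixed to 'nextLink'.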
+ if (length(objSchema.properties) === 2 && !objSchema.parents) { + const hasNextLink = objSchema.properties?.some((prop) => prop.serializedName === 'nextLink'); + if (hasNextLink) { + const property = objSchema.properties?.find((prop) => prop.serializedName !== 'nextLink'); + if (property) { + if (property.schema.type === SchemaType.Array) { + // nested array! + operation.language.default.pageable = { + responseType: 'nested-array', + itemName: property.serializedName, + nextLinkName: 'nextLink' + }; + } + continue; + } + } + + } + + } + } + } + } + } + + + // make sure that all operations with lro have an options block. + for (const group of values(model.operationGroups)) { + for (const operation of values(group.operations)) { + if (operation.extensions && operation.extensions['x-ms-long-running-operation']) { + operation.language.default.asjob = true; + + operation.language.default.lro = operation.extensions['x-ms-long-running-operation-options'] || { + 'final-state-via': 'default' + }; + + // LRO 201 and 202 responses are handled internally, so remove any 201/202 responses in the operation + operation.responses = (>(operation.responses)).filter(each => each.protocol.http?.statusCodes[0] !== '201' && each.protocol.http?.statusCodes[0] !== '202'); + //delete operation.responses['201']; + //delete operation.responses['202']; + } + } + } + + // xichen: Cannot find 'x-ms-metadata' from swagger repo. Are we still using it? + + // Api Version parameter handling for Azure. + // if there is only a single api-version for the operation, let's just make it a constant + // otherwise, we need to make it selectable, but default to the 'latest' version there is. + for (const group of values(model.operationGroups)) { + for (const operation of values(group.operations)) { + const apiVersions = operation.apiVersions; + for (const parameter of values(operation.parameters)) { + + if (parameter.language.default.serializedName === 'api-version') { + // only set it if it hasn't been set yet. + // if (parameter.details.default.constantValue) { + //continue; + //} + + if (apiVersions) { + // set the constant value to the first one + if (length(apiVersions) === 1) { + parameter.language.default.constantValue = apiVersions[0].version; + continue; + } + + // otherwise, the parameter can't have a constant value + parameter.language.default.constantValue = undefined; + + // mark it so that we can add profile support in the method generation + parameter.language.default.apiversion = true; + } + } + } + } + } + + // when make-sub-resources-byreference is specified, mark models with a writable id as byref. + if (await state.getValue('azure', false) && await state.getValue('make-sub-resources-byreference', false)) { + + for (const schema of values(model.schemas.objects ?? [])) { + // find schemas that have an 'id' and are not readonly + if (values(getAllProperties(schema)).any(prop => prop.serializedName === 'id' && !prop.language.default.readOnly)) { + + // look thru the operations, and the PUT methods + for (const group of model.operationGroups) { + for (const op of values(group.operations)) { + for (const request of op.requests ?? []) { + + if (request.protocol.http?.method === 'put') { + for (const response of op.responses ?? 
[]) { + // see if any of the responses have the same schema as we are looking for + if (getSchema(response) === schema) { + // tell it not to inline that + schema.language.default.byReference = true; + break; + } + } + break; + } + + } + } + } + } + } + } + + return model; +} + +function getSchema(response: Response): Schema { + return (response).schema; +} + +// Azure version - +// Additional tweaks the code model to adjust things so that the code will generate better. + +export async function tweakModelAzurePluginV2(service: Host) { + const state = await new ModelState(service).init(); + await service.WriteFile('code-model-v4-tweakcodemodelazure-v2.yaml', serialize(await tweakModel(state)), undefined, 'code-model-v4'); +} diff --git a/powershell/plugins/plugin-tweak-model.ts b/powershell/plugins/plugin-tweak-model.ts new file mode 100644 index 00000000000..837d3f32c83 --- /dev/null +++ b/powershell/plugins/plugin-tweak-model.ts @@ -0,0 +1,642 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ +import { Property, SealedChoiceSchema, codeModelSchema, CodeModel, StringSchema, ObjectSchema, GroupSchema, isObjectSchema, SchemaType, GroupProperty, ParameterLocation, Operation, Parameter, VirtualParameter, getAllProperties, ImplementationLocation, OperationGroup, Request, SchemaContext, ChoiceSchema, Scheme, Schema, ConstantSchema, ConditionalValue } from '@azure-tools/codemodel'; +import { pascalCase, deconstruct, fixLeadingNumber, serialize, KnownMediaType } from '@azure-tools/codegen'; +import { items, keys, values, Dictionary, length } from '@azure-tools/linq'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; + +import { Channel, Host, Session, startSession } from '@azure-tools/autorest-extension-base'; +import { defaultCipherList } from 'constants'; +import { String } from '../llcsharp/schema/string'; +import { JsonType } from '../utils/schema'; + +export const HeaderProperty = 'HeaderProperty'; +export enum HeaderPropertyType { + Header = 'Header', + HeaderAndBody = 'HeaderAndBody' +} +type State = ModelState; + + +// For now, we are not dynamically changing the service-name. Instead, we would figure out a method to change it during the creation of service readme's. 
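// Worked examples of the transformation below (illustrative titles, not taken from any real spec):
//   'Azure Compute Management Client'     -> 'AzureComputeManagementClient' -> 'Compute'
//   'Microsoft Storage Resource Provider' -> 'MicrosoftStorageResourceProvider' -> 'Storage'
// If stripping the well-known tokens leaves an empty string, the pascal-cased title itself is returned.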
+export function titleToAzureServiceName(title: string): string { + const titleCamelCase = pascalCase(deconstruct(title)).trim(); + const serviceName = titleCamelCase + // Remove: !StartsWith(Management)AndContains(Management), Client, Azure, Microsoft, APIs, API, REST + .replace(/(?!^Management)(?=.*)Management|Client|Azure|Microsoft|APIs|API|REST/g, '') + // Remove: EndsWith(ServiceResourceProvider), EndsWith(ResourceProvider), EndsWith(DataPlane), EndsWith(Data) + .replace(/ServiceResourceProvider$|ResourceProvider$|DataPlane$|Data$/g, ''); + return serviceName || titleCamelCase; +} + + +function dropDuplicatePropertiesInChildSchemas(schema: ObjectSchema, state: State, map: Map = new Map()) { + let success = true; + for (const parent of values(schema.parents?.immediate)) { + //handle parents first + if (!dropDuplicatePropertiesInChildSchemas(parent, state, map)) { + return false; + } + } + for (const { key: id, value: property } of items(schema.properties)) { + //see if it's in the parent. + const pProp = map.get(property.serializedName); + if (pProp) { + //if the parent prop is the same type as the child prop + //we're going to drop the child property. + if (pProp.schema.type === property.schema.type) { + //if it's an object type, it has to be the exact same schema type too + if (pProp.schema.type != SchemaType.Object || pProp.schema === property.schema) { + state.verbose(`Property '${property.serializedName}' in '${schema.language.default.name}' has a property the same as the parent, and is dropping the duplicate.`, {}); + if (schema.properties) { + delete schema.properties[id]; + } + } else { + const conflict = `Property '${property.serializedName}' in '${schema.language.default.name}' has a conflict with a parent schema (allOf ${schema.parents?.immediate.joinWith(each => each.language.default.name)}.`; + state.error(conflict, [], {}); + success = false; + } + } + } + else { + map.set(property.serializedName, property); + } + } + return success; +} + +async function tweakModelV2(state: State): Promise { + const title = pascalCase(fixLeadingNumber(deconstruct(await state.getValue('title', state.model.info.title)))); + state.setValue('title', title); + + const serviceName = await state.getValue('service-name', titleToAzureServiceName(title)); + state.setValue('service-name', serviceName); + + const model = state.model; + const schemas = model.schemas; + // xichen: do we need other schema types? + const allSchemas: Schema[] = [...schemas.objects ?? [], ...schemas.choices ?? [], ...schemas.sealedChoices ?? 
[]]; + + model.commands = { + operations: new Dictionary(), + parameters: new Dictionary(), + }; + + // we're going to create a schema that represents the distinct sum + // of all operation PATH parameters + const universalId = new ObjectSchema(`${serviceName}Identity`, ''); + // xichen: Add 'universal-parameter-type' in language.default.uid, so that we can find it later + universalId.language.default.uid = 'universal-parameter-type'; + universalId.apiVersions = universalId.apiVersions || []; + state.model.schemas.objects = state.model.schemas.objects || []; + (universalId.language.default).uid = 'universal-parameter-type'; + state.model.schemas.objects.push(universalId); + + for (const group of values(model.operationGroups)) { + for (const operation of values(group.operations)) { + for (const param of values(operation.parameters).where(each => each.protocol?.http?.in === ParameterLocation.Path)) { + const name = param.language.default.name; + const hasName = universalId.properties?.find((prop) => prop.language.default.name.toLocaleLowerCase() === name.toLocaleLowerCase()); + if (!hasName) { + if (!universalId.properties) { + universalId.properties = []; + } + const newProp = new Property(name, param.language.default.description, param.schema); + newProp.required = false; + newProp.readOnly = false; + newProp.serializedName = param.language.default.serializedName; + universalId.properties.push(newProp); + } + } + } + } + + if (await state.getValue('azure', false)) { + const idScheam = new Schema('_identity_type_', 'Resource identity path', SchemaType.String); + const idProp = new Property('id', 'Resource identity path', idScheam); + idProp.readOnly = false; + idProp.required = false; + idProp.language.default.uid = 'universal-parameter:resource identity'; + if (!universalId.properties) { + universalId.properties = []; + } + universalId.properties.push(idProp); + } + + // xichen: do nothing in m3 logic. Comment it out + // if an operation has a response that has a schema with string/binary we should make the response application/octet-stream + // for (const operationGroups of values(model.operationGroups)) { + // for (const operation of values(operationGroups.operations)) { + // for (const response of values(operation.responses)) { + // if ((response as any).schema) { + // const respSchema = response as any; + // if (respSchema.type === SchemaType.String && respSchema.format === StringFormat.Binary) { + // // WHY WAS THIS HERE?! + // // response.mimeTypes = [KnownMediaType.Stream]; + // } + // } + // } + // } + // } + + // schemas that have parents and implement properties that are in the parent schemas + // will have the property dropped in the child schema + for (const schema of values(model.schemas.objects)) { + if (length(schema.parents?.immediate) > 0) { + if (!dropDuplicatePropertiesInChildSchemas(schema, state)) { + throw new Error('Schemas are in conflict.'); + } + } + } + + + if (await state.getValue('use-storage-pipeline', false)) { + // we're going to create new models for the reponse headers ? 
+ + } else { + + // if an operation has a body parameter with string/binary, we should make the request application/octet-stream + + // === Header Schemas === + // go thru the operations, find responses that have header values, and add a property to the schemas that are returned with those values + for (const operationGroups of values(model.operationGroups)) { + for (const operation of values(operationGroups.operations)) { + for (const response of values(operation.responses)) { + // for a given response, find the possible models that can be returned from the service + for (const header of values(response.protocol.http?.headers)) { + + if (!(response as any).schema) { + // no response schema? can we fake one? + // service.Message({ Channel: Channel.Debug, Text: `${header.key} is in ${operation.details.default.name} but there is no response model` }); + continue; + } + + + // if the method response has a schema and it's an object, we're going to add our properties to the schema object. + // yes, this means that the reponse model may have properties that are undefined if the server doesn't send back the header + // and other operations might add other headers that are not the same. + + // if the method's response is a primitive value (string, boolean, null, number) or an array, we can't modify that type obviously + // in which case, we're going to add a header + + // work with schemas that have objects only. + + if ((response as any).schema.type === SchemaType.Object) { + const respSchema = (response as any).schema as ObjectSchema; + const curHeader = header as any; + const headerKey = curHeader.header as string; + + respSchema.language.default.hasHeaders = true; + + const property = values(getAllProperties(respSchema)).first((each) => each.language.default.name === headerKey); + if (!property) { + state.message({ Channel: Channel.Debug, Text: `Adding header property '${headerKey}' to model ${respSchema.language.default.name}` }); + + // create a property for the header value + const newProperty = new Property(headerKey, curHeader.description || '', curHeader.schema); + newProperty.language.default.required = false; + + // mark it that it's a header-only property + newProperty.language.default[HeaderProperty] = HeaderPropertyType.Header; + + // add it to this model. + if (!respSchema.properties) { + respSchema.properties = []; + } + respSchema.properties.push(newProperty); + } else { + // there is a property with this name already. + // was this previously declared as a header only property? + if (!property.language.default[HeaderProperty]) { + + state.message({ Channel: Channel.Debug, Text: `Property ${headerKey} in model ${respSchema.language.default.name} can also come from the header.` }); + // no.. There is duplication between header and body property. Probably because of etags. + // tell it to be a header-and-body property. 
+ property.language.default[HeaderProperty] = HeaderPropertyType.HeaderAndBody; + property.language.default.name = headerKey; + } + } + } + } + } + } + } + + // remove well-known header parameters from operations and mark the operation as supporting that feature + + for (const operationGroups of values(model.operationGroups)) { + for (const operation of values(operationGroups.operations)) { + // if we have an operation with a body, and the content-type is multipart/formdata + // then we should go thru the parameters of the body and look for string/binary parameters + // and remember to add another parameter for the filename of the string/binary + const request = operation.requests?.[0]; + request?.parameters?.filter((param) => param.schema.type !== SchemaType.Object && param.protocol.http?.in === 'body' && param.protocol.http?.style === KnownMediaType.Multipart) + .forEach((param) => { + for (const prop of values(getAllProperties(param.schema as ObjectSchema))) { + if (prop.schema.type === SchemaType.Binary) { + prop.language.default.isNamedStream = true; + } + } + }); + + // move well-known header parameters into details, and we can process them in the generator how we please. + // operation.details.default.headerparameters = values(operation.parameters).where(p => p.in === ParameterLocation.Header && ['If-Match', 'If-None-Match'].includes(p.name)).toArray(); + + // remove if-match and if-none-match parameters from the operation itself. + // operation.parameters = values(operation.parameters).where(p => !(p.in === ParameterLocation.Header && ['If-Match', 'If-None-Match'].includes(p.name))).toArray(); + + } + } + + // identify models that are polymorphic in nature + for (const schema of allSchemas) { + if (schema instanceof ObjectSchema) { + const objSchema = schema as ObjectSchema; + // if this actual type is polymorphic, make sure we know that.
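// Illustratively (hypothetical schemas): a base 'Pet' with a 'kind' discriminator is marked
// isPolymorphic = true and its children are recorded as polymorphicChildren, while a child such as 'Cat'
// carries the discriminator value declared through the 'x-ms-discriminator-value' extension.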
+ // parent class + if (objSchema.discriminator) { + objSchema.language.default.isPolymorphic = true; + if (objSchema.children) { + objSchema.language.default.polymorphicChildren = objSchema.children?.all; + } + } + + // sub class + if (objSchema.discriminatorValue) { + objSchema.language.default.discriminatorValue = objSchema.extensions?.['x-ms-discriminator-value']; + } + } + } + + // identify parameters that are constants + for (const group of values(model.operationGroups)) { + for (const operation of values(group.operations)) { + for (const parameter of values(operation.parameters)) { + if (parameter.required) { + if (parameter.schema.type === SchemaType.Choice) { + const choiceSchema = parameter.schema as ChoiceSchema; + if (choiceSchema.choices.length === 1) { + parameter.language.default.constantValue = choiceSchema.choices[0].value; + } + } else if (parameter.schema.type === SchemaType.Constant) { + const constantSchema = parameter.schema as ConstantSchema; + parameter.language.default.constantValue = constantSchema.value.value; + } else if (parameter.schema.type === SchemaType.SealedChoice) { + const sealedChoiceSchema = parameter.schema as SealedChoiceSchema; + if (sealedChoiceSchema.choices.length === 1) { + parameter.language.default.constantValue = sealedChoiceSchema.choices[0].value; + if (sealedChoiceSchema.language.default.skip !== false) { + sealedChoiceSchema.language.default.skip = true; + } + } + } + } else { + if (parameter.schema.type === SchemaType.SealedChoice) { + const sealedChoiceSchema = parameter.schema as SealedChoiceSchema; + if (sealedChoiceSchema.choices.length === 1) { + sealedChoiceSchema.language.default.skip = false; + } + } + } + } + } + } + + // identify properties that are constants + for (const schema of values(schemas.objects)) { + for (const property of values(schema.properties)) { + if (property === undefined) { + continue; + } + if (property.required) { + if (property.schema.type === SchemaType.Choice) { + const choiceSchema = property.schema as ChoiceSchema; + if (choiceSchema.choices.length === 1) { + // properties with an enum single value are constants + // add the constant value + property.language.default.constantValue = choiceSchema.choices[0].value; + } + } else if (property.schema.type === SchemaType.Constant) { + const constantSchema = property.schema as ConstantSchema; + property.language.default.constantValue = constantSchema.value.value; + } else if (property.schema.type === SchemaType.SealedChoice) { + const sealedChoiceSchema = property.schema as SealedChoiceSchema; + if (sealedChoiceSchema.choices.length === 1) { + property.language.default.constantValue = sealedChoiceSchema.choices[0].value; + if (sealedChoiceSchema.language.default.skip !== false) { + sealedChoiceSchema.language.default.skip = true; + } + } + } + } else { + if (property.schema.type === SchemaType.SealedChoice) { + const sealedChoiceSchema = property.schema as SealedChoiceSchema; + if (sealedChoiceSchema.choices.length === 1) { + sealedChoiceSchema.language.default.skip = false; + } + } + } + } + } + + // xichen: Do we need skip? 
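A minimal sketch of the constant-value rule that the two loops above apply, factored into a standalone helper purely for illustration; the helper and its name (tryGetConstantValue) are hypothetical, not part of this change, and it assumes only the @azure-tools/codemodel types this file already imports.

function tryGetConstantValue(schema: Schema): unknown {
  // a required parameter/property whose schema can only ever hold one value is treated as a constant
  if (schema.type === SchemaType.Constant) {
    return (schema as ConstantSchema).value.value;
  }
  if (schema.type === SchemaType.Choice && (schema as ChoiceSchema).choices.length === 1) {
    return (schema as ChoiceSchema).choices[0].value;
  }
  if (schema.type === SchemaType.SealedChoice && (schema as SealedChoiceSchema).choices.length === 1) {
    return (schema as SealedChoiceSchema).choices[0].value;
  }
  return undefined; // more than one possible value -> not a constant
}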
+ // const enumsToSkip = new Set(); + // // identify properties that are constants + // for (const schema of values(model.schemas)) { + // for (const property of values(schema.properties)) { + // if (property.details.default.required && length(property.schema.enum) === 1) { + // // properties with an enum single value are constants + // // add the constant value + // property.details.default.constantValue = property.schema.enum[0]; + + // // mark as skip the generation of this model + // enumsToSkip.add(property.schema.details.default.uid); + + // // make it a string and keep its name + // property.schema = new Schema(property.schema.details.default.name, { type: property.schema.type }); + // } else { + // enumsToSkip.delete(property.schema.details.default.uid); + // } + // } + // } + + // // mark enums that shouldn't be generated + // for (const schema of values(model.schemas)) { + // if (enumsToSkip.has(schema.details.default.uid)) { + // schema.details.default.skip = true; + // } + // } + + return model; +} +// async function tweakModel(state: State): Promise { +// const title = pascalCase(fixLeadingNumber(deconstruct(await state.getValue('title', state.model.info.title)))); +// state.setValue('title', title); + +// const serviceName = await state.getValue('service-name', titleToAzureServiceName(title)); +// state.setValue('service-name', serviceName); + +// const model = state.model; +// model.schemas = model.schemas || []; + +// const set = new Set(); +// const removes = new Set(); + +// for (const key of keys(model.schemas)) { +// const value = model.schemas[key]; +// if (set.has(value)) { +// // this schema is already in the collection. let's drop it when we're done +// removes.add(key); +// } else { +// set.add(value); +// } +// } + +// // we're going to create a schema that represents the distinct sum +// // of all operation PATH parameters +// const universalId = new Schema(`${serviceName}Identity`, { +// type: JsonType.Object, description: 'Resource Identity', details: { +// default: { +// uid: 'universal-parameter-type' +// } +// } +// }); +// model.schemas['universal-parameter-type'] = universalId; + +// for (const operation of values(model.http.operations)) { +// for (const param of values(operation.parameters).where(each => each.in === ParameterLocation.Path)) { +// const name = param.details.default.name; +// if (!universalId.properties[name]) { +// universalId.properties[name] = new Property(name, { +// schema: param.schema, description: param.description, serializedName: name, details: { +// default: { +// description: param.description, +// name: name, +// required: false, +// readOnly: false, +// uid: `universal-parameter:${name}` +// } +// } +// }); +// } +// } +// } + +// if (await state.getValue('azure', false)) { +// universalId.properties['id'] = new Property('id', { +// schema: new Schema('_identity_type_', { type: JsonType.String, description: 'Resource identity path' }), +// description: 'Resource identity path', serializedName: 'id', details: { +// default: { +// description: 'Resource identity path', +// name: 'id', +// required: false, +// readOnly: false, +// uid: 'universal-parameter:resource identity' +// } +// } +// }); +// } + +// // remove schemas that are referenced elsewhere previously. 
+// for (const each of removes.values()) { +// delete model.schemas[each]; +// } + +// // if an operation has a response that has a schema with string/binary we should make the response application/octet-stream +// for (const operation of values(model.http.operations)) { +// for (const responses of values(operation.responses)) { +// for (const response of responses) { +// if (response.schema) { +// if (response.schema.type === JsonType.String && response.schema.format === StringFormat.Binary) { +// // WHY WAS THIS HERE?! +// // response.mimeTypes = [KnownMediaType.Stream]; +// } +// } +// } +// } +// } + +// // schemas that have parents and implement properties that are in the parent schemas +// // will have the property dropped in the child schema +// for (const schema of values(model.schemas)) { +// if (length(schema.allOf) > 0) { +// if (!dropDuplicatePropertiesInChildSchemas(schema, state)) { +// throw new Error('Schemas are in conflict.'); +// } +// } +// } + + +// if (await state.getValue('use-storage-pipeline', false)) { +// // we're going to create new models for the reponse headers ? + +// } else { + +// // if an operation has a body parameter with string/binary, we should make the request application/octet-stream + +// // === Header Schemas === +// // go thru the operations, find responses that have header values, and add a property to the schemas that are returned with those values +// for (const operation of values(model.http.operations)) { +// for (const responses of values(operation.responses)) { +// for (const response of responses) { +// // for a given response, find the possible models that can be returned from the service +// for (const header of values(response.headers)) { + +// if (!response.schema) { +// // no response schema? can we fake one? +// // service.Message({ Channel: Channel.Debug, Text: `${header.key} is in ${operation.details.default.name} but there is no response model` }); +// continue; +// } + +// // if the method response has a schema and it's an object, we're going to add our properties to the schema object. +// // yes, this means that the reponse model may have properties that are undefined if the server doesn't send back the header +// // and other operations might add other headers that are not the same. + +// // if the method's response is a primitive value (string, boolean, null, number) or an array, we can't modify that type obviously +// // in which case, we're going to add a header + +// // work with schemas that have objects only. + +// if (isSchemaObject(response.schema)) { +// response.schema.details.default.hasHeaders = true; +// const property = response.schema.properties[header.key]; +// if (!property) { +// state.message({ Channel: Channel.Debug, Text: `Adding header property '${header.key}' to model ${response.schema.details.default.name}` }); + +// // create a property for the header value +// const newProperty = new Property(header.key, { schema: header.schema, description: header.description }); +// newProperty.details.default.name = header.key; +// newProperty.details.default.required = false; + +// // mark it that it's a header-only property +// newProperty.details.default[HeaderProperty] = HeaderPropertyType.Header; + +// // add it to this model. +// response.schema.properties[header.key] = newProperty; +// } else { +// // there is a property with this name already. +// // was this previously declared as a header only property? 
+// if (!property.details.default[HeaderProperty]) { + +// state.message({ Channel: Channel.Debug, Text: `Property ${header.key} in model ${response.schema.details.default.name} can also come from the header.` }); +// // no.. There is duplication between header and body property. Probably because of etags. +// // tell it to be a header-and-body property. +// property.details.default[HeaderProperty] = HeaderPropertyType.HeaderAndBody; +// property.details.default.name = header.key; +// } +// } +// } +// } +// } +// } +// } +// } + +// // remove well-known header parameters from operations and add mark the operation has supporting that feature + +// for (const operation of values(model.http.operations)) { +// // if we have an operation with a body, and content-type is a multipart/formdata +// // then we should go thru the parameters of the body and look for a string/binary parameters +// // and remember to add another parameter for the filename of the string/binary +// if (operation.requestBody && knownMediaType(operation.requestBody.contentType) === KnownMediaType.Multipart) { +// for (const prop of values(operation.requestBody.schema.properties)) { +// if (prop.schema.type === JsonType.String && prop.schema.format === 'binary') { +// prop.details.default.isNamedStream = true; +// } +// } +// } + +// // move well-known hearder parameters into details, and we can process them in the generator how we please. +// // operation.details.default.headerparameters = values(operation.parameters).where(p => p.in === ParameterLocation.Header && ['If-Match', 'If-None-Match'].includes(p.name)).toArray(); + +// // remove if-match and if-none-match parameters from the operation itself. +// // operation.parameters = values(operation.parameters).where(p => !(p.in === ParameterLocation.Header && ['If-Match', 'If-None-Match'].includes(p.name))).toArray(); + +// } + +// // identify models that are polymorphic in nature +// for (const schema of values(model.schemas)) { +// // if this actual type is polymorphic, make sure we know that. +// if (schema.discriminator) { +// schema.details.default.isPolymorphic = true; +// } + + +// const parents = getPolymorphicBases(schema); +// if (length(parents) > 0) { +// // if our parent is polymorphic, then we must have a discriminator value +// schema.details.default.discriminatorValue = schema.extensions['x-ms-discriminator-value'] || schema.details.default.name; + +// // and make sure that all our polymorphic parents have a reference to this type. 
+// for (const parent of getPolymorphicBases(schema)) { + +// parent.details.default.polymorphicChildren = parent.details.default.polymorphicChildren || new Array(); +// parent.details.default.polymorphicChildren.push(schema); +// } +// } +// } + +// // identify parameters that are constants +// for (const operation of values(model.http.operations)) { +// for (const parameter of values(operation.parameters)) { +// if (parameter.required && length(parameter.schema.enum) === 1) { +// // parameters with an enum single value are constants +// parameter.details.default.constantValue = parameter.schema.enum[0]; +// } +// } +// } + +// const enumsToSkip = new Set(); +// // identify properties that are constants +// for (const schema of values(model.schemas)) { +// for (const property of values(schema.properties)) { +// if (property.details.default.required && length(property.schema.enum) === 1) { +// // properties with an enum single value are constants +// // add the constant value +// property.details.default.constantValue = property.schema.enum[0]; + +// // mark as skip the generation of this model +// enumsToSkip.add(property.schema.details.default.uid); + +// // make it a string and keep its name +// property.schema = new Schema(property.schema.details.default.name, { type: property.schema.type }); +// } else { +// enumsToSkip.delete(property.schema.details.default.uid); +// } +// } +// } + +// // mark enums that shouldn't be generated +// for (const schema of values(model.schemas)) { +// if (enumsToSkip.has(schema.details.default.uid)) { +// schema.details.default.skip = true; +// } +// } + +// for (const operation of values(model.http.operations)) { +// for (const { key: responseCode, value: responses } of items(operation.responses)) { +// for (const response of values(responses)) { +// if (responseCode === 'default' || response.extensions['x-ms-error-response'] === true) { +// response.details.default.isErrorResponse = true; +// } +// } +// } +// } + +// return model; +// } + +// Universal version - +// tweaks the code model to adjust things so that the code will generate better. + +export async function tweakModelPlugin(service: Host) { + //const session = await startSession(service, {}, codeModelSchema); + const state = await new ModelState(service).init(); + //const result = tweakModelV2(session); + await service.WriteFile('code-model-v4-tweakcodemodel-v2.yaml', serialize(await tweakModelV2(state)), undefined, 'code-model-v4'); + //return processCodeModel(tweakModelV2, service, 'tweakcodemodel-v2'); +} diff --git a/powershell/plugins/powershell.ts b/powershell/plugins/powershell-v2.ts similarity index 91% rename from powershell/plugins/powershell.ts rename to powershell/plugins/powershell-v2.ts index 4020637cf7b..580fc183734 100644 --- a/powershell/plugins/powershell.ts +++ b/powershell/plugins/powershell-v2.ts @@ -3,12 +3,11 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { codemodel } from '@azure-tools/codemodel-v3'; +//import { codemodel } from '@azure-tools/codemodel-v3'; import { deserialize, applyOverrides, copyResources, copyBinaryResources, safeEval } from '@azure-tools/codegen'; import { Host } from '@azure-tools/autorest-extension-base'; import { join } from 'path'; import { Project } from '../internal/project'; -import { State } from '../internal/state'; import { generatePsm1 } from '../generators/psm1'; import { generateCsproj } from '../generators/csproj'; import { generatePsm1Custom } from '../generators/psm1.custom'; @@ -32,6 +31,9 @@ async function copyRequiredFiles(project: Project) { // Runtime files await copyResources(join(resources, 'psruntime'), async (fname, content) => project.state.writeFile(join(project.runtimeFolder, fname), content, undefined, sourceFileCSharp), project.overrides, transformOutput); + // utils cmdlets + await copyResources(join(resources, 'utils'), async (fname, content) => project.state.writeFile(join(project.utilsFolder, fname), content, undefined, sourceFileCSharp), project.overrides, transformOutput); + // Modules files await copyBinaryResources(join(resources, 'modules'), async (fname, content) => project.state.writeFile(join(project.dependencyModuleFolder, fname), content, undefined, 'binary-file')); @@ -42,7 +44,7 @@ async function copyRequiredFiles(project: Project) { } -export async function powershell(service: Host) { +export async function powershellV2(service: Host) { const debug = await service.GetValue('debug') || false; try { diff --git a/powershell/plugins/ps-namer.ts b/powershell/plugins/ps-namer-v2.ts similarity index 55% rename from powershell/plugins/ps-namer.ts rename to powershell/plugins/ps-namer-v2.ts index aea242a2307..fdc15b97a90 100644 --- a/powershell/plugins/ps-namer.ts +++ b/powershell/plugins/ps-namer-v2.ts @@ -3,14 +3,18 @@ * Licensed under the MIT License. See License.txt in the project root for license information. 
*--------------------------------------------------------------------------------------------*/ -import { Host, Channel } from '@azure-tools/autorest-extension-base'; -import { codemodel, processCodeModel, allVirtualParameters, allVirtualProperties, resolveParameterNames, resolvePropertyNames, ModelState, ParameterLocation, isMediaTypeMultipartFormData, VirtualParameter } from '@azure-tools/codemodel-v3'; -import { deconstruct, removeProhibitedPrefix, removeSequentialDuplicates, pascalCase } from '@azure-tools/codegen'; +import { codeModelSchema, CodeModel, Schema, ObjectSchema, GroupSchema, isObjectSchema, SchemaType, GroupProperty, ParameterLocation, Operation, Parameter, getAllProperties, ImplementationLocation, OperationGroup, Request, SchemaContext } from '@azure-tools/codemodel'; +import { Host, Channel, Session, startSession } from '@azure-tools/autorest-extension-base'; +//import { allVirtualParameters, allVirtualProperties, resolveParameterNames, resolvePropertyNames } from '@azure-tools/codemodel-v3'; +import { deconstruct, removeProhibitedPrefix, removeSequentialDuplicates, pascalCase, serialize } from '@azure-tools/codegen'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; import * as linq from '@azure-tools/linq'; import { singularize } from '../internal/name-inferrer'; +import { PwshModel } from '../utils/PwshModel'; +import { ModelState } from '../utils/model-state'; +import { allVirtualParameters, allVirtualProperties, resolveParameterNames, resolvePropertyNames } from '../utils/resolve-conflicts'; -type State = ModelState; +type State = ModelState; function getCmdletName(verb: string, subjectPrefix: string, subject: string): string { return `${verb}-${subjectPrefix}${subject}`; @@ -26,7 +30,7 @@ export function getDeduplicatedNoun(subjectPrefix: string, subject: string): { s // figure out what belongs to the subject const reversedFinalSubject = new Array(); - for (let mCount = length(dedupedMerge) - 1, sCount = length(dedupedSubject) - 1; sCount >= 0 && mCount >= 0; mCount-- , sCount--) { + for (let mCount = length(dedupedMerge) - 1, sCount = length(dedupedSubject) - 1; sCount >= 0 && mCount >= 0; mCount--, sCount--) { if (dedupedMerge[mCount] !== dedupedSubject[sCount]) { break; } @@ -43,18 +47,19 @@ export function getDeduplicatedNoun(subjectPrefix: string, subject: string): { s return { subjectPrefix: pascalCase(finalPrefix), subject: pascalCase(reversedFinalSubject.reverse()) }; } -async function tweakModel(state: State): Promise { +async function tweakModel(state: State): Promise { // get the value const isAzure = await state.getValue('azure', false); + // without setting snitize-names, isAzure is applied const shouldSanitize = await state.getValue('sanitize-names', isAzure); // make sure recursively that every details field has csharp for (const { index, instance } of linq.visitor(state.model)) { - if (index === 'details' && instance.default && !instance.csharp) { + if ((index === 'details' || index === 'language') && instance.default && !instance.csharp) { instance.csharp = linq.clone(instance.default, false, undefined, undefined, ['schema', 'origin']); } } - + // dolauli sanitize name if (shouldSanitize) { for (const operation of values(state.model.commands.operations)) { // clean the noun (i.e. subjectPrefix + subject) @@ -97,7 +102,7 @@ async function tweakModel(state: State): Promise { // now remove the subject from the beginning of the parameter // to reduce naming redundancy, but just for path parameters // e.g. 
get-vm -vmname ---> get-vm -name - if ((parameter.origin).in === ParameterLocation.Path) { + if ((parameter.origin).protocol?.http?.in === ParameterLocation.Path) { const sanitizedName = removeProhibitedPrefix( parameter.name, operation.details.csharp.subject, @@ -121,53 +126,55 @@ async function tweakModel(state: State): Promise { } } - for (const schema of values(state.model.schemas)) { - const virtualProperties = [...allVirtualProperties(schema.details.csharp.virtualProperties)]; + for (const schemaGroup of values(>>state.model.schemas)) { + for (const schema of schemaGroup) { + const virtualProperties = [...allVirtualProperties(schema.language.csharp?.virtualProperties)]; + + for (const property of virtualProperties) { + let prevName = property.name; + const otherPropertiesNames = values(virtualProperties) + .select(each => each.name) + .where(name => name !== property.name) + .toArray(); + + // first try to singularize the property + const singularName = singularize(property.name); + if (prevName != singularName) { + property.name = singularName; + state.message({ Channel: Channel.Debug, Text: `Sanitized property-name -> Changed property-name from ${prevName} to singular ${property.name} from model ${schema.language.csharp?.name}` }); + } - for (const property of virtualProperties) { - let prevName = property.name; - const otherPropertiesNames = values(virtualProperties) - .select(each => each.name) - .where(name => name !== property.name) - .toArray(); + // save the name again to compare in case it was modified + prevName = property.name; - // first try to singularize the property - const singularName = singularize(property.name); - if (prevName != singularName) { - property.name = singularName; - state.message({ Channel: Channel.Debug, Text: `Sanitized property-name -> Changed property-name from ${prevName} to singular ${property.name} from model ${schema.details.csharp.name}` }); - } + // now remove the model=name from the beginning of the property-name + // to reduce naming redundancy + const sanitizedName = removeProhibitedPrefix( + property.name, + schema.language.csharp?.name ? 
schema.language.csharp?.name : '', + otherPropertiesNames + ); - // save the name again to compare in case it was modified - prevName = property.name; - - // now remove the model=name from the beginning of the property-name - // to reduce naming redundancy - const sanitizedName = removeProhibitedPrefix( - property.name, - schema.details.csharp.name, - otherPropertiesNames - ); + if (prevName !== sanitizedName) { + property.alias = property.alias || []; + + // saved the prev name as alias + property.alias.push(property.name); - if (prevName !== sanitizedName) { - property.alias = property.alias || []; - - // saved the prev name as alias - property.alias.push(property.name); - - // change name - property.name = sanitizedName; - state.message({ Channel: Channel.Debug, Text: `Sanitized property-name -> Changed property-name from ${prevName} to ${property.name} from model ${schema.details.csharp.name}` }); - state.message({ Channel: Channel.Debug, Text: ` -> And, added alias '${prevName}'` }); - - // update shared properties too - if (property.sharedWith) { - for (const sharedProperty of property.sharedWith) { - if (sharedProperty.name !== sanitizedName) { - state.message({ Channel: Channel.Debug, Text: `Changing shared property ${sharedProperty.name} to ${sanitizedName}` }); - sharedProperty.alias = sharedProperty.alias || []; - sharedProperty.alias.push(sharedProperty.name); - sharedProperty.name = sanitizedName; + // change name + property.name = sanitizedName; + state.message({ Channel: Channel.Debug, Text: `Sanitized property-name -> Changed property-name from ${prevName} to ${property.name} from model ${schema.language.csharp?.name}` }); + state.message({ Channel: Channel.Debug, Text: ` -> And, added alias '${prevName}'` }); + + // update shared properties too + if (property.sharedWith) { + for (const sharedProperty of property.sharedWith) { + if (sharedProperty.name !== sanitizedName) { + state.message({ Channel: Channel.Debug, Text: `Changing shared property ${sharedProperty.name} to ${sanitizedName}` }); + sharedProperty.alias = sharedProperty.alias || []; + sharedProperty.alias.push(sharedProperty.name); + sharedProperty.name = sanitizedName; + } } } } @@ -178,22 +185,29 @@ async function tweakModel(state: State): Promise { // do collision detection work. for (const command of values(state.model.commands.operations)) { - const vp = command.details.csharp.virtualParameters; + const vp = command.details.csharp?.virtualParameters; if (vp) { resolveParameterNames([], vp); } } - for (const schema of values(state.model.schemas)) { - const vp = schema.details.csharp.virtualProperties; - if (vp) { - resolvePropertyNames([schema.details.csharp.name], vp); + for (const schemaGroup of values(>>state.model.schemas)) { + for (const schema of schemaGroup) { + const vp = schema.language.csharp?.virtualProperties; + if (vp) { + resolvePropertyNames(schema.language.csharp?.name ? 
[schema.language.csharp?.name] : [], vp); + } } } return state.model; } -export async function namer(service: Host) { - return processCodeModel(tweakModel, service, 'psnamer'); +export async function namerV2(service: Host) { + // dolauli add csharp for cmdlets in the command->operation node + //return processCodeModel(tweakModel, service, 'psnamer'); + //const session = await startSession(service, {}, codeModelSchema); + //const result = tweakModelV2(session); + const state = await new ModelState(service).init(); + await service.WriteFile('code-model-v4-psnamer-v2.yaml', serialize(await tweakModel(state)), undefined, 'code-model-v4'); } \ No newline at end of file diff --git a/powershell/resources/utils/Unprotect-SecureString.ps1 b/powershell/resources/utils/Unprotect-SecureString.ps1 new file mode 100644 index 00000000000..cb05b51a622 --- /dev/null +++ b/powershell/resources/utils/Unprotect-SecureString.ps1 @@ -0,0 +1,16 @@ +#This script converts securestring to plaintext + +param( + [Parameter(Mandatory, ValueFromPipeline)] + [System.Security.SecureString] + ${SecureString} +) + +$ssPtr = [System.Runtime.InteropServices.Marshal]::SecureStringToBSTR($SecureString) +try { + $plaintext = [System.Runtime.InteropServices.Marshal]::PtrToStringBSTR($ssPtr) +} finally { + [System.Runtime.InteropServices.Marshal]::ZeroFreeBSTR($ssPtr) +} + +return $plaintext \ No newline at end of file diff --git a/powershell/test/noun-cleaning.ts b/powershell/test/noun-cleaning.ts index 0a6374f5aab..11e6cd28ffb 100644 --- a/powershell/test/noun-cleaning.ts +++ b/powershell/test/noun-cleaning.ts @@ -5,7 +5,7 @@ import { suite, test } from 'mocha-typescript'; import * as assert from 'assert'; -import { getDeduplicatedNoun } from '../plugins/ps-namer'; +import { getDeduplicatedNoun } from '../plugins/ps-namer-v2'; @suite class TestNounCleaning { diff --git a/powershell/test/operationId-inferring.ts b/powershell/test/operationId-inferring.ts index 981d69c548b..27d35348c08 100644 --- a/powershell/test/operationId-inferring.ts +++ b/powershell/test/operationId-inferring.ts @@ -6,8 +6,7 @@ import * as assert from 'assert'; import { suite, test } from 'mocha-typescript'; -import * as pp from '../plugins/create-commands'; -import { ModelState, JsonType, processCodeModel, codemodel, components, command, http, getAllProperties, } from '@azure-tools/codemodel-v3'; +import * as pp from '../plugins/create-commands-v2'; import { Channel, JsonPath, Mapping, RawSourceMap, Message } from '@azure-tools/autorest-extension-base'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; require('source-map-support').install(); diff --git a/powershell/test/test-service-name.ts b/powershell/test/test-service-name.ts index 02ca436459f..5e780faae76 100644 --- a/powershell/test/test-service-name.ts +++ b/powershell/test/test-service-name.ts @@ -7,7 +7,7 @@ import { suite, test } from 'mocha-typescript'; import * as assert from 'assert'; import * as aio from '@azure-tools/async-io'; import { items, values, keys, Dictionary, length } from '@azure-tools/linq'; -import { titleToAzureServiceName } from '../plugins/create-commands'; +import { titleToAzureServiceName } from '../plugins/create-commands-v2'; @suite class TestServiceName { diff --git a/powershell/utils/PwshModel.ts b/powershell/utils/PwshModel.ts new file mode 100644 index 00000000000..acacaaa7d0f --- /dev/null +++ b/powershell/utils/PwshModel.ts @@ -0,0 +1,16 @@ +/*--------------------------------------------------------------------------------------------- + * 
Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ +import { codeModelSchema, CodeModel, Schema, ObjectSchema, GroupSchema, isObjectSchema, SchemaType, GroupProperty, ParameterLocation, Operation, Parameter, VirtualParameter, getAllProperties, ImplementationLocation, OperationGroup, Request, SchemaContext } from '@azure-tools/codemodel'; +import { DeepPartial } from '@azure-tools/codegen'; +import { CommandComponents } from '../utils/command-operation'; + +export class PwshModel extends CodeModel { + public commands = new CommandComponents(); + constructor(title: string, sourceTracking = false, initializer?: DeepPartial) { + super(title, sourceTracking); + + this.apply(initializer); + } +} diff --git a/powershell/utils/command-operation.ts b/powershell/utils/command-operation.ts new file mode 100644 index 00000000000..66e9bcf10ae --- /dev/null +++ b/powershell/utils/command-operation.ts @@ -0,0 +1,80 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { Components, IParameter, LanguageDetails } from './components'; +import { Extensions } from './extensions'; +import { ProgramaticOperationDetails, ProgrammaticOperation } from './programatic-operation'; +import { VirtualProperty } from './schema'; +import { Schema } from '@azure-tools/codemodel'; +import { DeepPartial } from '@azure-tools/codegen'; +import { Operation } from '@azure-tools/codemodel'; + +import { uid } from './uid'; +import { Dictionary } from '@azure-tools/linq'; + +export interface VirtualParameters { + body: Array; + operation: Array; +} + +export interface CommandOperationDetails extends ProgramaticOperationDetails { + virtualParameters?: VirtualParameters; +} + +export interface CompleterInfo { + script: string; + name: string; + description: string; +} + +export interface CommandOperation extends ProgrammaticOperation { + alias: Array; + verb: string; + noun: string; + variant: string; + category: string; + asjob: boolean; + callGraph: Array; +} + +export interface VirtualParameter { + name: string; + description: string; + required: boolean; + schema: Schema; + nameOptions: Array; + origin: VirtualProperty | IParameter; + alias: Array; + completerInfo?: CompleterInfo; +} + +export class CommandOperation extends Extensions implements CommandOperation { + public extensions = new Dictionary(); + public details: LanguageDetails; + + public responses = new Dictionary>(); + + constructor(name: string, initializer?: DeepPartial) { + super(); + this.details = { + default: { + uid: `command-operation:${uid()}`, + description: initializer?.description || '', + name, + } + }; + this.deprecated = false; + this.pure = true; + + this.apply(initializer); + } +} + +export interface CommandComponents extends Components { + +} + +export class CommandComponents extends Components { +} diff --git a/powershell/utils/components.ts b/powershell/utils/components.ts new file mode 100644 index 00000000000..81b6a5b3f7c --- /dev/null +++ b/powershell/utils/components.ts @@ -0,0 +1,214 @@ 
+/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ +import { DeepPartial } from '@azure-tools/codegen'; +import { Dictionary } from '@azure-tools/linq'; +import { Schema } from '@azure-tools/codemodel'; +import { Extensions } from './extensions'; +//import { Schema } from './schema'; +import { uid } from './uid'; + +export interface IOperationBase { + +} + +export interface IParameter extends Extensions { + name: string; + schema: Schema; + description: string; + + allowEmptyValue: boolean; + deprecated: boolean; + required: boolean; + details: LanguageDetails; +} + +export class IParameter extends Extensions { + constructor(public name: string, public schema: Schema, initializer?: DeepPartial) { + super(); + this.description = ''; + this.deprecated = false; + this.required = false; + this.details = { + default: { + uid: `parameter:${uid()}`, + description: this.description, + name, + } + }; + this.allowEmptyValue = false; + this.apply(initializer); + } +} + + +export interface IOperation extends IOperationBase { + operationId: string; + description: string; + + summary?: string; + deprecated: boolean; + + parameters: Array; +} + +export interface Components, TParameter extends IParameter> extends Extensions { + operations: Dictionary; + parameters: Dictionary; +} + +export class Components, TParameter extends IParameter> extends Extensions implements Components { + /** + * Dictionary of Operations in this model. + * + * This is intended to invert the original PathItems in the OAI model, and focus on operations, not endpoints. 
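+   * Keys are string identifiers for each operation; values are the corresponding operation models.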
+ */ + public operations = new Dictionary(); + + constructor(initializer?: DeepPartial>) { + super(); + this.apply(initializer); + } +} + +export interface ParameterDetails extends ImplementationDetails { + +} + +export interface ResponseDetails extends ImplementationDetails { + isErrorResponse: boolean; +} + +/** LanguageDetails contains a map of languages to details for a given node in the code-model */ +export interface LanguageDetails extends Dictionary { + default: T; + +} + +export interface ImplementationDetails extends Dictionary { + /** a unique id for correlation between cloned objects */ + uid: string; + + /** name used in actual implementation */ + name: string; + + /** description text */ + description: string; + + /** message used to go along with deprecation */ + deprecationMessage?: string; +} + +export enum ImplementationLocation { + Method = 'Method', + Client = 'Client', +} + +export class Example extends Extensions implements Example { + extensions = new Dictionary(); + + constructor(initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export class ExternalDocumentation extends Extensions implements ExternalDocumentation { + extensions = new Dictionary(); + + constructor(public url: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export class Link extends Extensions implements Link { + extensions = new Dictionary(); + parameters = new Dictionary(); + + constructor(initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export class Server extends Extensions implements Server { + extensions = new Dictionary(); + variables = new Dictionary(); + + constructor(public url: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export class ServerVariable extends Extensions implements ServerVariable { + extensions = new Dictionary(); + enum = new Array(); + + constructor(defaultValue: string, initializer?: DeepPartial) { + super(); + this.default = defaultValue; + this.apply(initializer); + } +} + +export class Tag extends Extensions implements Tag { + extensions = new Dictionary(); + + constructor(public name: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +/** + * @description common ways of serializing simple parameters + * @see https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.1.md#style-values + */ + +export interface SecurityRequirement extends Dictionary { +} + +export interface Example extends Extensions { + summary?: string; + description?: string; + value?: any; + externalValue?: string; // uriref +} + +export interface ExternalDocumentation extends Extensions { + description?: string; + url: string; // uriref +} + +export interface Link extends Extensions { + operationRef?: string; // uriref + operationId?: string; + parameters: Dictionary; + requestBody?: any; + description?: string; + server?: Server; +} + +export interface Server extends Extensions { + + url: string; + description?: string; + variables: Dictionary; +} + +export interface ServerVariable extends Extensions { + enum: Array; + default: string; + description?: string; +} + +export interface Tag extends Extensions { + + name: string; + description?: string; + externalDocs?: ExternalDocumentation; +} + diff --git a/powershell/utils/extensions.ts b/powershell/utils/extensions.ts new file mode 100644 index 00000000000..707de2c3eac --- /dev/null +++ b/powershell/utils/extensions.ts @@ -0,0 +1,20 @@ 
+/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { Initializer, DeepPartial } from '@azure-tools/codegen'; +import { Dictionary } from '@azure-tools/linq'; + +export class Extensions extends Initializer implements Extensions { + extensions = new Dictionary(); + + constructor() { + super(); + } +} + +export interface Extensions { + /** additional metadata extensions */ + extensions: Dictionary; +} diff --git a/powershell/utils/http-definitions.ts b/powershell/utils/http-definitions.ts new file mode 100644 index 00000000000..cbf5a9cfd79 --- /dev/null +++ b/powershell/utils/http-definitions.ts @@ -0,0 +1,44 @@ +export const StatusCodes = { + 100: 'Continue', + 101: 'SwitchingProtocols', + 200: 'OK', + 201: 'Created', + 202: 'Accepted', + 203: 'NonAuthoritativeInformation', + 204: 'NoContent', + 205: 'ResetContent', + 206: 'PartialContent', + 300: 'Ambiguous', + 301: 'Moved', + 302: 'Redirect', + 303: 'SeeOther', + 304: 'NotModified', + 305: 'UseProxy', + 306: 'Unused', + 307: 'TemporaryRedirect', + 400: 'BadRequest', + 401: 'Unauthorized', + 402: 'PaymentRequired', + 403: 'Forbidden', + 404: 'NotFound', + 405: 'MethodNotAllowed', + 406: 'NotAcceptable', + 407: 'ProxyAuthenticationRequired', + 408: 'RequestTimeout', + 409: 'Conflict', + 410: 'Gone', + 411: 'LengthRequired', + 412: 'PreconditionFailed', + 413: 'RequestEntityTooLarge', + 414: 'RequestUriTooLong', + 415: 'UnsupportedMediaType', + 416: 'RequestedRangeNotSatisfiable', + 417: 'ExpectationFailed', + 426: 'UpgradeRequired', + 500: 'InternalServerError', + 501: 'NotImplemented', + 502: 'BadGateway', + 503: 'ServiceUnavailable', + 504: 'GatewayTimeout', + 505: 'HttpVersionNotSupported' +}; \ No newline at end of file diff --git a/powershell/utils/http-operation.ts b/powershell/utils/http-operation.ts new file mode 100644 index 00000000000..1d893c86dbe --- /dev/null +++ b/powershell/utils/http-operation.ts @@ -0,0 +1,300 @@ +// /*--------------------------------------------------------------------------------------------- +// * Copyright (c) Microsoft Corporation. All rights reserved. +// * Licensed under the MIT License. See License.txt in the project root for license information. +// *--------------------------------------------------------------------------------------------*/ + +// import { Components, Example, ExternalDocumentation, ImplementationDetails, ImplementationLocation, IOperation, IOperationBase, IParameter, LanguageDetails, Link, ParameterDetails, ResponseDetails, SecurityRequirement, Server } from './components'; +// import { Extensions } from './extensions'; +// import { Schema } from './schema'; +// import { SecurityScheme } from './security-scheme'; +// import { DeepPartial } from '@azure-tools/codegen'; +// import { Dictionary } from '@azure-tools/linq'; +// import { uid } from './uid'; + +// export interface HttpOperationDetails extends ImplementationDetails { +// } + +// /** +// * An encoding attribute is introduced to give you control over the serialization of parts of multipart request bodies. +// * This attribute is only applicable to multipart and application/x-www-form-urlencoded request bodies. +// */ +// export class Encoding extends Extensions implements Encoding { +// public headers = new Array
<Header>(); + +// constructor(public key: string, initializer?: DeepPartial) { +// super(); +// this.apply(initializer); +// } +// } + +// export class Header extends Extensions implements Header { + +// public content = new Array(); + +// constructor(initializer?: DeepPartial<Header>) { +// super(); +// this.apply(initializer); +// } +// } +
+// export class MediaType extends Extensions implements MediaType { +// public encoding = new Array(); +// public accepts = new Array(); + +// constructor(public key: string, initializer?: DeepPartial) { +// super(); +// this.apply(initializer); +// } +// } + +// export class RequestBody extends Extensions implements RequestBody { + +// constructor(initializer?: DeepPartial) { +// super(); +// this.apply(initializer); +// } +// } +
+// export class Response extends Extensions implements Response { + +// public content = new Dictionary(); +// public links = new Dictionary(); +// public headers = new Array<Header>(); + +// constructor(public description: string, initializer?: DeepPartial) { +// super(); +// this.apply(initializer); +// } +// } +
+export enum ParameterLocation { + Uri = 'uri', + Query = 'query', + Header = 'header', + Cookie = 'cookie', + Path = 'path', +} +
+// export enum EncodingStyle { +// Matrix = 'matrix', +// Label = 'label', +// Simple = 'simple', +// Form = 'form', +// SpaceDelimited = 'spaceDelimited', +// PipeDelimited = 'pipeDelimited', +// DeepObject = 'deepObject' +// } + +// export type QueryEncodingStyle = +// EncodingStyle.Form +// | EncodingStyle.SpaceDelimited +// | EncodingStyle.PipeDelimited +// | EncodingStyle.DeepObject; +// export type PathEncodingStyle = EncodingStyle.Matrix | EncodingStyle.Label | EncodingStyle.Simple; +
+// export interface Encoding extends Extensions { +// key: string; +// contentType?: string; +// headers: Array<Header>; +// style?: QueryEncodingStyle; +// explode?: boolean; +// allowReserved?: boolean; +// } +
+// export interface Header extends Extensions { +// key: string; +// schema: Schema; +// explode?: boolean; +// examples: Dictionary; + +// description?: string; +// required: boolean; +// deprecated: boolean; +// allowEmptyValue: boolean; +// allowReserved: boolean; +// } +
+// export interface MediaType extends Extensions { +// key: string; +// accepts: Array; // equivalent media types for this media type (ie, text/json, application/json) +// examples: Array; +// encoding: Array; +// schema?: Schema; +// } + +// export interface RequestBody extends Extensions { +// description?: string; +// contentType: string; +// schema: Schema; +// required: boolean; +// } +
+// export interface Response extends Extensions { +// description: string; +// headers: Array<Header>; +// content: Dictionary; +// links: Dictionary; +// } + +// export interface HttpParameterDetails extends ParameterDetails { +// location: ImplementationLocation; +// } +
+// export enum HttpMethod { +// Get = 'get', +// Put = 'put', +// Post = 'post', +// Delete = 'delete', +// Options = 'options', +// Head = 'head', +// Patch = 'patch', +// Trace = 'trace' +// } +
+// export interface NewResponse { +// details: LanguageDetails; +// responseCode: string; +// description: string; +// headers: Array<Header>; +// headerSchema?: Schema; +// mimeTypes: Array; // accepted equivalent media types for this media type (ie, text/json, application/json) +// schema?: Schema; +// } +
+// export class NewResponse extends Extensions implements NewResponse { +// public details: LanguageDetails; + +// constructor(public responseCode: string, public description: string, public mimeTypes: Array, objectInitializer?: DeepPartial) { +// super(); +// this.details = { +// default: { +// uid: `response:${uid()}`, +// isErrorResponse: false, +// description: description || objectInitializer?.description || '', +// name: `${responseCode} ${mimeTypes.join(' ')}`, +// } +// }; +// this.headers = new Array<Header>
(); +// this.apply(objectInitializer); +// } +// } + +// export class HttpOperation extends Extensions implements HttpOperation { +// public details: LanguageDetails; +// public tags = new Array(); +// public parameters = new Array(); +// public responses = new Dictionary>(); +// public callbacks = new Dictionary(); +// public security = new Array(); +// public servers = new Array(); +// public deprecated = false; + +// constructor(operationId: string, public baseUrl: string, public path: string, public method: HttpMethod, initializer?: DeepPartial) { +// super(); +// this.details = { +// default: { +// uid: `http-operation:${uid()}`, +// description: initializer?.description || '', +// name: operationId, +// } +// }; + +// this.apply(initializer); +// } +// } + +// export interface HttpOperation extends IOperation, Extensions { +// details: LanguageDetails; + +// tags: Array; +// summary?: string; + +// externalDocs?: ExternalDocumentation; + +// parameters: Array; +// requestBody?: RequestBody; +// responses: Dictionary>; + +// callbacks: Dictionary; +// deprecated: boolean; +// security: Array; +// servers: Array; + +// path: string; +// baseUrl: string; +// method: HttpMethod; +// pathDescription?: string; +// pathSummary?: string; +// pathExtensions?: Dictionary; +// } + +// export interface HttpOperationParameter extends IParameter { + +// in: ParameterLocation; +// explode?: boolean; + +// encoding?: Array; +// mediaType?: string; +// style: EncodingStyle; +// examples?: Dictionary; +// allowReserved?: boolean; +// } + +// export class HttpOperationParameter extends Extensions implements HttpOperationParameter { + +// public details: LanguageDetails; +// public deprecated = false; +// public required = false; +// public allowEmptyValue = false; + +// constructor(public name: string, inWhere: ParameterLocation, implementation: ImplementationLocation, initializer?: DeepPartial) { +// super(); +// this.in = inWhere; +// this.details = { +// default: { +// uid: `http-parameter:${uid()}`, +// description: initializer?.description || '', +// location: implementation, +// name, +// } +// }; +// this.required = inWhere === ParameterLocation.Path; +// this.apply(initializer); +// } +// } + +// export function isHttpOperation(operation: IOperationBase): operation is HttpOperation { +// if ((operation).path) { +// return true; +// } +// return false; +// } + +// export class Callback implements Callback { +// constructor() { +// // unimplemented. +// } +// } + +// export interface Callback extends Dictionary { +// } + +// export interface HttpComponents extends Components { + +// examples: Dictionary; +// securitySchemes: Dictionary; +// links: Dictionary; +// callbacks: Dictionary; +// } + +// export class HttpComponents extends Components implements HttpComponents { +// public examples = new Dictionary(); +// public securitySchemes = new Dictionary(); +// public links = new Dictionary(); +// public callbacks = new Dictionary(); + +// constructor(initializer?: DeepPartial) { +// super(); +// this.apply(initializer); +// } +// } diff --git a/powershell/utils/info.ts b/powershell/utils/info.ts new file mode 100644 index 00000000000..e2722389ed3 --- /dev/null +++ b/powershell/utils/info.ts @@ -0,0 +1,55 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { Extensions } from './extensions'; +import { Dictionary } from '@azure-tools/linq'; +import { DeepPartial } from '@azure-tools/codegen'; + +export class Contact extends Extensions implements Contact { + extensions = new Dictionary(); + + constructor(initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export class Info extends Extensions implements Info { + extensions = new Dictionary(); + + constructor(public title: string, public version: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export class License extends Extensions implements License { + extensions = new Dictionary(); + + constructor(public name: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export interface Contact extends Extensions { + name?: string; + url?: string; // uriref + email?: string; // email +} + +export interface Info extends Extensions { + title: string; + description?: string; + termsOfService?: string; // uriref + contact?: Contact; + license?: License; + version: string; +} + +export interface License extends Extensions { + name: string; + url?: string; // uriref +} diff --git a/powershell/utils/model-state.ts b/powershell/utils/model-state.ts new file mode 100644 index 00000000000..d809d798f1b --- /dev/null +++ b/powershell/utils/model-state.ts @@ -0,0 +1,239 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { Channel, Host, JsonPath, Mapping, RawSourceMap, Message } from '@azure-tools/autorest-extension-base'; +import { safeEval, deserialize, Initializer, DeepPartial } from '@azure-tools/codegen'; +import { Dictionary } from '@azure-tools/linq'; + +export class ModelState> extends Initializer { + public model!: T; + protected documentName!: string; + protected currentPath: JsonPath = new Array(); + private context!: any; + private _debug = false; + private _verbose = false; + + public constructor(protected service: Host, objectInitializer?: DeepPartial>) { + super(); + this.apply(objectInitializer); + } + + async init(project?: any) { + const m = await ModelState.getModel(this.service); + this.model = m.model; + this.documentName = m.filename; + this.initContext(project); + this._debug = await this.getValue('debug', false); + this._verbose = await this.getValue('verbose', false); + return this; + } + + async initContext(project: any) { + this.context = this.context || { + $config: await this.service.GetValue(''), + $project: project, + $lib: { + path: require('path') + } + }; + return this; + } + + async readFile(filename: string): Promise { + return this.service.ReadFile(filename); + } + + + async getValue(key: string, defaultValue?: V): Promise { + // check if it's in the model first + let value = this.model && this.model.language && this.model.language.default ? (this.model.language.default)[key] : undefined; + + // fall back to the configuration + if (value == null || value === undefined) { + value = await this.service.GetValue(key); + } + + // try as a safe eval execution. 
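+    // resolution order: model.language.default -> autorest configuration (service.GetValue) ->
+    // safeEval against the { $config, $project, $lib } context built in initContext -> the supplied defaultValue.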
+ if (value === null || value === undefined) { + try { + value = safeEval(key, this.context); + } + catch { + value = null; + } + } + + if (defaultValue === undefined && value === null) { + throw new Error(`No value for configuration key '${key}' was provided`); + } + + if (typeof value === 'string') { + value = await this.resolveVariables(value); + } + + // ensure that any content variables are resolved at the end. + return (value !== null ? value : defaultValue); + } + + async setValue(key: string, value: V) { + (this.model.language.default)[key] = value; + } + + async listInputs(artifactType?: string | undefined): Promise> { + return this.service.ListInputs(artifactType); + } + + async protectFiles(path: string): Promise { + return this.service.ProtectFiles(path); + } + writeFile(filename: string, content: string, sourceMap?: Array | RawSourceMap | undefined, artifactType?: string | undefined): void { + return this.service.WriteFile(filename, content, sourceMap, artifactType); + } + + message(message: Message): void { + if (message.Channel === Channel.Debug && this._debug === false) { + return; + } + if (message.Channel === Channel.Verbose && this._verbose === false) { + return; + } + return this.service.Message(message); + } + + updateConfigurationFile(filename: string, content: string): void { + return this.service.UpdateConfigurationFile(filename, content); + } + async getConfigurationFile(filename: string): Promise { + return this.service.GetConfigurationFile(filename); + } + protected errorCount = 0; + + protected static async getModel(service: Host) { + const files = await service.ListInputs(); + const filename = files[0]; + if (files.length === 0) { + throw new Error('Inputs missing.'); + } + return { + filename, + model: deserialize(await service.ReadFile(filename), filename) + }; + } + + + cache!: Array; + replacer(key: string, value: any) { + this.cache = this.cache || new Array(); + + if (typeof value === 'object' && value !== null) { + if (this.cache.indexOf(value) !== -1) { + // Duplicate reference found + try { + // If this value does not reference a parent it can be deduped + return JSON.parse(JSON.stringify(value)); + } catch (error) { + // discard key if value cannot be deduped + return; + } + } + // Store value in our collection + this.cache.push(value); + } + return value; + } + + async resolveVariables(input: string): Promise { + let output = input; + for (const rx of [/\$\((.*?)\)/g, /\$\{(.*?)\}/g]) { + /* eslint-disable */ + for (let match; match = rx.exec(input);) { + const text = match[0]; + const inner = match[1]; + let value = await this.getValue(inner, null); + + if (value !== undefined && value !== null) { + if (typeof value === 'object') { + value = JSON.stringify(value, this.replacer, 2); + } + if (value === '{}') { + value = 'true'; + } + output = output.replace(text, value); + } + } + } + return output; + } + + + public path(...childPath: JsonPath) { + // this strategy for tracking source path locations + // has proved fundementally crappy. + + // will be removing this stuff and transitioning to source-track method. 
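+    // for now this is a no-op that returns the current state instance unchanged.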
+ + //const result = new ModelState(this.service, this); + //result.currentPath = [...this.currentPath, ...childPath]; + // return result; + return this; + } + + public checkpoint() { + if (this.errorCount > 0) { + throw new Error(); + } + } + + protected msg(channel: Channel, message: string, key: Array, details: any) { + this.message({ + Channel: channel, + Key: key, + Source: [ + { + document: this.documentName, + Position: { + path: this.currentPath + } + } + ], + Text: message, + Details: details + }); + } + + public warning(message: string, key: Array, details?: any) { + this.msg(Channel.Warning, message, key, details); + } + public hint(message: string, key: Array, details?: any) { + this.msg(Channel.Hint, message, key, details); + } + + public error(message: string, key: Array, details?: any) { + this.errorCount++; + this.msg(Channel.Error, message, key, details); + } + public fatal(message: string, key: Array, details?: any) { + this.errorCount++; + this.msg(Channel.Fatal, message, key, details); + } + + protected output(channel: Channel, message: string, details?: any) { + this.message({ + Channel: channel, + Text: message, + Details: details + }); + } + + public debug(message: string, details: any) { + this.output(Channel.Debug, message, details); + } + public verbose(message: string, details: any) { + this.output(Channel.Verbose, message, details); + } + public log(message: string, details: any) { + this.output(Channel.Information, message, details); + } +} diff --git a/powershell/utils/programatic-operation.ts b/powershell/utils/programatic-operation.ts new file mode 100644 index 00000000000..98c8fe7ede1 --- /dev/null +++ b/powershell/utils/programatic-operation.ts @@ -0,0 +1,46 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { ImplementationDetails, IOperation, IParameter, LanguageDetails } from './components'; +import { Extensions } from './extensions'; +//import { Schema } from './schema'; +import { Schema } from '@azure-tools/codemodel'; +import { Dictionary } from '@azure-tools/linq'; +import { uid } from './uid'; +import { DeepPartial } from '@azure-tools/codegen'; + +export interface ProgrammaticOperation extends IOperation { + responses: Dictionary>; + pure: boolean; // side-effect free? May be helpful for deciding how to generate code. 
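+  // extended by IntrinsicOperation below and by CommandOperation in command-operation.ts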
+} + +export interface IntrinsicOperation extends ProgrammaticOperation { + +} + +export class IntrinsicOperation extends Extensions implements IntrinsicOperation { + public details: LanguageDetails; + + public responses = new Dictionary>(); + public operationType: 'IntrinsicOperation' = 'IntrinsicOperation'; + + constructor(name: string, deprecated: boolean, pure: boolean, initializer?: DeepPartial) { + super(); + this.details = { + default: { + uid: `intrinsic-operation:${uid()}`, + description: initializer?.description || '', + name, + } + }; + this.deprecated = deprecated; + this.pure = pure; + + this.apply(initializer); + } +} + +export interface ProgramaticOperationDetails extends ImplementationDetails { +} diff --git a/powershell/utils/resolve-conflicts.ts b/powershell/utils/resolve-conflicts.ts new file mode 100644 index 00000000000..ce9f4a6b62c --- /dev/null +++ b/powershell/utils/resolve-conflicts.ts @@ -0,0 +1,60 @@ +import { VirtualProperties } from './schema'; +import { VirtualParameters, VirtualParameter } from './command-operation'; +import { selectName } from '@azure-tools/codegen'; +import { values } from '@azure-tools/linq'; + +export function resolvePropertyNames(reservedNames: Iterable, virtualProperties: VirtualProperties) { + const usedNames = new Set(reservedNames); + + const allProps = values(virtualProperties.owned, virtualProperties.inherited, virtualProperties.inlined).toArray(); + + for (const prop of allProps) { + if (usedNames.has(prop.name)) { + prop.name = selectName(prop.nameOptions, usedNames); + } else { + usedNames.add(prop.name); + } + } + +} + +export function resolveParameterNames(reservedNames: Iterable, virtualParameters: VirtualParameters) { + const usedNames = new Set(reservedNames); + const collisions = new Set(); + + // we need to make sure we avoid name collisions. operation parameters get first crack. + for (const each of values(virtualParameters.operation)) { + if (usedNames.has(each.name)) { + collisions.add(each); + } else { + usedNames.add(each.name); + } + } + + // handle operation parameters + for (const each of collisions) { + each.name = selectName(each.nameOptions, usedNames); + } + collisions.clear(); + + // now do body parameters. + for (const each of values(virtualParameters.body)) { + if (usedNames.has(each.name)) { + collisions.add(each); + } else { + usedNames.add(each.name); + } + } + + for (const each of collisions) { + each.name = selectName(each.nameOptions, usedNames); + } +} + +export function allVirtualProperties(virtualProperties?: VirtualProperties) { + return virtualProperties ? values(virtualProperties.owned, virtualProperties.inherited, virtualProperties.inlined).toArray() : []; +} + +export function allVirtualParameters(virtualParameters?: VirtualParameters) { + return virtualParameters ? values(virtualParameters.operation, virtualParameters.body).toArray() : []; +} \ No newline at end of file diff --git a/powershell/utils/schema.ts b/powershell/utils/schema.ts new file mode 100644 index 00000000000..77a0ac62ece --- /dev/null +++ b/powershell/utils/schema.ts @@ -0,0 +1,313 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { ExternalDocumentation, ImplementationDetails, LanguageDetails } from './components'; +import { Extensions } from './extensions'; +import { DeepPartial, } from '@azure-tools/codegen'; +import { Dictionary, values } from '@azure-tools/linq'; +import { uid } from './uid'; +import { Schema, ObjectSchema, Property, SchemaType, isObjectSchema } from '@azure-tools/codemodel'; +import { EnhancedTypeDeclaration } from '../llcsharp/schema/extended-type-declaration'; +import { ModelClass } from '../llcsharp/model/model-class'; +import { ModelInterface } from '../llcsharp/model/interface'; + +export interface PropertyDetails extends ImplementationDetails { + required: boolean; + readOnly: boolean; +} + +export interface EnumValue { + value: any; + description: string; + name: string; +} + +export interface EnumDetails { + modelAsString: boolean; + values: Array; + name: string; +} + +export enum Purpose { + Header = 'Header', +} + +export interface VirtualProperty { + /** The property that this represents */ + property: Property; + + /** The things that went into building the name */ + nameComponents: Array; + + /** Names To use in priority order */ + nameOptions: Array; + + /** the name of this virtual property */ + name: string; + + /** the member that should be called to get to the virtual property. (may be recursive) */ + accessViaProperty?: VirtualProperty; + + accessViaMember?: VirtualProperty; + + /** the member's schema */ + accessViaSchema?: Schema; + + originalContainingSchema: Schema; + + private?: boolean; + + alias: Array; + + description: string; + + format?: PropertyFormat; + + required: boolean; + + sharedWith?: Array; +} + + +interface PropertyFormat { + suppressFormat?: boolean; + index?: number; + width?: number; + label?: string; +} + +export interface VirtualProperties { + owned: Array; + inherited: Array; + inlined: Array; +} + +export interface SchemaDetails extends ImplementationDetails { + /** namespace of the implementation of this item */ + namespace?: string; + + enum?: EnumDetails; + purpose?: Purpose; + virtualProperties?: VirtualProperties; + + /** if this is a child of a polymorphic class, this will have the value of the descriminator. */ + discriminatorValue?: string; + + suppressFormat?: boolean; + + typeDeclaration?: EnhancedTypeDeclaration; + classImplementation?: ModelClass; + interfaceImplementation?: ModelInterface; + internalInterfaceImplementation?: ModelInterface; + interfaceName?: string; + internalInterfaceName?: string; + fullInternalInterfaceName?: string; + fullname?: string; +} + +// export class Schema extends Extensions implements Schema { +// public details: LanguageDetails; +// public required = new Array(); +// public enum = new Array(); +// public allOf = new Array(); +// public oneOf = new Array(); +// public anyOf = new Array(); +// public properties = new Dictionary(); +// public extensions = new Dictionary(); + +// constructor(name: string, initializer?: DeepPartial) { +// super(); +// this.details = { +// default: { +// uid: `schema:${uid()}`, +// description: '', +// name +// } +// }; +// this.apply(initializer); +// } +// } + +export function getPolymorphicBases(schema: ObjectSchema): Array { + // are any of my parents polymorphic directly, or any of their parents? + return [...values(schema.parents?.all).where(parent => (parent).discriminator ? 
true : false)]; +} + +export function getAllProperties(schema: Schema): Array { + if (isObjectSchema(schema)) { + return [...values(schema.parents ? schema.parents.immediate : []).selectMany(getAllProperties), ...values(schema.properties)]; + } else { + return []; + } +} + +export function getAllPublicVirtualProperties(virtualProperties?: VirtualProperties): Array { + const props = virtualProperties || { + owned: [], + inherited: [], + inlined: [] + }; + + return values(props.owned, props.inherited, props.inlined).where(each => !each.private).toArray(); +} + +export function getAllVirtualProperties(virtualProperties?: VirtualProperties): Array { + const props = virtualProperties || { + owned: [], + inherited: [], + inlined: [] + }; + + return values(props.owned, props.inherited, props.inlined).toArray(); +} + +export function getVirtualPropertyFromPropertyName(virtualProperties: VirtualProperties | undefined, propertyName: string): VirtualProperty | undefined { + const props = virtualProperties || { + owned: [], + inherited: [], + inlined: [] + }; + return values([...values(props.owned), ...values(props.inherited), ...values(props.inlined)]).first(each => each.property.serializedName === propertyName); +} + + +// export interface Property extends Extensions { +// details: LanguageDetails; + +// /** description can be on the property reference, so that properties can have a description different from the type description. */ +// description?: string; + +// schema: Schema; +// } + +// export class Property extends Extensions implements Property { +// public serializedName: string; +// public details: LanguageDetails; +// public extensions = new Dictionary(); + +// constructor(name: string, initializer?: DeepPartial) { +// super(); +// this.serializedName = name; +// this.details = { +// default: { +// readOnly: false, +// uid: `property:${uid()}`, +// description: initializer?.description || '', +// name, +// required: false +// } +// }; +// this.apply(initializer); +// } +// } + +export class Discriminator extends Extensions implements Discriminator { + public extensions = new Dictionary(); + public mapping = new Dictionary(); + + constructor(public propertyName: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export interface Discriminator extends Extensions { + propertyName: string; + mapping: Dictionary; +} + +export enum JsonType { + Array = 'array', + Boolean = 'boolean', + Integer = 'integer', + Number = 'number', + Object = 'object', + String = 'string' +} + +export function isJsonType(type: JsonType, schema?: Schema): schema is Schema { + return schema ? 
schema.type === SchemaType.Object : false; +} + +export function isSchemaObject(schema?: Schema): schema is Schema { + return isJsonType(JsonType.Object, schema); +} + +export class XML extends Extensions implements XML { + public extensions = new Dictionary(); + public attribute = false; + public wrapped = false; + + constructor(initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export interface XML extends Extensions { + name?: string; + namespace?: string; // url + prefix?: string; + attribute: boolean; + wrapped: boolean; +} + +// export interface Schema extends Extensions { + +// details: LanguageDetails; + +// /* common properties */ +// type?: JsonType; +// title?: string; +// description?: string; +// format?: string; +// nullable: boolean; +// readOnly: boolean; +// writeOnly: boolean; +// deprecated: boolean; +// required: Array; + +// /* number restrictions */ +// multipleOf?: number; +// maximum?: number; +// exclusiveMaximum?: boolean; +// minimum?: number; +// exclusiveMinimum?: boolean; + +// /* string restrictions */ +// maxLength?: number; +// minLength?: number; +// pattern?: string; // regex + +// /* array restrictions */ +// maxItems?: number; +// minItems?: number; +// uniqueItems?: boolean; + +// /* object restrictions */ +// maxProperties?: number; +// minProperties?: number; + +// /* unbounded properties */ +// example?: any; +// default?: any; + +// /* Properties that are objects */ +// discriminator?: Discriminator; +// externalDocs?: ExternalDocumentation; +// xml?: XML; + +// /* Properties that are collections of things that are not references */ +// enum: Array; + +// /* properties with potential references */ +// not?: Schema; +// allOf: Array; +// oneOf: Array; +// anyOf: Array; +// items?: Schema; +// properties: Dictionary; +// additionalProperties?: boolean | Schema; +// } diff --git a/powershell/utils/security-scheme.ts b/powershell/utils/security-scheme.ts new file mode 100644 index 00000000000..5ad1d05e51b --- /dev/null +++ b/powershell/utils/security-scheme.ts @@ -0,0 +1,192 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { Extensions } from './extensions'; +import { ParameterLocation } from './http-operation'; +import { Dictionary } from '@azure-tools/linq'; +import { DeepPartial } from '@azure-tools/codegen'; + +export enum Scheme { + Bearer = 'bearer' +} +export enum SecurityType { + ApiKey = 'apiKey', + Http = 'http', + OAuth2 = 'oauth2', + OpenIDConnect = 'openIdConnect' +} + +export class APIKeySecurityScheme extends Extensions implements APIKeySecurityScheme { + extensions = new Dictionary(); + + constructor(public name: string, inWhere: ParameterLocation, initializer?: DeepPartial) { + super(); + this.in = inWhere; + this.type = SecurityType.ApiKey; + this.apply(initializer); + } +} + +export class BearerHTTPSecurityScheme extends Extensions implements BearerHTTPSecurityScheme { + extensions = new Dictionary(); + scheme = Scheme.Bearer; + + constructor(initializer?: DeepPartial) { + super(); + this.type = SecurityType.Http; + this.apply(initializer); + } +} + +export class ImplicitOAuthFlow extends Extensions implements ImplicitOAuthFlow { + extensions = new Dictionary(); + scopes = new Dictionary(); + + constructor(public authorizationUrl: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export class NonBearerHTTPSecurityScheme extends Extensions implements NonBearerHTTPSecurityScheme { + extensions = new Dictionary(); + + constructor(public scheme: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + this.type = SecurityType.Http; + } +} + +export class OAuth2SecurityScheme extends Extensions implements OAuth2SecurityScheme { + extensions = new Dictionary(); + + constructor(public flows: OAuthFlows, initializer?: DeepPartial) { + super(); + this.type = SecurityType.OAuth2; + this.apply(initializer); + } + +} + +export class OAuthFlows extends Extensions implements OAuthFlows { + extensions = new Dictionary(); + + constructor(initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export interface OpenIdConnectSecurityScheme extends Extensions { + + type: SecurityType.OpenIDConnect; + openIdConnectUrl: string; // url + description?: string; +} + +export class OpenIdConnectSecurityScheme extends Extensions implements OpenIdConnectSecurityScheme { + extensions = new Dictionary(); + + constructor(public openIdConnectUrl: string, initializer?: DeepPartial) { + super(); + this.type = SecurityType.OpenIDConnect; + this.apply(initializer); + } +} + +export interface PasswordOAuthFlow extends Extensions { + + tokenUrl: string; // uriref + refreshUrl?: string; // uriref + scopes: Dictionary; +} + +export class PasswordOAuthFlow extends Extensions implements PasswordOAuthFlow { + extensions = new Dictionary(); + scopes = new Dictionary(); + + constructor(public tokenUrl: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export type HTTPSecurityScheme = NonBearerHTTPSecurityScheme | BearerHTTPSecurityScheme; +export type SecurityScheme = + APIKeySecurityScheme + | HTTPSecurityScheme + | OAuth2SecurityScheme + | OpenIdConnectSecurityScheme; + +export interface APIKeySecurityScheme extends Extensions { + + type: SecurityType.ApiKey; + name: string; + in: ParameterLocation; + description?: string; +} + +export class AuthorizationCodeOAuthFlow extends Extensions implements AuthorizationCodeOAuthFlow { + extensions = new Dictionary(); + scopes = new Dictionary(); + constructor(public 
authorizationUrl: string, tokenUrl: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} +export class ClientCredentialsFlow extends Extensions implements ClientCredentialsFlow { + extensions = new Dictionary(); + scopes = new Dictionary(); + constructor(public tokenUrl: string, initializer?: DeepPartial) { + super(); + this.apply(initializer); + } +} + +export interface AuthorizationCodeOAuthFlow extends Extensions { + authorizationUrl: string; // uriref + tokenUrl: string; // uriref + refreshUrl?: string; // uriref + scopes: Dictionary; +} + +export interface BearerHTTPSecurityScheme extends Extensions { + scheme: Scheme.Bearer; + bearerFormat?: string; + type: SecurityType.Http; + description?: string; +} + +export interface ClientCredentialsFlow extends Extensions { + tokenUrl: string; // uriref + refreshUrl?: string; // uriref + scopes: Dictionary; +} + +export interface ImplicitOAuthFlow extends Extensions { + authorizationUrl: string; // uriref + refreshUrl?: string; // uriref + scopes: Dictionary; +} + +export interface NonBearerHTTPSecurityScheme extends Extensions { + scheme: string; + description?: string; + type: SecurityType.Http; +} + +export interface OAuth2SecurityScheme extends Extensions { + type: SecurityType.OAuth2; + flows: OAuthFlows; + description?: string; +} + +export interface OAuthFlows extends Extensions { + implicit?: ImplicitOAuthFlow; + password?: PasswordOAuthFlow; + clientCredentials?: ClientCredentialsFlow; + authorizationCode?: AuthorizationCodeOAuthFlow; +} diff --git a/powershell/utils/uid.ts b/powershell/utils/uid.ts new file mode 100644 index 00000000000..dbeb7ffd9db --- /dev/null +++ b/powershell/utils/uid.ts @@ -0,0 +1,5 @@ +let n = 0; + +export function uid() { + return n++; +} \ No newline at end of file diff --git a/samples/Xkcd/xkcd.yaml b/samples/Xkcd/xkcd.yaml index 9994099c81c..12dc56adfae 100644 --- a/samples/Xkcd/xkcd.yaml +++ b/samples/Xkcd/xkcd.yaml @@ -30,6 +30,7 @@ consumes: - application/json paths: + /info.0.json: get: operationId: xkcd_getComicForToday @@ -39,45 +40,5 @@ paths: '200': description: OK schema: - $ref: '#/definitions/comic' - '/{comicId}/info.0.json': - get: - operationId: xkcd_getComic - description: | - Fetch comics and metadata by comic id. 
- parameters: - - in: path - name: comicId - required: true - type: number - responses: - '200': - description: OK - schema: - $ref: '#/definitions/comic' -definitions: - comic: - properties: - alt: - type: string - day: - type: string - img: - type: string - link: - type: string - month: - type: string - news: - type: string - num: - type: number - safe_title: - type: string - title: - type: string - transcript: - type: string - year: - type: string - type: object \ No newline at end of file + type: string + \ No newline at end of file diff --git a/tests-upgrade/.gitignore b/tests-upgrade/.gitignore new file mode 100644 index 00000000000..6b78566a203 --- /dev/null +++ b/tests-upgrade/.gitignore @@ -0,0 +1,4 @@ +generated +generate + +CompareResult \ No newline at end of file diff --git a/tests-upgrade/AutoRestUpgradeTest.ps1 b/tests-upgrade/AutoRestUpgradeTest.ps1 new file mode 100644 index 00000000000..3b5f512175d --- /dev/null +++ b/tests-upgrade/AutoRestUpgradeTest.ps1 @@ -0,0 +1,348 @@ +param([switch]$Generate,[string]$TestName,[switch]$SourceVersion,[switch]$TargetVersion,[switch]$AllowList,[switch]$BlackList) +#Need to use the right version of node.js +#nvs use 10.16.0 +#Create folder to save Compare Results +if(!(Test-Path CompareResult)) +{ + New-Item CompareResult -ItemType "directory" +} +#Define the success code names +$global:testNameStr +#Define the global param 'isError' to determine wheather throw the error +$global:isError = $false +#Import the Configuration Json +$conf = (Get-Content 'Configuration.json') | ConvertFrom-Json +if($AllowList) +{ + #Get the whiteList from json + $whiteList = $conf.WhiteList +} +if($BlackList) +{ + #Get the blackList from json + $blackTestList = $conf.BlackList +} + +#Determine whether the difference is command +function IsCommand([Object]$SourceFile , [Object]$TargetFile) +{ + $isCommandResult = $True + $difference = Compare-Object $SourceFile $TargetFile + foreach($line in $difference) + { + $lineInfo = $line.InputObject.Replace(' ','') + $lineCompareResult =$lineInfo.Startswith('//') + if(!$lineCompareResult) + { + $isCommandResult = $false + break + } + } + return $isCommandResult +} + +#Determine the filefolder in BlackList +function IsInBlackList([Object]$JudgList,[Array]$BlackContent) +{ + $isInBlackListResult = $false + foreach($BlackDetail in $BlackContent) + { + if((!((Get-Item $JudgList.PSPath) -is [System.IO.DirectoryInfo])) -or ($JudgList.Name.Startswith($BlackDetail)) -or ($JudgList.Name.Startswith('Compare'))) + { + $isInBlackListResult = $True + break + } + } + return $isInBlackListResult +} + +#Determine whether the file needs to be ignored +function IsNeedIgnore([string]$inputFileName , [Array]$ignoreArray) +{ + $Ignore = $false + foreach($ignoreDetail in $ignoreArray) + { + if($inputFileName.Startswith($ignoreDetail)) + { + $Ignore =$True + break + } elseif ($ignoreDetail.Contains("*.") -and $inputFileName.EndsWith($ignoreDetail.Split(".")[-1])) { + $Ignore =$True + break + } + } + return $Ignore +} + +#Code generation +function GenerateCode() +{ + $GenerateResult=$True + #source and generate codes all need to be generated + if((-not $SourceVersion) -and (-not $TargetVersion)) + { + #generate source code + Write-Host -ForegroundColor Green 'M3' + $GenerateSourceResult = autorest-beta --use:@autorest/powershell@2.1.400 --output-folder:.\generate\m3 --Debug | Out-string + #generate target code + Write-Host -ForegroundColor Green 'M4' + $GenerateTargetResult = autorest-beta --use:..\..\ --output-folder:.\generate\m4 --Debug | 
+        if(!$GenerateSourceResult.Contains('Generation Complete') -or !$GenerateTargetResult.Contains('Generation Complete'))
+        {
+            $GenerateResult = $false
+        }
+    }elseif($SourceVersion)
+    {
+        Write-Host -ForegroundColor Green 'M3'
+        $GenerateSourceResult = autorest-beta --use:@autorest/powershell@2.1.400 --output-folder:.\generate\m3 --Debug | Out-String
+        if(!$GenerateSourceResult.Contains('Generation Complete'))
+        {
+            $GenerateResult = $false
+        }
+    }else
+    {
+        Write-Host -ForegroundColor Green 'M4'
+        $GenerateTargetResult = autorest-beta --use:..\..\ --output-folder:.\generate\m4 --Debug | Out-String
+        if(!$GenerateTargetResult.Contains('Generation Complete'))
+        {
+            $GenerateResult = $false
+        }
+    }
+    return $GenerateResult
+}
+
+#Compare the generated output of the two versions
+function CompareGeneratedCode([string]$inputSourcePath,[string]$inputTargetPath,[string]$testFileName)
+{
+    #build one dictionary per output folder: the key is the relative path of a file
+    #and the value is an object with two properties (HashCode, Status)
+    $initialDict = @{}
+    #collect the files generated by M3
+    cd $inputSourcePath
+    $initFileList = Get-ChildItem -Recurse -Force
+    $initIgnoreFileList = (($inputSourcePath+'\generated\modules'), ($inputSourcePath+'\utils'), ($inputSourcePath+'\*.nuspec'), ($inputSourcePath+'\.gitignore'),($inputSourcePath+'\tools\Resources\.gitignore'))
+    $targetIgnoreFileList = (($inputTargetPath+'\generated\modules'), ($inputTargetPath+'\utils'),($inputTargetPath+'\*.nuspec'),($inputTargetPath+'\.gitignore'),($inputTargetPath+'\tools\Resources\.gitignore'))
+    #walk the source file list and compute the hash of each file
+    foreach( $initFile in $initFileList)
+    {
+        $ignoreResult = IsNeedIgnore -inputFileName $initFile.FullName -ignoreArray $initIgnoreFileList
+        if(!$ignoreResult)
+        {
+            #create an object with HashCode, Status
+            $obj = New-Object psobject | Select-Object -Property HashCode, Status
+            #skip folders; only hash files
+            if(!((Get-Item $initFile.PSPath) -is [System.IO.DirectoryInfo]))
+            {
+                #get the hashcode of the file
+                $hashTable = $initFile.PSPath.Replace('Microsoft.PowerShell.Core\FileSystem::','') | Get-FileHash
+                $obj.HashCode = $hashTable.Hash
+                #get the relative path of the file
+                $detailPath = $hashTable.Path.Replace($inputSourcePath,'')
+                $initialDict.Add($detailPath,$obj)
+            }
+        }
+    }
+    $targetDict = @{}
+    #collect the files generated by M4
+    cd $inputTargetPath
+    $targetFileList = Get-ChildItem -Recurse -Force
+    #walk the target file list and compute the hash of each file
+    foreach( $targetFile in $targetFileList)
+    {
+        $ignoreResult = IsNeedIgnore -inputFileName $targetFile.FullName -ignoreArray $targetIgnoreFileList
+        if(!$ignoreResult)
+        {
+            $obj = New-Object psobject | Select-Object -Property HashCode, Status
+            #skip folders; only hash files
+            if(!((Get-Item $targetFile.PSPath) -is [System.IO.DirectoryInfo]))
+            {
+                #get the hashcode of the file
+                $hashTable = $targetFile.PSPath.Replace('Microsoft.PowerShell.Core\FileSystem::','') | Get-FileHash
+                $obj.HashCode = $hashTable.Hash
+                #get the relative path of the file
+                $detailPath = $hashTable.Path.Replace($inputTargetPath,'')
+                $targetDict.Add($detailPath,$obj)
+            }
+        }
+    }
+    [object[]] $difArray=@()
+
+    #look up every M3 file in the target dictionary
+    #the status means: 0 - the file does not exist in the other folder
+    #                  1 - the file's hash is the same as in the other folder
+    #                  2 - the file's hash is different from the one in the other folder
+    foreach($initDictDetail in $initialDict.Keys)
+    {
+        $difDetail = New-Object psobject | Select-Object -Property fileName,Path,fileFolderName,Status
+        #the file does not exist in targetDict
+        if($targetDict[$initDictDetail] -eq $null)
+        {
+            $difDetail.Path = $initDictDetail
+            $difDetail.fileFolderName = 'M3'
+            $splitStrings = $initDictDetail.Split('\')
+            $difDetail.fileName = $splitStrings[$splitStrings.Count-1]
+            $difDetail.Status = 'missing in M4'
+            #record the status of the file
+            $initialDict[$initDictDetail].Status = 0
+            $difArray += $difDetail
+        }elseif($targetDict[$initDictDetail].HashCode -ne $initialDict[$initDictDetail].HashCode)
+        {
+            $M3CompareFile = Get-Content ($inputSourcePath + $initDictDetail)
+            $M4CompareFile = Get-Content ($inputTargetPath + $initDictDetail)
+            $isCommandResult = IsCommand -SourceFile $M3CompareFile -TargetFile $M4CompareFile
+            if( $isCommandResult -ne $True)
+            {
+                $difDetail.Path = $initDictDetail
+                $difDetail.fileFolderName = 'M3'
+                $splitStrings = $initDictDetail.Split('\')
+                $difDetail.fileName = $splitStrings[$splitStrings.Count-1]
+                $difDetail.Status = 'different'
+                #record the status of the file
+                $initialDict[$initDictDetail].Status = 2
+                $targetDict[$initDictDetail].Status = 2
+                $difArray += $difDetail
+            }else
+            {
+                $initialDict[$initDictDetail].Status = 1
+                $targetDict[$initDictDetail].Status = 1
+            }
+        }else
+        {
+            $initialDict[$initDictDetail].Status = 1
+            $targetDict[$initDictDetail].Status = 1
+        }
+    }
+    #collect the files that only exist in the M4 output (their status is still null)
+    foreach($targetDetail in $targetDict.Keys)
+    {
+        $difDetail = New-Object psobject | Select-Object -Property fileName,Path,fileFolderName,Status
+        if($targetDict[$targetDetail].Status -eq $null)
+        {
+            $difDetail.Path = $targetDetail
+            $difDetail.fileFolderName = 'M4'
+            $splitStrings = $targetDetail.Split('\')
+            $difDetail.fileName = $splitStrings[$splitStrings.Count-1]
+            $difDetail.Status = 'missing in M3'
+            $difArray += $difDetail
+        }
+    }
+    if($difArray.Count -gt 0)
+    {
+        $global:isError = $True
+        $filename = Join-Path $PSScriptRoot 'CompareResult' ($testFileName + (Get-Date -Format 'yyyyMMddhhmmss') + '.csv')
+        $difArray | Select-Object -Property fileName,Path,fileFolderName,Status | Sort-Object -Property fileName | Export-Csv -Path $filename
+        Write-Warning ('There are ' + $difArray.Count + ' different files')
+    }else
+    {
+        $global:testNameStr += $testFileName + "`n"
+    }
+}
+
+$currentPath = Get-Location
+$fileList = Get-ChildItem
+#run a single test case
+if($TestName -ne $null -and ($TestName -ne ''))
+{
+    cd ($PSScriptRoot+'\'+$TestName)
+    try
+    {
+        $IsGenerateSuccess = GenerateCode
+        if(-not $Generate -and $IsGenerateSuccess)
+        {
+            $sourceFilePath = Join-Path $PSScriptRoot $TestName 'generate\m3'
+            $targetFilePath = Join-Path $PSScriptRoot $TestName 'generate\m4'
+            CompareGeneratedCode -inputSourcePath $sourceFilePath -inputTargetPath $targetFilePath -testFileName $TestName
+        }
+    }
+    catch
+    {
+        Write-Host -ForegroundColor yellow ('Generate error: ' + $TestName)
+    }
+}elseif($AllowList)
+{
+    #run every test folder in the WhiteList
+    foreach($eachTest in $whiteList)
+    {
+        $eachTest
+        cd (Join-Path $PSScriptRoot $eachTest)
+        try
+        {
+            $IsGenerateSuccess = GenerateCode
+            if(-not $Generate -and $IsGenerateSuccess)
+            {
+                $sourceFilePath = Join-Path $PSScriptRoot $eachTest 'generate\m3'
+                $targetFilePath = Join-Path $PSScriptRoot $eachTest 'generate\m4'
+                CompareGeneratedCode -inputSourcePath $sourceFilePath -inputTargetPath $targetFilePath -testFileName $eachTest
+            }
+        }
+        catch
+        {
+            Write-Host -ForegroundColor yellow ('Generate error: ' + $eachTest)
+        }
+    }
+}elseif($BlackList)
+{
+    #run every test folder, skipping those 
listed in the BlackList
+    foreach($fileDetail in $fileList)
+    {
+        $InBlackListResult = IsInBlackList -JudgList $fileDetail -BlackContent $blackTestList
+        if(!$InBlackListResult)
+        {
+            try
+            {
+                Write-Host $fileDetail.Name
+                cd (Join-Path $PSScriptRoot $fileDetail.Name)
+                $IsGenerateSuccess = GenerateCode
+                if(-not $Generate -and $IsGenerateSuccess)
+                {
+                    $sourceFilePath = Join-Path $PSScriptRoot $fileDetail.Name 'generate\m3'
+                    $targetFilePath = Join-Path $PSScriptRoot $fileDetail.Name 'generate\m4'
+                    CompareGeneratedCode -inputSourcePath $sourceFilePath -inputTargetPath $targetFilePath -testFileName $fileDetail.Name
+                }
+            }
+            catch
+            {
+                Write-Host -ForegroundColor yellow ('Generate error: ' + $fileDetail.Name)
+            }
+        }
+    }
+}
+else
+{
+    foreach($fileDetail in $fileList)
+    {
+        if(((Get-Item $fileDetail.PSPath) -is [System.IO.DirectoryInfo]) -and (!$fileDetail.Name.Startswith('Compare')))
+        {
+            $detailPath = Join-Path $PSScriptRoot $fileDetail.Name
+            cd $detailPath
+            try
+            {
+                Write-Host -ForegroundColor Blue $fileDetail.Name
+                $IsGenerateSuccess = GenerateCode
+                if(-not $Generate -and $IsGenerateSuccess)
+                {
+                    $sourceFilePath = Join-Path $detailPath 'generate\m3'
+                    $targetFilePath = Join-Path $detailPath 'generate\m4'
+                    CompareGeneratedCode -inputSourcePath $sourceFilePath -inputTargetPath $targetFilePath -testFileName $fileDetail.Name
+                }
+            }
+            catch
+            {
+                Write-Host -ForegroundColor yellow ('Generate error: ' + $fileDetail.Name)
+            }
+        }
+    }
+}
+cd $PSScriptRoot
+Write-Host $global:testNameStr
+$global:testNameStr | Out-File .\CompareResult\GenerateSuccessList.Txt
+#Throw an error if any differences were found
+if($global:isError)
+{
+    throw 'Error: The code generated by the target version is different from the code generated by the source version.'
+}else +{ + Write-Host -ForegroundColor blue 'All generated codes are the same' +} diff --git a/tests-upgrade/Configuration.json b/tests-upgrade/Configuration.json new file mode 100644 index 00000000000..045f89ef52b --- /dev/null +++ b/tests-upgrade/Configuration.json @@ -0,0 +1,76 @@ +{ + "WhiteList": [ + "basic-disableazure-get", + "basic-disableazure-response", + "basic-get", + "basic-get-delete", + "basic-get-delete-put-patch", + "basic-get-querystr", + "basic-get-response-operation", + "basic-polymorphism", + "basic-request-methods", + "basic-response-multioperation", + "basic-spec-required", + "basic-spec-root", + "component-definitions-combined", + "component-definitions-local", + "component-definitions-remote", + "component-multiparam", + "component-param", + "component-param-inbody", + "component-param-localremote", + "component-param-remote", + "datamodels-datatypes-array", + "datamodels-datatypes-integer", + "datamodels-datatypes-string", + "datamodels-datatypes-object", + "datamodels-datatypes-unixtime", + "datamodels-combineschema", + "directive-model", + "directive-tableformat", + "extension-ms-azureresource", + "extension-ms-clientflatten", + "extension-ms-clientname", + "extension-ms-discriminatorvalue", + "extension-ms-enum", + "extension-ms-examples", + "extension-ms-longruningoperation", + "extension-ms-mutability", + "extension-ms-pageable", + "extension-ms-paramlocation", + "directive-aliasremoval", + "directive-cmdlet", + "directive-parameter", + "datamodels-datatypes-file" + ], + "BlackList": [ + "basic-get-querystr", + "basic-get-delete", + "basic-get-delete-put-patch", + "basic-request-methods", + "basic-get-response-operation", + "basic-response-multioperation", + "basic-response-defaultoperation", + "basic-disableazure-get", + "component-param", + "component-multiparam", + "component-param-remote", + "component--param-localremote", + "component-param-inbody", + "component-definitions-local", + "component-definitions-remote", + "component-definitions-combined", + "datamodels-datatypes-mixedtypes", + "datamodels-datatypes-integer", + "datamodels-datatypes-string", + "datamodels-datatypes-nullable", + "datamodels-datatypes-array", + "datamodels-datatypes-object", + "datamodels-datatypes-file", + "datamodels-datatypes-anytype", + "datamodels-enums", + "datamodels-keyvalue", + "datamodels-combineschema", + "datamodels-inheritpolymorphism" + ] +} \ No newline at end of file diff --git a/tests-upgrade/README.md b/tests-upgrade/README.md new file mode 100644 index 00000000000..d77d3941d22 --- /dev/null +++ b/tests-upgrade/README.md @@ -0,0 +1,2 @@ +### Description +Add test case for autorest powershell upgrade. 
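For local runs, here is a rough usage sketch of `AutoRestUpgradeTest.ps1`, assuming `autorest-beta` and a suitable Node version are already on the PATH and the script is started from the `tests-upgrade` folder (the folder name `basic-get` below is just one of the test folders added in this change):

``` powershell
# Generate with both versions for every folder in the WhiteList of Configuration.json and compare the results
./AutoRestUpgradeTest.ps1 -AllowList

# Run a single test folder and compare its M3/M4 output
./AutoRestUpgradeTest.ps1 -TestName basic-get

# Only generate code (skip the comparison), e.g. to inspect the output by hand
./AutoRestUpgradeTest.ps1 -TestName basic-get -Generate
```

Per the script above, differing files are reported as CSV files under `CompareResult`, and the names of tests whose output matched are written to `CompareResult\GenerateSuccessList.Txt`.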
\ No newline at end of file diff --git a/tests-upgrade/basic-disableazure-get/readme.md b/tests-upgrade/basic-disableazure-get/readme.md new file mode 100644 index 00000000000..f64f8c74647 --- /dev/null +++ b/tests-upgrade/basic-disableazure-get/readme.md @@ -0,0 +1,13 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md + +azure: false + +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-disableazure-get/swagger.json b/tests-upgrade/basic-disableazure-get/swagger.json new file mode 100644 index 00000000000..ec29bdce647 --- /dev/null +++ b/tests-upgrade/basic-disableazure-get/swagger.json @@ -0,0 +1,53 @@ +{ + + "swagger": "2.0", + "info": { + "title": "AutoRestUpgradeClient", + "version": "2018-04-01", + "description": "ARM AutoRestUpgrade" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "Ok-Return" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-disableazure-response/readme.md b/tests-upgrade/basic-disableazure-response/readme.md new file mode 100644 index 00000000000..f64f8c74647 --- /dev/null +++ b/tests-upgrade/basic-disableazure-response/readme.md @@ -0,0 +1,13 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md + +azure: false + +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-disableazure-response/swagger.json b/tests-upgrade/basic-disableazure-response/swagger.json new file mode 100644 index 00000000000..4e3ddee1c72 --- /dev/null +++ b/tests-upgrade/basic-disableazure-response/swagger.json @@ -0,0 +1,61 @@ +{ + + "swagger": "2.0", + "info": { + "title": "AutoRestUpgradeClient", + "version": "2018-04-01", + "description": "ARM AutoRestUpgrade" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "Ok-Return", + "schema": { + "$ref": "#/definitions/Workspaces" + } + } + } + } + } + }, + "definitions": { + "Workspaces": { + "type": "string" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-get-delete-put-patch/readme.md 
b/tests-upgrade/basic-get-delete-put-patch/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-get-delete-put-patch/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-get-delete-put-patch/swagger.json b/tests-upgrade/basic-get-delete-put-patch/swagger.json new file mode 100644 index 00000000000..1e492fbdb81 --- /dev/null +++ b/tests-upgrade/basic-get-delete-put-patch/swagger.json @@ -0,0 +1,89 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Deletes the workspace", + "responses": { + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "responses": { + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "responses": { + "202": { + "description": "Accepted" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-get-delete/readme.md b/tests-upgrade/basic-get-delete/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-get-delete/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-get-delete/swagger.json b/tests-upgrade/basic-get-delete/swagger.json new file mode 100644 index 00000000000..99515cde797 --- /dev/null +++ b/tests-upgrade/basic-get-delete/swagger.json @@ -0,0 +1,65 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "delete": { + "tags":[ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Delete the workspace.", + "responses": { + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-get-querystr/readme.md b/tests-upgrade/basic-get-querystr/readme.md new file mode 100644 index 00000000000..43b8fe48491 --- /dev/null +++ b/tests-upgrade/basic-get-querystr/readme.md @@ -0,0 +1,9 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json +``` diff --git a/tests-upgrade/basic-get-querystr/swagger.json b/tests-upgrade/basic-get-querystr/swagger.json new file mode 100644 index 00000000000..8ec03daef8f --- /dev/null +++ b/tests-upgrade/basic-get-querystr/swagger.json @@ -0,0 +1,61 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/users": { + "get": { + "tags": [ + "users" + ], + "operationId": "users_Get", + "description": "get user list.", + "parameters": [ + { + "in": "query", + "name": "role", + "type": "string", + "required": true + } + ], + "responses": { + "200": { + "description": "OK" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-get-response-operation/readme.md b/tests-upgrade/basic-get-response-operation/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-get-response-operation/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-get-response-operation/swagger.json b/tests-upgrade/basic-get-response-operation/swagger.json new file mode 100644 index 00000000000..58b0c253858 --- /dev/null +++ b/tests-upgrade/basic-get-response-operation/swagger.json @@ -0,0 +1,56 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-get/readme.md b/tests-upgrade/basic-get/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-get/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-get/swagger.json b/tests-upgrade/basic-get/swagger.json new file mode 100644 index 00000000000..5480c062852 --- /dev/null +++ b/tests-upgrade/basic-get/swagger.json @@ -0,0 +1,53 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-polymorphism/readme.md b/tests-upgrade/basic-polymorphism/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-polymorphism/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-polymorphism/swagger.json b/tests-upgrade/basic-polymorphism/swagger.json new file mode 100644 index 00000000000..2f2c6269b45 --- /dev/null +++ b/tests-upgrade/basic-polymorphism/swagger.json @@ -0,0 +1,1599 @@ +{ + "swagger": "2.0", + "info": { + "title": "TimeSeriesInsightsClient", + "description": "Time Series Insights client", + "version": "2018-08-15-preview" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}": { + "put": { + "tags": [ + "Environments" + ], + "operationId": "Environments_CreateOrUpdate", + "x-ms-examples": { + "EnvironmentsCreate": { + "$ref": "./examples/EnvironmentsCreate.json" + } + }, + "x-ms-long-running-operation": true, + "description": "Create or update an environment in the specified subscription and resource group.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + 
}, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "name": "environmentName", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "description": "Name of the environment" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/EnvironmentCreateOrUpdateParameters" + }, + "description": "Parameters for creating an environment resource." + } + ], + "responses": { + "200": { + "description": "The existing environment definition was successfully updated.", + "schema": { + "$ref": "#/definitions/EnvironmentResource" + } + }, + "201": { + "description": "The environment create request was accepted. Environment provisioning is an asynchronous operation. You can periodically get your environment definition and monitor progress via the provisioningState property.", + "schema": { + "$ref": "#/definitions/EnvironmentResource" + } + }, + "404": { + "description": "The subscription or resource group could not be found." + }, + "default": { + "description": "HTTP 400 (Bad Request): The given environment request body is invalid; See the error code and message in the response for details.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "get": { + "tags": [ + "Environments" + ], + "operationId": "Environments_Get", + "x-ms-examples": { + "EnvironmentsGet": { + "$ref": "./examples/EnvironmentsGet.json" + } + }, + "description": "Gets the environment with the specified name in the specified subscription and resource group.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ExpandParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The environment definition was successfully retrieved and is in the response. If you are polling for the completion of a provisioning or scale operation, you can check its status via the provisioningState property.", + "schema": { + "$ref": "#/definitions/EnvironmentResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, or environment could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + } + }, + "definitions": { + "OperationListResult": { + "description": "Result of the request to list Time Series Insights operations. 
It contains a list of operations and a URL link to get the next set of results.", + "properties": { + "value": { + "description": "List of Time Series Insights operations supported by the Microsoft.TimeSeriesInsights resource provider.", + "type": "array", + "readOnly": true, + "items": { + "$ref": "#/definitions/Operation" + } + }, + "nextLink": { + "description": "URL to get the next set of operation list results if there are any.", + "type": "string", + "readOnly": true + } + } + }, + "Operation": { + "description": "A Time Series Insights REST API operation", + "type": "object", + "properties": { + "name": { + "description": "The name of the operation being performed on this particular object.", + "type": "string", + "readOnly": true + }, + "display": { + "description": "Contains the localized display information for this particular operation / action.", + "readOnly": true, + "properties": { + "provider": { + "description": "The localized friendly form of the resource provider name.", + "type": "string", + "readOnly": true + }, + "resource": { + "description": "The localized friendly form of the resource type related to this action/operation.", + "type": "string", + "readOnly": true + }, + "operation": { + "description": "The localized friendly name for the operation.", + "type": "string", + "readOnly": true + }, + "description": { + "description": "The localized friendly description for the operation.", + "type": "string", + "readOnly": true + } + } + } + } + }, + "Resource": { + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Resource Id" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "Resource name" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "Resource type" + } + }, + "description": "Time Series Insights resource", + "x-ms-azure-resource": true + }, + "TrackedResource": { + "properties": { + "location": { + "type": "string", + "description": "Resource location", + "x-ms-mutability": [ + "read", + "create" + ] + }, + "tags": { + "type": "string", + "description": "Resource tags" + } + }, + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ], + "required": [ + "location" + ], + "description": "Time Series Insights resource that is tracked by Azure Resource Manager." + }, + "ResourceProperties": { + "properties": { + "provisioningState": { + "$ref": "#/definitions/ProvisioningState", + "description": "Provisioning state of the resource." + }, + "creationTime": { + "readOnly": true, + "type": "string", + "format": "date-time", + "description": "The time the resource was created." + } + }, + "description": "Properties that are common to all tracked resources." + }, + "ProvisioningState": { + "readOnly": true, + "type": "string", + "description": "Provisioning state of the resource.", + "enum": [ + "Accepted", + "Creating", + "Updating", + "Succeeded", + "Failed", + "Deleting" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": false + } + }, + "Sku": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of this SKU.", + "enum": [ + "S1", + "S2", + "P1", + "L1" + ], + "x-ms-enum": { + "name": "SkuName", + "modelAsString": false + } + }, + "capacity": { + "format": "int32", + "type": "integer", + "description": "The capacity of the sku. 
For standard environments, this value can be changed to support scale out of environments after they have been created.", + "minimum": 1, + "maximum": 10 + } + }, + "required": [ + "name", + "capacity" + ], + "description": "The sku determines the type of environment, either standard (S1 or S2) or long-term (L1). For standard environments the sku determines the capacity of the environment, the ingress rate, and the billing rate." + }, + "WarmStoreConfigurationProperties": { + "type": "object", + "properties": { + "dataRetention": { + "type": "string", + "description": "ISO8601 timespan specifying the number of days the environment's events will be available for query from the warm store." + } + }, + "required": [ + "dataRetention" + ], + "description": "The warm store configuration provides the details to create a warm store cache that will retain a copy of the environment's data available for faster query." + }, + "LongTermStorageConfigurationInput": { + "type": "object", + "properties": { + "accountName": { + "type": "string", + "description": "The name of the storage account that will hold the environment's long term data." + }, + "managementKey": { + "type": "string", + "description": "The value of the management key that grants the Time Series Insights service write access to the storage account. This property is not shown in environment responses." + } + }, + "required": [ + "accountName", + "managementKey" + ], + "description": "The storage configuration provides the connection details that allows the Time Series Insights service to connect to the customer storage account that is used to store the environment's data." + }, + "LongTermStorageConfigurationOutput": { + "type": "object", + "properties": { + "accountName": { + "type": "string", + "description": "The name of the storage account that will hold the environment's long term data." + } + }, + "required": [ + "accountName" + ], + "description": "The storage configuration provides the non-secret connection details about the customer storage account that is used to store the environment's data." + }, + "LongTermStorageConfigurationMutableProperties": { + "type": "object", + "properties": { + "managementKey": { + "type": "string", + "description": "The value of the management key that grants the Time Series Insights service write access to the storage account. This property is not shown in environment responses." + } + }, + "required": [ + "managementKey" + ], + "description": "The storage configuration provides the connection details that allows the Time Series Insights service to connect to the customer storage account that is used to store the environment's data." + }, + "CreateOrUpdateTrackedResourceProperties": { + "properties": { + "location": { + "type": "string", + "description": "The location of the resource.", + "x-ms-mutability": [ + "read", + "create" + ] + }, + "tags": { + "type": "string", + "description": "Key-value pairs of additional properties for the resource." + } + }, + "required": [ + "location" + ], + "description": "Properties required to create any resource tracked by Azure Resource Manager." 
+ }, + "EnvironmentCreateOrUpdateParameters": { + "discriminator": "kind", + "properties": { + "kind": { + "type": "string", + "description": "The kind of the environment.", + "enum": [ + "Standard", + "LongTerm" + ], + "x-ms-enum": { + "name": "Kind", + "modelAsString": false + } + }, + "sku": { + "$ref": "#/definitions/Sku", + "description": "The sku determines the type of environment, either standard (S1 or S2) or long-term (L1). For standard environments the sku determines the capacity of the environment, the ingress rate, and the billing rate." + } + }, + "required": [ + "kind", + "sku" + ], + "allOf": [ + { + "$ref": "#/definitions/CreateOrUpdateTrackedResourceProperties" + } + ], + "description": "Parameters supplied to the CreateOrUpdate Environment operation." + }, + "StandardEnvironmentCreateOrUpdateParameters": { + "x-ms-discriminator-value": "Standard", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/StandardEnvironmentCreationProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EnvironmentCreateOrUpdateParameters" + } + ], + "description": "Parameters supplied to the Create or Update Environment operation for a standard environment." + }, + "LongTermEnvironmentCreateOrUpdateParameters": { + "x-ms-discriminator-value": "LongTerm", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/LongTermEnvironmentCreationProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EnvironmentCreateOrUpdateParameters" + } + ], + "description": "Parameters supplied to the Create or Update Environment operation for a long-term environment." + }, + "EnvironmentUpdateParameters": { + "type": "object", + "properties": { + "tags": { + "type": "string", + "description": "Key-value pairs of additional properties for the environment." + } + }, + "description": "Parameters supplied to the Update Environment operation." + }, + "StandardEnvironmentUpdateParameters": { + "type": "object", + "properties": { + "sku": { + "$ref": "#/definitions/Sku", + "description": "The sku of the environment." + }, + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/StandardEnvironmentMutableProperties", + "description": "Properties of the standard environment." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EnvironmentUpdateParameters" + } + ], + "description": "Parameters supplied to the Update Environment operation to update a standard environment." + }, + "LongTermEnvironmentUpdateParameters": { + "type": "object", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/LongTermEnvironmentMutableProperties", + "description": "Properties of the long-term environment." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EnvironmentUpdateParameters" + } + ], + "description": "Parameters supplied to the Update Environment operation to update a long-term environment." + }, + "EnvironmentListResponse": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/EnvironmentResource" + }, + "description": "Result of the List Environments operation." + } + }, + "description": "The response of the List Environments operation." + }, + "EnvironmentResource": { + "type": "object", + "discriminator": "kind", + "properties": { + "sku": { + "$ref": "#/definitions/Sku", + "description": "The sku determines the type of environment, either standard (S1 or S2) or long-term (L1). 
For standard environments the sku determines the capacity of the environment, the ingress rate, and the billing rate." + }, + "kind": { + "type": "string", + "description": "The kind of the environment.", + "enum": [ + "Standard", + "LongTerm" + ] + } + }, + "required": [ + "kind", + "sku" + ], + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "description": "An environment is a set of time-series data available for query, and is the top level Azure Time Series Insights resource." + }, + "StandardEnvironmentResource": { + "x-ms-discriminator-value": "Standard", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/StandardEnvironmentResourceProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EnvironmentResource" + } + ], + "description": "An environment is a set of time-series data available for query, and is the top level Azure Time Series Insights resource. Standard environments have data retention limits." + }, + "LongTermEnvironmentResource": { + "x-ms-discriminator-value": "LongTerm", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/LongTermEnvironmentResourceProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EnvironmentResource" + } + ], + "description": "An environment is a set of time-series data available for query, and is the top level Azure Time Series Insights resource. LongTerm environments do not have set data retention limits." + }, + "StandardEnvironmentCreationProperties": { + "properties": { + "dataRetentionTime": { + "type": "string", + "description": "ISO8601 timespan specifying the minimum number of days the environment's events will be available for query." + }, + "storageLimitExceededBehavior": { + "type": "string", + "description": "The behavior the Time Series Insights service should take when the environment's capacity has been exceeded. If \"PauseIngress\" is specified, new events will not be read from the event source. If \"PurgeOldData\" is specified, new events will continue to be read and old events will be deleted from the environment. The default behavior is PurgeOldData.", + "enum": [ + "PurgeOldData", + "PauseIngress" + ], + "x-ms-enum": { + "name": "StorageLimitExceededBehavior", + "modelAsString": false + } + }, + "partitionKeyProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/TimeSeriesIdProperty" + }, + "description": "The list of event properties which will be used to partition data in the environment." + } + }, + "required": [ + "dataRetentionTime" + ], + "description": "Properties used to create a standard environment." + }, + "LongTermEnvironmentCreationProperties": { + "properties": { + "timeSeriesIdProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/TimeSeriesIdProperty" + }, + "description": "The list of event properties which will be used to define the environment's time series id." + }, + "storageConfiguration": { + "$ref": "#/definitions/LongTermStorageConfigurationInput", + "description": "The storage configuration provides the connection details that allows the Time Series Insights service to connect to the customer storage account that is used to store the environment's data." 
+ }, + "warmStoreConfiguration": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WarmStoreConfigurationProperties", + "description": "The warm store configuration provides the details to create a warm store cache that will retain a copy of the environment's data available for faster query." + } + }, + "required": [ + "timeSeriesIdProperties", + "storageConfiguration" + ], + "description": "Properties used to create a long-term environment." + }, + "EnvironmentResourceProperties": { + "properties": { + "dataAccessId": { + "readOnly": true, + "type": "string", + "description": "An id used to access the environment data, e.g. to query the environment's events or upload reference data for the environment." + }, + "dataAccessFqdn": { + "readOnly": true, + "type": "string", + "description": "The fully qualified domain name used to access the environment data, e.g. to query the environment's events or upload reference data for the environment." + }, + "status": { + "$ref": "#/definitions/EnvironmentStatus", + "description": "An object that represents the status of the environment, and its internal state in the Time Series Insights service." + } + }, + "allOf": [ + { + "$ref": "#/definitions/ResourceProperties" + } + ], + "description": "Properties of the environment." + }, + "StandardEnvironmentResourceProperties": { + "allOf": [ + { + "$ref": "#/definitions/StandardEnvironmentCreationProperties" + }, + { + "$ref": "#/definitions/EnvironmentResourceProperties" + } + ], + "required": [ + "dataRetentionTime" + ], + "description": "Properties of the standard environment." + }, + "LongTermEnvironmentResourceProperties": { + "properties": { + "timeSeriesIdProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/TimeSeriesIdProperty" + }, + "description": "The list of event properties which will be used to define the environment's time series id." + }, + "storageConfiguration": { + "$ref": "#/definitions/LongTermStorageConfigurationOutput", + "description": "The storage configuration provides the connection details that allows the Time Series Insights service to connect to the customer storage account that is used to store the environment's data." + }, + "warmStoreConfiguration": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WarmStoreConfigurationProperties", + "description": "The warm store configuration provides the details to create a warm store cache that will retain a copy of the environment's data available for faster query." + } + }, + "required": [ + "timeSeriesIdProperties", + "storageConfiguration" + ], + "allOf": [ + { + "$ref": "#/definitions/EnvironmentResourceProperties" + } + ], + "description": "Properties of the long-term environment." + }, + "StandardEnvironmentMutableProperties": { + "description": "An object that represents a set of mutable standard environment resource properties.", + "type": "object", + "properties": { + "dataRetentionTime": { + "type": "string", + "description": "ISO8601 timespan specifying the minimum number of days the environment's events will be available for query." + }, + "storageLimitExceededBehavior": { + "type": "string", + "description": "The behavior the Time Series Insights service should take when the environment's capacity has been exceeded. If \"PauseIngress\" is specified, new events will not be read from the event source. If \"PurgeOldData\" is specified, new events will continue to be read and old events will be deleted from the environment. 
The default behavior is PurgeOldData.", + "enum": [ + "PurgeOldData", + "PauseIngress" + ], + "x-ms-enum": { + "name": "StorageLimitExceededBehavior", + "modelAsString": false + } + }, + "partitionKeyProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/TimeSeriesIdProperty" + }, + "description": "The list of event properties which will be used to partition data in the environment." + } + } + }, + "LongTermEnvironmentMutableProperties": { + "description": "An object that represents a set of mutable long-term environment resource properties.", + "type": "object", + "properties": { + "storageConfiguration": { + "$ref": "#/definitions/LongTermStorageConfigurationMutableProperties", + "description": "The storage configuration provides the connection details that allows the Time Series Insights service to connect to the customer storage account that is used to store the environment's data." + }, + "warmStoreConfiguration": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WarmStoreConfigurationProperties", + "description": "The warm store configuration provides the details to create a warm store cache that will retain a copy of the environment's data available for faster query." + } + } + }, + "TimeSeriesIdProperty": { + "properties": { + "name": { + "type": "string", + "description": "The name of the property." + }, + "type": { + "type": "string", + "description": "The type of the property.", + "enum": [ + "String", + "123" + ], + "x-ms-enum": { + "name": "PropertyType", + "modelAsString": false + } + } + }, + "description": "The structure of the property that a time series id can have. An environment can have multiple such properties." + }, + "EnvironmentStatus": { + "readOnly": true, + "type": "object", + "description": "An object that represents the status of the environment, and its internal state in the Time Series Insights service.", + "properties": { + "ingress": { + "$ref": "#/definitions/IngressEnvironmentStatus", + "description": "An object that represents the status of ingress on an environment." + }, + "warmStorage": { + "$ref": "#/definitions/WarmStorageEnvironmentStatus", + "description": "An object that represents the status of warm storage on an environment." + } + } + }, + "IngressEnvironmentStatus": { + "readOnly": true, + "type": "object", + "description": "An object that represents the status of ingress on an environment.", + "properties": { + "state": { + "type": "string", + "description": "This string represents the state of ingress operations on an environment. It can be \"Disabled\", \"Ready\", \"Running\", \"Paused\" or \"Unknown\"", + "enum": [ + "Disabled", + "Ready", + "Running", + "Paused", + "Unknown" + ], + "x-ms-enum": { + "name": "IngressState", + "modelAsString": false + } + }, + "stateDetails": { + "$ref": "#/definitions/EnvironmentStateDetails", + "description": "An object that contains the details about an environment's state." + } + } + }, + "EnvironmentStateDetails": { + "readOnly": true, + "type": "object", + "description": "An object that contains the details about an environment's state.", + "properties": { + "code": { + "type": "string", + "description": "Contains the code that represents the reason of an environment being in a particular state. Can be used to programmatically handle specific cases." + }, + "message": { + "type": "string", + "description": "A message that describes the state in detail." 
+ } + } + }, + "WarmStorageEnvironmentStatus": { + "readOnly": true, + "type": "object", + "description": "An object that represents the status of warm storage on an environment.", + "properties": { + "propertiesUsage": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WarmStoragePropertiesUsage", + "description": "An object that contains the status of warm storage properties usage." + } + } + }, + "WarmStoragePropertiesUsage": { + "readOnly": true, + "type": "object", + "description": "An object that contains the status of warm storage properties usage.", + "properties": { + "state": { + "type": "string", + "description": "This string represents the state of warm storage properties usage. It can be \"Ok\", \"Error\", \"Unknown\".", + "enum": [ + "Ok", + "Error", + "Unknown" + ], + "x-ms-enum": { + "name": "WarmStoragePropertiesState", + "modelAsString": false + } + }, + "stateDetails": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WarmStoragePropertiesUsageStateDetails", + "description": "An object that contains the details about warm storage properties usage state." + } + } + }, + "WarmStoragePropertiesUsageStateDetails": { + "readOnly": true, + "type": "object", + "description": "An object that contains the details about warm storage properties usage state.", + "properties": { + "currentCount": { + "format": "int32", + "type": "integer", + "description": "A value that represents the number of properties used by the environment for S1/S2 SKU and number of properties used by Warm Store for PAYG SKU", + "minimum": 1, + "maximum": 10 + }, + "maxCount": { + "format": "int32", + "type": "integer", + "description": "A value that represents the maximum number of properties used allowed by the environment for S1/S2 SKU and maximum number of properties allowed by Warm Store for PAYG SKU.", + "minimum": 1, + "maximum": 10 + } + } + }, + "EventSourceCreateOrUpdateParameters": { + "discriminator": "kind", + "properties": { + "kind": { + "type": "string", + "description": "The kind of the event source.", + "enum": [ + "Microsoft.EventHub", + "Microsoft.IoTHub" + ], + "x-ms-enum": { + "name": "Kind", + "modelAsString": false + } + } + }, + "required": [ + "kind" + ], + "allOf": [ + { + "$ref": "#/definitions/CreateOrUpdateTrackedResourceProperties" + } + ], + "description": "Parameters supplied to the Create or Update Event Source operation." + }, + "EventHubEventSourceCreateOrUpdateParameters": { + "x-ms-discriminator-value": "Microsoft.EventHub", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EventHubEventSourceCreationProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EventSourceCreateOrUpdateParameters" + } + ], + "description": "Parameters supplied to the Create or Update Event Source operation for an EventHub event source." + }, + "IoTHubEventSourceCreateOrUpdateParameters": { + "x-ms-discriminator-value": "Microsoft.IoTHub", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/IoTHubEventSourceCreationProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EventSourceCreateOrUpdateParameters" + } + ], + "description": "Parameters supplied to the Create or Update Event Source operation for an IoTHub event source." + }, + "EventSourceUpdateParameters": { + "type": "object", + "properties": { + "tags": { + "type": "string", + "description": "Key-value pairs of additional properties for the event source." 
+ } + }, + "description": "Parameters supplied to the Update Event Source operation." + }, + "EventHubEventSourceUpdateParameters": { + "type": "object", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EventHubEventSourceMutableProperties", + "description": "Properties of the EventHub event source." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EventSourceUpdateParameters" + } + ], + "description": "Parameters supplied to the Update Event Source operation to update an EventHub event source." + }, + "IoTHubEventSourceUpdateParameters": { + "type": "object", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/IoTHubEventSourceMutableProperties", + "description": "Properties of the IoTHub event source." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EventSourceUpdateParameters" + } + ], + "description": "Parameters supplied to the Update Event Source operation to update an IoTHub event source." + }, + "EventSourceListResponse": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/EventSourceResource" + }, + "description": "Result of the List EventSources operation." + } + }, + "description": "The response of the List EventSources operation." + }, + "EventSourceResource": { + "type": "object", + "discriminator": "kind", + "properties": { + "kind": { + "type": "string", + "description": "The kind of the event source.", + "enum": [ + "Microsoft.EventHub", + "Microsoft.IoTHub" + ] + } + }, + "required": [ + "kind" + ], + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "description": "An environment receives data from one or more event sources. Each event source has associated connection info that allows the Time Series Insights ingress pipeline to connect to and pull data from the event source" + }, + "EventHubEventSourceResource": { + "x-ms-discriminator-value": "Microsoft.EventHub", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EventHubEventSourceResourceProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EventSourceResource" + } + ], + "description": "An event source that receives its data from an Azure EventHub." + }, + "IoTHubEventSourceResource": { + "x-ms-discriminator-value": "Microsoft.IotHub", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/IoTHubEventSourceResourceProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EventSourceResource" + } + ], + "description": "An event source that receives its data from an Azure IoTHub." + }, + "EventSourceCommonProperties": { + "properties": { + "timestampPropertyName": { + "type": "string", + "description": "The event property that will be used as the event source's timestamp. If a value isn't specified for timestampPropertyName, or if null or empty-string is specified, the event creation time will be used." + } + }, + "allOf": [ + { + "$ref": "#/definitions/ResourceProperties" + } + ], + "description": "Properties of the event source." + }, + "AzureEventSourceProperties": { + "properties": { + "eventSourceResourceId": { + "type": "string", + "description": "The resource id of the event source in Azure Resource Manager." 
+ } + }, + "allOf": [ + { + "$ref": "#/definitions/EventSourceCommonProperties" + } + ], + "required": [ + "eventSourceResourceId" + ], + "description": "Properties of an event source that reads events from an event broker in Azure." + }, + "EventHubEventSourceCommonProperties": { + "properties": { + "serviceBusNamespace": { + "type": "string", + "description": "The name of the service bus that contains the event hub." + }, + "eventHubName": { + "type": "string", + "description": "The name of the event hub." + }, + "consumerGroupName": { + "type": "string", + "description": "The name of the event hub's consumer group that holds the partitions from which events will be read." + }, + "keyName": { + "type": "string", + "description": "The name of the SAS key that grants the Time Series Insights service access to the event hub. The shared access policies for this key must grant 'Listen' permissions to the event hub." + } + }, + "allOf": [ + { + "$ref": "#/definitions/AzureEventSourceProperties" + } + ], + "required": [ + "serviceBusNamespace", + "eventHubName", + "consumerGroupName", + "keyName" + ], + "description": "Properties of the EventHub event source." + }, + "EventHubEventSourceCreationProperties": { + "properties": { + "sharedAccessKey": { + "type": "string", + "description": "The value of the shared access key that grants the Time Series Insights service read access to the event hub. This property is not shown in event source responses." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EventHubEventSourceCommonProperties" + } + ], + "required": [ + "sharedAccessKey" + ], + "description": "Properties of the EventHub event source that are required on create or update requests." + }, + "EventHubEventSourceResourceProperties": { + "allOf": [ + { + "$ref": "#/definitions/EventHubEventSourceCommonProperties" + } + ], + "properties": { + "sss": { + "type": "string" + } + }, + "description": "Properties of the EventHub event source resource." + }, + "IoTHubEventSourceCommonProperties": { + "properties": { + "iotHubName": { + "type": "string", + "description": "The name of the iot hub." + }, + "consumerGroupName": { + "type": "string", + "description": "The name of the iot hub's consumer group that holds the partitions from which events will be read." + }, + "keyName": { + "type": "string", + "description": "The name of the Shared Access Policy key that grants the Time Series Insights service access to the iot hub. This shared access policy key must grant 'service connect' permissions to the iot hub." + } + }, + "allOf": [ + { + "$ref": "#/definitions/AzureEventSourceProperties" + } + ], + "required": [ + "iotHubName", + "consumerGroupName", + "keyName" + ], + "description": "Properties of the IoTHub event source." + }, + "IoTHubEventSourceCreationProperties": { + "properties": { + "sharedAccessKey": { + "type": "string", + "description": "The value of the Shared Access Policy key that grants the Time Series Insights service read access to the iot hub. This property is not shown in event source responses." + } + }, + "allOf": [ + { + "$ref": "#/definitions/IoTHubEventSourceCommonProperties" + } + ], + "required": [ + "sharedAccessKey" + ], + "description": "Properties of the IoTHub event source that are required on create or update requests." 
+ }, + "IoTHubEventSourceResourceProperties": { + "allOf": [ + { + "$ref": "#/definitions/IoTHubEventSourceCommonProperties" + } + ], + "properties": { + "name": { + "type": "boolean" + } + }, + "description": "Properties of the IoTHub event source resource." + }, + "LocalTimestamp": { + "description": "An object that represents the local timestamp property. It contains the format of local timestamp that needs to be used and the corresponding timezone offset information. If a value isn't specified for localTimestamp, or if null, then the local timestamp will not be ingressed with the events.", + "type": "object", + "properties": { + "format": { + "description": "An enum that represents the format of the local timestamp property that needs to be set.", + "type": "string", + "enum": [ + "Embedded", + "Iana", + "TimeSpan" + ], + "x-ms-enum": { + "name": "LocalTimestampFormat", + "modelAsString": false + } + }, + "timeZoneOffset": { + "description": "An object that represents the offset information for the local timestamp format specified. Should not be specified for LocalTimestampFormat - Embedded.", + "type": "object", + "properties": { + "propertyName": { + "type": "string", + "description": "The event property that will be contain the offset information to calculate the local timestamp. When the LocalTimestampFormat is Iana, the property name will contain the name of the column which contains IANA Timezone Name (eg: Americas/Los Angeles). When LocalTimestampFormat is Timespan, it contains the name of property which contains values representing the offset (eg: P1D or 1.00:00:00)" + } + } + } + } + }, + "EventSourceMutableProperties": { + "description": "An object that represents a set of mutable event source resource properties.", + "type": "object", + "properties": { + "timestampPropertyName": { + "type": "string", + "description": "The event property that will be used as the event source's timestamp. If a value isn't specified for timestampPropertyName, or if null or empty-string is specified, the event creation time will be used." + }, + "localTimestamp": { + "$ref": "#/definitions/LocalTimestamp", + "description": "An object that represents the local timestamp property. It contains the format of local timestamp that needs to be used and the corresponding timezone offset information. If a value isn't specified for localTimestamp, or if null, then the local timestamp will not be ingressed with the events." + } + } + }, + "EventHubEventSourceMutableProperties": { + "description": "An object that represents a set of mutable EventHub event source resource properties.", + "type": "object", + "properties": { + "sharedAccessKey": { + "type": "string", + "description": "The value of the shared access key that grants the Time Series Insights service read access to the event hub. This property is not shown in event source responses." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EventSourceMutableProperties" + } + ] + }, + "IoTHubEventSourceMutableProperties": { + "description": "An object that represents a set of mutable IoTHub event source resource properties.", + "type": "object", + "properties": { + "sharedAccessKey": { + "type": "string", + "description": "The value of the shared access key that grants the Time Series Insights service read access to the iot hub. This property is not shown in event source responses." 
+ } + }, + "allOf": [ + { + "$ref": "#/definitions/EventSourceMutableProperties" + } + ] + }, + "ReferenceDataSetCreateOrUpdateParameters": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/ReferenceDataSetCreationProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/CreateOrUpdateTrackedResourceProperties" + } + ] + }, + "ReferenceDataSetUpdateParameters": { + "type": "object", + "properties": { + "tags": { + "type": "string", + "description": "Key-value pairs of additional properties for the reference data set." + } + }, + "description": "Parameters supplied to the Update Reference Data Set operation." + }, + "ReferenceDataSetListResponse": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/ReferenceDataSetResource" + }, + "description": "Result of the List Reference Data Sets operation." + } + }, + "description": "The response of the List Reference Data Sets operation." + }, + "ReferenceDataSetResource": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/ReferenceDataSetResourceProperties" + } + }, + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "description": "A reference data set provides metadata about the events in an environment. Metadata in the reference data set will be joined with events as they are read from event sources. The metadata that makes up the reference data set is uploaded or modified through the Time Series Insights data plane APIs." + }, + "ReferenceDataSetCreationProperties": { + "properties": { + "keyProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/ReferenceDataSetKeyProperty" + }, + "description": "The list of key properties for the reference data set." + }, + "dataStringComparisonBehavior": { + "type": "string", + "description": "The reference data set key comparison behavior can be set using this property. By default, the value is 'Ordinal' - which means case sensitive key comparison will be performed while joining reference data with events or while adding new reference data. When 'OrdinalIgnoreCase' is set, case insensitive comparison will be used.", + "enum": [ + "Ordinal", + "OrdinalIgnoreCase" + ], + "x-ms-enum": { + "name": "DataStringComparisonBehavior", + "modelAsString": false + } + } + }, + "required": [ + "keyProperties" + ], + "description": "Properties used to create a reference data set." + }, + "ReferenceDataSetResourceProperties": { + "allOf": [ + { + "$ref": "#/definitions/ReferenceDataSetCreationProperties" + }, + { + "$ref": "#/definitions/ResourceProperties" + } + ], + "required": [ + "keyProperties" + ], + "description": "Properties of the reference data set." + }, + "ReferenceDataSetKeyProperty": { + "properties": { + "name": { + "type": "string", + "description": "The name of the key property." + }, + "type": { + "type": "string", + "description": "The type of the key property.", + "enum": [ + "String", + "Double", + "Bool", + "DateTime" + ], + "x-ms-enum": { + "name": "ReferenceDataKeyPropertyType", + "modelAsString": false + } + } + }, + "description": "A key property for the reference data set. A reference data set can have multiple key properties." 
+ }, + "AccessPolicyCreateOrUpdateParameters": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/AccessPolicyResourceProperties" + } + }, + "required": [ + "properties" + ] + }, + "AccessPolicyUpdateParameters": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/AccessPolicyMutableProperties" + } + } + }, + "AccessPolicyListResponse": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/AccessPolicyResource" + }, + "description": "Result of the List access policies operation." + } + }, + "description": "The response of the List access policies operation." + }, + "AccessPolicyResource": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/AccessPolicyResourceProperties" + } + }, + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ], + "description": "An access policy is used to grant users and applications access to the environment. Roles are assigned to service principals in Azure Active Directory. These roles define the actions the principal can perform through the Time Series Insights data plane APIs." + }, + "AccessPolicyResourceProperties": { + "properties": { + "principalObjectId": { + "type": "string", + "description": "The objectId of the principal in Azure Active Directory." + }, + "description": { + "type": "string", + "description": "An description of the access policy." + }, + "roles": { + "type": "array", + "items": { + "type": "string", + "description": "A role defining the data plane operations that a principal can perform on a Time Series Insights client.", + "enum": [ + "Reader", + "Contributor" + ], + "x-ms-enum": { + "name": "AccessPolicyRole", + "modelAsString": false + } + }, + "description": "The list of roles the principal is assigned on the environment." + } + } + }, + "AccessPolicyMutableProperties": { + "description": "An object that represents a set of mutable access policy resource properties.", + "type": "object", + "properties": { + "description": { + "type": "string", + "description": "An description of the access policy." + }, + "roles": { + "type": "array", + "items": { + "type": "string", + "description": "A role defining the data plane operations that a principal can perform on a Time Series Insights client.", + "enum": [ + "Reader", + "Contributor" + ], + "x-ms-enum": { + "name": "AccessPolicyRole", + "modelAsString": false + } + }, + "description": "The list of roles the principal is assigned on the environment." + } + } + }, + "CloudError": { + "type": "object", + "properties": { + "error": { + "$ref": "#/definitions/CloudErrorBody", + "description": "Describes a particular API error with an error code and a message." + } + }, + "description": "Contains information about an API error.", + "x-ms-external": true + }, + "CloudErrorBody": { + "type": "object", + "description": "Describes a particular API error with an error code and a message.", + "properties": { + "code": { + "type": "string", + "description": "An error code that describes the error condition more precisely than an HTTP status code. Can be used to programmatically handle specific error cases." + }, + "message": { + "type": "string", + "description": "A message that describes the error in detail and provides debugging information." + }, + "target": { + "type": "string", + "description": "The target of the particular error (for example, the name of the property in error)." 
+ }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/CloudErrorBody", + "description": "Describes a particular API error with an error code and a message." + }, + "description": "Contains nested errors that are related to this error." + } + }, + "x-ms-external": true + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "description": "Azure Subscription ID.", + "required": true, + "type": "string" + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "Version of the API to be used with the client request." + }, + "ExpandParameter": { + "name": "expand", + "in": "query", + "required": false, + "type": "string", + "x-ms-parameter-location": "method", + "description": "Setting expand=status will include the status of the internal services of the environment in the Time Series Insights service." + }, + "ResourceGroupNameParameter": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "x-ms-parameter-location": "method", + "description": "Name of an Azure Resource group." + }, + "EnvironmentNameParameter": { + "name": "environmentName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights environment associated with the specified resource group.", + "x-ms-parameter-location": "method" + }, + "EventSourceNameParameter": { + "name": "eventSourceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights event source associated with the specified environment.", + "x-ms-parameter-location": "method" + }, + "ReferenceDataSetNameParameter": { + "name": "referenceDataSetName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights reference data set associated with the specified environment.", + "x-ms-parameter-location": "method" + }, + "AccessPolicyNameParameter": { + "name": "accessPolicyName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights access policy associated with the specified environment.", + "x-ms-parameter-location": "method" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-request-methods/readme.md b/tests-upgrade/basic-request-methods/readme.md new file mode 100644 index 00000000000..43b8fe48491 --- /dev/null +++ b/tests-upgrade/basic-request-methods/readme.md @@ -0,0 +1,9 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json +``` diff --git a/tests-upgrade/basic-request-methods/swagger.json b/tests-upgrade/basic-request-methods/swagger.json new file mode 100644 index 00000000000..c29f5743bb5 --- /dev/null +++ b/tests-upgrade/basic-request-methods/swagger.json @@ -0,0 +1,89 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active 
Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/users": { + "get": { + "tags": [ + "users" + ], + "operationId": "users_Get", + "description": "get user list.", + "responses": { + "200": { + "description": "OK" + } + } + }, + "delete": { + "tags": [ + "users" + ], + "operationId": "users_Delete", + "description": "delete a user.", + "responses": { + "200": { + "description": "OK" + } + } + }, + "put": { + "tags": [ + "users" + ], + "operationId": "users_CreateOrUpdate", + "description": "Create or Update a user.", + "responses": { + "200": { + "description": "OK" + } + } + }, + "post": { + "tags": [ + "users" + ], + "operationId": "users_Update", + "description": "Update a user.", + "responses": { + "200": { + "description": "OK" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-response-multioperation/readme.md b/tests-upgrade/basic-response-multioperation/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-response-multioperation/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-response-multioperation/swagger.json b/tests-upgrade/basic-response-multioperation/swagger.json new file mode 100644 index 00000000000..115b3af19eb --- /dev/null +++ b/tests-upgrade/basic-response-multioperation/swagger.json @@ -0,0 +1,104 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Deletes the workspace", + "responses": { + "200": { + "description": "OK - Deleted the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "responses": { + "200": { + "description": "OK - Created or updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "responses": { + "200": { + "description": "OK - Updated the workspace." + }, + "202": { + "description": "Accepted" + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-spec-required/readme.md b/tests-upgrade/basic-spec-required/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-spec-required/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-spec-required/swagger.json b/tests-upgrade/basic-spec-required/swagger.json new file mode 100644 index 00000000000..eb4665e888a --- /dev/null +++ b/tests-upgrade/basic-spec-required/swagger.json @@ -0,0 +1,22 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + } +} \ No newline at end of file diff --git a/tests-upgrade/basic-spec-root/readme.md b/tests-upgrade/basic-spec-root/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/basic-spec-root/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/basic-spec-root/swagger.json b/tests-upgrade/basic-spec-root/swagger.json new file mode 100644 index 00000000000..fccc4fc3f57 --- /dev/null +++ b/tests-upgrade/basic-spec-root/swagger.json @@ -0,0 +1,39 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + } +} \ No newline at end of file diff --git a/tests-upgrade/component-definitions-combined/readme.md b/tests-upgrade/component-definitions-combined/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-definitions-combined/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-definitions-combined/swagger.json b/tests-upgrade/component-definitions-combined/swagger.json new file mode 100644 index 00000000000..1330299ffdf --- /dev/null +++ b/tests-upgrade/component-definitions-combined/swagger.json @@ -0,0 +1,96 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + 
"application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ExtendedErrorModel" + } + } + } + } + } + }, + "definitions": { + "BasicErrorModel": { + "type": "object", + "required":[ + "message", + "code" + ], + "properties": { + "message": { + "type": "string" + }, + "code": { + "type": "integer", + "minimum": 100, + "maximum": 600 + } + } + }, + "ExtendedErrorModel": { + "allOf": [ + { + "$ref": "#/definitions/BasicErrorModel" + }, + { + "type": "object", + "required": [ + "rootCause" + ], + "properties": { + "rootCause": { + "type": "string" + } + } + } + ] + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-definitions-local/readme.md b/tests-upgrade/component-definitions-local/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-definitions-local/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-definitions-local/swagger.json b/tests-upgrade/component-definitions-local/swagger.json new file mode 100644 index 00000000000..3f8d3819459 --- /dev/null +++ b/tests-upgrade/component-definitions-local/swagger.json @@ -0,0 +1,126 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/xxxx-xxxx-xxxx/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListBySubscription", + "description": "Gets all the workspaces within a subscription.", + "responses": { + "200": { + "description": "OK - Returns an array of workspaces." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + } + } + } + }, + "definitions": { + "ErrorDetail": { + "title": "Error details.", + "type": "object", + "properties": { + "code": { + "description": "The error's code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "target": { + "description": "Indicates which property in the request is responsible for the error.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorInfo": { + "title": "The code and message for an error.", + "type": "object", + "properties": { + "code": { + "description": "A machine readable error code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "details": { + "description": "error details.", + "type": "array", + "items": { + "$ref": "#/definitions/ErrorDetail" + } + }, + "innererror": { + "description": "Inner error details if they exist.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorResponse": { + "title": "Error response.", + "description": "Contains details when the response code indicates an error.", + "type": "object", + "properties": { + "error": { + "description": "The error details.", + "$ref": "#/definitions/ErrorInfo" + } + }, + "required": [ + "error" + ] + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-definitions-remote/readme.md b/tests-upgrade/component-definitions-remote/readme.md new file mode 100644 index 00000000000..43b8fe48491 --- /dev/null +++ b/tests-upgrade/component-definitions-remote/readme.md @@ -0,0 +1,9 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json +``` diff --git a/tests-upgrade/component-definitions-remote/swagger.json b/tests-upgrade/component-definitions-remote/swagger.json new file mode 100644 index 00000000000..06c53dc20f4 --- /dev/null +++ b/tests-upgrade/component-definitions-remote/swagger.json @@ -0,0 +1,59 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/xxxx-xxxx-xxxx/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListBySubscription", + "description": "Gets all the workspaces within a subscription.", + "responses": { + "200": { + "description": "OK - Returns an array of workspaces." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "../examples/RemoteDefinitions.json#/definitions/ErrorResponse" + } + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-definitions-url/readme.md b/tests-upgrade/component-definitions-url/readme.md new file mode 100644 index 00000000000..43b8fe48491 --- /dev/null +++ b/tests-upgrade/component-definitions-url/readme.md @@ -0,0 +1,9 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json +``` diff --git a/tests-upgrade/component-definitions-url/swagger.json b/tests-upgrade/component-definitions-url/swagger.json new file mode 100644 index 00000000000..3c7b35cbfe6 --- /dev/null +++ b/tests-upgrade/component-definitions-url/swagger.json @@ -0,0 +1,59 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/xxxx-xxxx-xxxx/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListBySubscription", + "description": "Gets all the workspaces within a subscription.", + "responses": { + "200": { + "description": "OK - Returns an array of workspaces." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "https://github.com/LucasYao93/autorest.powershell/tree/lucas/modelerfour-dev/tests-upgrade/examples/RemoteDefinitions.json#definitions/ErrorResponse" + } + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-multiparam/readme.md b/tests-upgrade/component-multiparam/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-multiparam/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-multiparam/swagger.json b/tests-upgrade/component-multiparam/swagger.json new file mode 100644 index 00000000000..ce72f5a4f15 --- /dev/null +++ b/tests-upgrade/component-multiparam/swagger.json @@ -0,0 +1,213 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "200": { + "description": "OK - Deleted the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "200": { + "description": "OK - Created or updated the workspace." 
+ }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "200": { + "description": "OK - Updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to use for this operation." + }, + "ResourceGroupName": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "WorkspaceName": { + "name": "workspaceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace.", + "minLength": 3, + "maxLength": 64, + "x-ms-parameter-location": "method" + }, + "PeeringNameParameter": { + "name": "peeringName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace vNet peering.", + "x-ms-parameter-location": "method" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-param-additional/readme.md b/tests-upgrade/component-param-additional/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param-additional/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param-additional/swagger.json b/tests-upgrade/component-param-additional/swagger.json new file mode 100644 index 00000000000..c958f3ec259 --- /dev/null +++ b/tests-upgrade/component-param-additional/swagger.json @@ -0,0 +1,362 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/additionalProperties/true": { + "put": { + "operationId": "Pets_CreateAPTrue", + "description": "Create a Pet which contains more properties than what is defined.", + "parameters": [ + { + "name": "createParameters", + "in": "body", + "required": true, + "description": "The ID of the target subscription.", + 
"schema":{ + "$ref":"#/definitions/PetAPTrue" + } + } + ], + "responses": { + "200": { + "description": "A Pet which contains more properties than what is defined.", + "schema": { + "$ref": "#/definitions/PetAPTrue" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/additionalProperties/true-subclass": { + "put":{ + "operationId": "Pets_CreateCatAPTrue", + "description": "Create a CatAPTrue which contains more properties than what is defined.", + "parameters": [ + { + "name": "createParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CatAPTrue" + } + } + ], + "responses": { + "200": { + "description": "A CatAPTrue which contains more properties than what is defined.", + "schema": { + "$ref": "#/definitions/CatAPTrue" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/additionalProperties/type/object": { + "put": { + "operationId": "Pets_CreateAPObject", + "description": "Create a Pet which contains more properties than what is defined.", + "parameters": [ + { + "name": "createParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/PetAPObject" + } + } + ], + "responses": { + "200": { + "description": "A Pet which contains more properties than what is defined.", + "schema": { + "$ref": "#/definitions/PetAPObject" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/additionalProperties/type/string": { + "put": { + "operationId": "Pets_CreateAPString", + "description": "Create a Pet which contains more properties than what is defined.", + "parameters": [ + { + "name": "createParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/PetAPString" + } + } + ], + "responses": { + "200": { + "description": "A Pet which contains more properties than what is defined.", + "schema": { + "$ref": "#/definitions/PetAPString" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/additionalProperties/in/properties": { + "put": { + "operationId": "Pets_CreateAPInProperties", + "description": "Create a Pet which contains more properties than what is defined.", + "parameters": [ + { + "name": "createParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/PetAPInProperties" + } + } + ], + "responses": { + "200": { + "description": "A Pet which contains more properties than what is defined.", + "schema": { + "$ref": "#/definitions/PetAPInProperties" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/additionalProperties/in/properties/with/additionalProperties/string": { + "put": { + "operationId": "Pets_CreateAPInPropertiesWithAPString", + "description": "Create a Pet which contains more properties than what is defined.", + "parameters": [ + { + "name": "createParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/PetAPInPropertiesWithAPString" + } + } + ], + "responses": { + "200": { + "description": "A Pet which contains more properties than what is defined.", + "schema": { + "$ref": "#/definitions/PetAPInPropertiesWithAPString" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + 
}, + "definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + }, + "PetAPTrue":{ + "required": [ + "id" + ], + "type": "object", + "properties":{ + "id": { + "type":"integer" + }, + "name": { + "type": "string" + }, + "status": { + "type": "boolean", + "readOnly": true + } + }, + "additionalProperties": true + }, + "CatAPTrue": { + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/PetAPTrue" + } + ], + "properties": { + "friendly": { + "type": "boolean" + } + } + }, + "PetAPObject": { + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "status": { + "type": "boolean", + "readOnly": true + } + }, + "additionalProperties": { + "type": "object" + } + }, + "PetAPString": { + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "status": { + "type": "boolean", + "readOnly": true + } + }, + "additionalProperties": { + "type": "string" + } + }, + "PetAPInProperties": { + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "status": { + "type": "boolean", + "readOnly": true + }, + "additionalProperties": { + "type": "object", + "additionalProperties": { + "type": "number" + } + } + } + }, + "PetAPInPropertiesWithAPString": { + "type": "object", + "required": [ + "id", + "@odata.location" + ], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "status": { + "type": "boolean", + "readOnly": true + }, + "@odata.location": { + "type": "string" + }, + "additionalProperties": { + "type": "object", + "additionalProperties": { + "type": "number" + } + } + }, + "additionalProperties": { + "type": "string" + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-param-grouping/readme.md b/tests-upgrade/component-param-grouping/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param-grouping/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param-grouping/swagger.json b/tests-upgrade/component-param-grouping/swagger.json new file mode 100644 index 00000000000..2df96c6d54f --- /dev/null +++ b/tests-upgrade/component-param-grouping/swagger.json @@ -0,0 +1,251 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/parameterGrouping/postRequired/{path}": { + "post": { + "operationId": "parameterGrouping_postRequired", + "description": "Post a bunch of required parameters grouped", + "parameters": [ + { + "name": 
"body", + "in": "body", + "required": true, + "description":"Post a bunch of required parameters grouped", + "schema":{ + "type":"integer", + "format":"int32" + }, + "x-ms-parameter-grouping": { + } + }, + { + "name": "customHeader", + "in": "header", + "type": "string", + "required": false, + "x-ms-parameter-grouping": { + } + }, + { + "name": "query", + "in": "query", + "required": false, + "type": "integer", + "format": "int32", + "default": 30, + "description": "Query parameter with default", + "x-ms-parameter-grouping": { + } + }, + { + "name": "path", + "in": "path", + "required": true, + "type": "string", + "description": "Path parameter", + "x-ms-parameter-grouping": { + } + } + ], + "responses": { + "200": { + "description": "Success" + + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/parameterGrouping/postOptional": { + "post":{ + "operationId": "parameterGrouping_postOptional", + "description": "Post a bunch of optional parameters grouped", + "parameters": [ + { + "name": "customHeader", + "in": "header", + "type": "string", + "required": false, + "x-ms-parameter-grouping": { + } + }, + { + "name": "query", + "in": "query", + "required": false, + "type": "integer", + "format": "int32", + "default": 30, + "description": "Query parameter with default", + "x-ms-parameter-grouping": { + } + } + ], + "responses": { + "200": { + "description": "Success" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/parameterGrouping/postMultipleParameterGroups": { + "post": { + "operationId": "parameterGrouping_postMultiParamGroups", + "description": "Post parameters from multiple different parameter groups", + "parameters": [ + { + "name": "header-one", + "in": "header", + "type": "string", + "required": false, + "x-ms-parameter-grouping": { + "name": "first-parameter-group" + } + }, + { + "name": "query-one", + "in": "query", + "required": false, + "type": "integer", + "format": "int32", + "default": 30, + "description": "Query parameter with default", + "x-ms-parameter-grouping": { + "name": "first-parameter-group" + } + }, + { + "name": "header-two", + "in": "header", + "type": "string", + "required": false, + "x-ms-parameter-grouping": { + "postfix": "second-param-group" + } + }, + { + "name": "query-two", + "in": "query", + "required": false, + "type": "integer", + "format": "int32", + "default": 30, + "description": "Query parameter with default", + "x-ms-parameter-grouping": { + "postfix": "second-param-group" + } + } + ], + "responses": { + "200": { + "description": "Success" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/parameterGrouping/sharedParameterGroupObject": { + "put": { + "operationId": "parameterGrouping_postSharedParameterGroupObject", + "description": "Post parameters with a shared parameter group object", + "parameters": [ + { + "name": "header-one", + "in": "header", + "type": "string", + "required": false, + "x-ms-parameter-grouping": { + "name": "first-parameter-group" + } + }, + { + "name": "query-one", + "in": "query", + "required": false, + "type": "integer", + "format": "int32", + "default": 30, + "description": "Query parameter with default", + "x-ms-parameter-grouping": { + "name": "first-parameter-group" + } + } + ], + "responses": { + "200": { + "description": "Success" + }, + "default": { + "description": "Unexpected error", + 
"schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + }, + "definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-param-inbody/readme.md b/tests-upgrade/component-param-inbody/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param-inbody/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param-inbody/swagger.json b/tests-upgrade/component-param-inbody/swagger.json new file mode 100644 index 00000000000..5f514f3db2e --- /dev/null +++ b/tests-upgrade/component-param-inbody/swagger.json @@ -0,0 +1,78 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/workspaces": { + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "parameters": [ + { + "name": "workspace", + "in": "body", + "required": true, + "description": "The workspace to create or update.", + "schema": { + "required": [ + "properties" + ], + "properties": { + "name": { + "type": "string", + "description": "Workspace name" + }, + "location": { + "type": "string", + "description": "Workspace region" + } + } + } + } + ], + "responses": { + "200": { + "description": "OK - Created or updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + } + } \ No newline at end of file diff --git a/tests-upgrade/component-param-localremote/readme.md b/tests-upgrade/component-param-localremote/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param-localremote/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param-localremote/swagger.json b/tests-upgrade/component-param-localremote/swagger.json new file mode 100644 index 00000000000..2ee6b92080d --- /dev/null +++ b/tests-upgrade/component-param-localremote/swagger.json @@ -0,0 +1,210 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + } + ], + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + } + ], + "responses": { + "200": { + "description": "OK - Deleted the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + } + ], + "responses": { + "200": { + "description": "OK - Created or updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + } + ], + "responses": { + "200": { + "description": "OK - Updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "parameters": { + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to use for this operation." + }, + "ResourceGroupName": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "WorkspaceName": { + "name": "workspaceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace.", + "minLength": 3, + "maxLength": 64, + "x-ms-parameter-location": "method" + }, + "PeeringNameParameter": { + "name": "peeringName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace vNet peering.", + "x-ms-parameter-location": "method" + } + } + } \ No newline at end of file diff --git a/tests-upgrade/component-param-remote/readme.md b/tests-upgrade/component-param-remote/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param-remote/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param-remote/swagger.json b/tests-upgrade/component-param-remote/swagger.json new file mode 100644 index 00000000000..eed1f4fca06 --- /dev/null +++ b/tests-upgrade/component-param-remote/swagger.json @@ -0,0 +1,201 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns the workspace." 
+ }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Deleted the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Created or updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to use for this operation." + }, + "ResourceGroupName": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. 
The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "WorkspaceName": { + "name": "workspaceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace.", + "minLength": 3, + "maxLength": 64, + "x-ms-parameter-location": "method" + }, + "PeeringNameParameter": { + "name": "peeringName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace vNet peering.", + "x-ms-parameter-location": "method" + } + } + } \ No newline at end of file diff --git a/tests-upgrade/component-param-resourceasarraay/readme.md b/tests-upgrade/component-param-resourceasarraay/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param-resourceasarraay/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param-resourceasarraay/swagger.json b/tests-upgrade/component-param-resourceasarraay/swagger.json new file mode 100644 index 00000000000..51dbe32aeeb --- /dev/null +++ b/tests-upgrade/component-param-resourceasarraay/swagger.json @@ -0,0 +1,302 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/azure/resource-flatten/array": { + "put": { + "operationId": "putArray", + "description": "Put External Resource as an Array", + "externalDocs": { + "url": "http://tempuri.org" + }, + "parameters": [ + { + "name": "ResourceArray", + "description": "External Resource as an Array to put", + "in": "body", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Resource" + } + } + } + ], + "responses": { + "200": { + "description": "Successful Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "getArray", + "description": "Get External Resource as an Array", + "externalDocs": { + "url": "http://tempuri.org" + }, + "responses": { + "200": { + "description": "External Resource as an Array from get", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FlattenedProduct" + } + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azure/resource-flatten/dictionary": { + "put": { + "operationId": "putDictionary", + "description": "Put External Resource as a Dictionary", + "externalDocs": { + "url": "http://tempuri.org" + }, + "parameters": [ + { + "name": "ResourceDictionary", + "description": "External Resource as a Dictionary to put", + "in": "body", + "schema": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/FlattenedProduct" + } + } + } + ], + 
"responses": { + "200": { + "description": "Successful Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "getDictionary", + "description": "Get External Resource as a Dictionary", + "externalDocs": { + "url": "http://tempuri.org" + }, + "responses": { + "200": { + "description": "External Resource as a Dictionary from get", + "schema": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/FlattenedProduct" + } + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azure/resource-flatten/resourcecollection": { + "put": { + "operationId": "putResourceCollection", + "description": "Put External Resource as a ResourceCollection", + "externalDocs": { + "url": "http://tempuri.org" + }, + "parameters": [ + { + "name": "ResourceComplexObject", + "description": "External Resource as a ResourceCollection to put", + "in": "body", + "schema": { + "$ref": "#/definitions/ResourceCollection" + } + } + ], + "responses": { + "200": { + "description": "Successful Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "getResourceCollection", + "description": "Get External Resource as a ResourceCollection", + "externalDocs": { + "url": "http://tempuri.org" + }, + "responses": { + "200": { + "description": "External Resource as a ResourceCollection from get", + "schema": { + "$ref": "#/definitions/ResourceCollection" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + }, + "definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + }, + "Resource": { + "type": "object", + "x-ms-azure-resource": true, + "description": "Some resource", + "externalDocs": { + "url": "http://tempuri.org" + }, + "properties": { + "id": { + "type": "string", + "description": "Resource Id", + "readOnly": true + }, + "type": { + "type": "string", + "description": "Resource Type", + "readOnly": true + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "location": { + "type": "string", + "description": "Resource Location" + }, + "name": { + "type": "string", + "description": "Resource Name", + "readOnly": true + } + } + }, + "FlattenedProduct": { + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ], + "type": "object", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/FlattenedResourceProperties" + } + } + }, + "FlattenedResourceProperties": { + "type": "object", + "properties": { + "pname": { + "type": "string" + }, + "lsize": { + "type": "integer", + "format": "int32" + }, + "provisioningState": { + "type": "string" + } + } + }, + "ResourceCollection": { + "type": "object", + "properties": { + "productresource": { + "type": "object", + "$ref": "#/definitions/FlattenedProduct" + }, + "arrayofresources": { + "type": "array", + "items": { + "$ref": "#/definitions/FlattenedProduct" + } + }, + "dictionaryofresources": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/FlattenedProduct" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/component-param-specialproperties/readme.md 
b/tests-upgrade/component-param-specialproperties/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param-specialproperties/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param-specialproperties/swagger.json b/tests-upgrade/component-param-specialproperties/swagger.json new file mode 100644 index 00000000000..718820f033b --- /dev/null +++ b/tests-upgrade/component-param-specialproperties/swagger.json @@ -0,0 +1,969 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/azurespecials/overwrite/x-ms-client-request-id/method/": { + "get": { + "operationId": "x-ms-client-request-id_Get", + "description": "Get method that overwrites x-ms-client-request header with value 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.", + "tags": [ + "SubscriptionInCredentials Operations" + ], + "responses": { + "200": { + "description": "Successfully received the '9C4D50EE-2D56-4CD3-8152-34347DC9F2B0' x-ms-client-request header" + }, + "default": { + "description": "Unexpected error" + } + } + } + }, + "/azurespecials/overwrite/x-ms-client-request-id/via-param/method/": { + "get": { + "operationId": "x-ms-client-request-id_ParamGet", + "description": "Get method that overwrites x-ms-client-request header with value 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.", + "tags": [ + "SubscriptionInCredentials Operations" + ], + "parameters": [ + { + "name": "x-ms-client-request-id", + "in": "header", + "description": "This should appear as a method parameter, use value '9C4D50EE-2D56-4CD3-8152-34347DC9F2B0'", + "type": "string", + "required": true + } + ], + "responses": { + "200": { + "description": "Successfully received the '9C4D50EE-2D56-4CD3-8152-34347DC9F2B0' x-ms-client-request header" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/method/string/none/path/global/1234-5678-9012-3456/{subscriptionId}": { + "post": { + "operationId": "subscriptionInCredentials_postMethodGlobalValid", + "description": "POST method with subscriptionId modeled in credentials. 
Set the credential subscriptionId to '1234-5678-9012-3456' to succeed", + "tags": [ + "SubscriptionInCredentials Operations" + ], + "parameters": [ + { + "$ref": "#/parameters/globalSubscriptionId" + } + ], + "responses": { + "200": { + "description": "Successfully received the '1234-5678-9012-3456' credential value from credentials" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/method/string/none/path/global/null/{subscriptionId}": { + "post": { + "operationId": "subscriptionInCredentials_postMethodGlobalNull", + "description": "POST method with subscriptionId modeled in credentials. Set the credential subscriptionId to null, and client-side validation should prevent you from making this call", + "tags": [ + "SubscriptionInCredentials Operations" + ], + "parameters": [ + { + "$ref": "#/parameters/globalSubscriptionId" + } + ], + "responses": { + "200": { + "description": "This should never occur - you should be prevented from calling this method with null subscription Id in credentials" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/method/string/none/path/globalNotProvided/1234-5678-9012-3456/{subscriptionId}": { + "post": { + "operationId": "subscriptionInCredentials_postMethodGlobalNotProvidedValid", + "description": "POST method with subscriptionId modeled in credentials. Set the credential subscriptionId to '1234-5678-9012-3456' to succeed", + "tags": [ + "SubscriptionInCredentials Operations" + ], + "parameters": [ + { + "$ref": "#/parameters/globalSubscriptionId" + }, + { + "$ref": "#/parameters/globalApiVersion" + } + ], + "responses": { + "200": { + "description": "Successfully received the '1234-5678-9012-3456' credential value from credentials" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/method/string/none/path/local/1234-5678-9012-3456/{subscriptionId}": { + "post": { + "operationId": "subscriptionInMethod_postMethodLocalValid", + "description": "POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed", + "tags": [ + "SubscriptionInMethod Operations" + ], + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "description": "This should appear as a method parameter, use value '1234-5678-9012-3456'", + "type": "string", + "required": true + } + ], + "responses": { + "200": { + "description": "Successfully received the '1234-5678-9012-3456' credential value from the local parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/method/string/none/path/local/null/{subscriptionId}": { + "post": { + "operationId": "subscriptionInMethod_postMethodLocalNull", + "description": "POST method with subscriptionId modeled in the method. 
pass in subscription id = null, client-side validation should prevent you from making this call", + "tags": [ + "SubscriptionInMethod Operations" + ], + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "description": "This should appear as a method parameter, use value null, client-side validation should prvenet the call", + "type": "string", + "required": true + } + ], + "responses": { + "200": { + "description": "You should not reach this response - client side validation should prevent yopu from sending a null subscriptionId" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/path/string/none/path/global/1234-5678-9012-3456/{subscriptionId}": { + "parameters": [ + { + "$ref": "#/parameters/globalSubscriptionId" + } + ], + "post": { + "operationId": "subscriptionInCredentials_postPathGlobalValid", + "description": "POST method with subscriptionId modeled in credentials. Set the credential subscriptionId to '1234-5678-9012-3456' to succeed", + "tags": [ + "SubscriptionInCredentials Operations" + ], + "responses": { + "200": { + "description": "Successfully received the '1234-5678-9012-3456' credential value from credentials" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/path/string/none/path/local/1234-5678-9012-3456/{subscriptionId}": { + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "description": "Should appear as a method parameter -use value '1234-5678-9012-3456'", + "type": "string", + "required": true + } + ], + "post": { + "operationId": "subscriptionInMethod_postPathLocalValid", + "description": "POST method with subscriptionId modeled in the method. pass in subscription id = '1234-5678-9012-3456' to succeed", + "tags": [ + "SubscriptionInMethod Operations" + ], + "responses": { + "200": { + "description": "Successfully received the '1234-5678-9012-3456' credential value from the local parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/swagger/string/none/path/global/1234-5678-9012-3456/{subscriptionId}": { + "post": { + "operationId": "subscriptionInCredentials_postSwaggerGlobalValid", + "description": "POST method with subscriptionId modeled in credentials. Set the credential subscriptionId to '1234-5678-9012-3456' to succeed", + "tags": [ + "SubscriptionInCredentials Operations" + ], + "parameters": [ + { + "$ref": "#/parameters/globalSubscriptionId" + } + ], + "responses": { + "200": { + "description": "Successfully received the '1234-5678-9012-3456' credential value from credentials" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/subscriptionId/swagger/string/none/path/local/1234-5678-9012-3456/{subscriptionId}": { + "post": { + "operationId": "subscriptionInMethod_postSwaggerLocalValid", + "description": "POST method with subscriptionId modeled in the method. 
pass in subscription id = '1234-5678-9012-3456' to succeed", + "tags": [ + "SubscriptionInMethod Operations" + ], + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "description": "The subscriptionId, which appears in the path, the value is always '1234-5678-9012-3456'", + "type": "string", + "required": true + } + ], + "responses": { + "200": { + "description": "Successfully received the '1234-5678-9012-3456' credential value from the local parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/method/string/none/query/global/2015-07-01-preview": { + "get": { + "operationId": "apiVersionDefault_getMethodGlobalValid", + "description": "GET method with api-version modeled in global settings.", + "tags": [ + "ApiVersionDefault Operations" + ], + "parameters": [ + { + "$ref": "#/parameters/globalApiVersion" + } + ], + "responses": { + "200": { + "description": "Successfully received the '2015-07-01-preview' api-version value from global client settings" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/method/string/none/query/globalNotProvided/2015-07-01-preview": { + "get": { + "operationId": "apiVersionDefault_getMethodGlobalNotProvidedValid", + "description": "GET method with api-version modeled in global settings.", + "tags": [ + "ApiVersionDefault Operations" + ], + "parameters": [ + { + "$ref": "#/parameters/globalApiVersion" + } + ], + "responses": { + "200": { + "description": "Successfully received the '2015-07-01-preview' credential value from global client settings" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/method/string/none/query/local/2.0": { + "get": { + "operationId": "apiVersionLocal_getMethodLocalValid", + "description": "Get method with api-version modeled in the method. pass in api-version = '2.0' to succeed", + "tags": [ + "ApiVersionLocal Operations" + ], + "parameters": [ + { + "name": "api-version", + "in": "query", + "description": "This should appear as a method parameter, use value '2.0'", + "type": "string", + "required": true, + "enum": [ "2.0" ], + "x-ms-api-version": false + } + ], + "responses": { + "200": { + "description": "Successfully received the '2.0' api-version value from the local parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/method/string/none/query/local/null": { + "get": { + "operationId": "apiVersionLocal_getMethodLocalNull", + "description": "Get method with api-version modeled in the method. 
pass in api-version = null to succeed", + "tags": [ + "ApiVersionLocal Operations" + ], + "parameters": [ + { + "name": "api-version", + "in": "query", + "description": "This should appear as a method parameter, use value null, this should result in no serialized parameter", + "type": "string", + "x-ms-api-version": false + } + ], + "responses": { + "200": { + "description": "Successfully received no api-version value from the local parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/path/string/none/query/global/2015-07-01-preview": { + "parameters": [ + { + "$ref": "#/parameters/globalApiVersion" + } + ], + "get": { + "operationId": "apiVersionDefault_getPathGlobalValid", + "description": "GET method with api-version modeled in global settings.", + "tags": [ + "ApiVersionDefault Operations" + ], + "responses": { + "200": { + "description": "Successfully received the '2015-07-01-preview' api-version value from global client settings" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/path/string/none/query/local/2.0": { + "parameters": [ + { + "name": "api-version", + "in": "query", + "description": "This should appear as a method parameter, use value '2.0'", + "type": "string", + "required": true, + "enum": [ "2.0" ], + "x-ms-api-version": false + } + ], + "get": { + "operationId": "apiVersionLocal_getPathLocalValid", + "description": "Get method with api-version modeled in the method. pass in api-version = '2.0' to succeed", + "tags": [ + "ApiVersionLocal Operations" + ], + "responses": { + "200": { + "description": "Successfully received the '2.0' api-version value from the local parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/swagger/string/none/query/global/2015-07-01-preview": { + "get": { + "operationId": "apiVersionDefault_getSwaggerGlobalValid", + "description": "GET method with api-version modeled in global settings.", + "tags": [ + "ApiVersionDefault Operations" + ], + "parameters": [ + { + "$ref": "#/parameters/globalApiVersion" + } + ], + "responses": { + "200": { + "description": "Successfully received the '2015-07-01-preview' api-version value from global client settings" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/apiVersion/swagger/string/none/query/local/2.0": { + "get": { + "operationId": "apiVersionLocal_getSwaggerLocalValid", + "description": "Get method with api-version modeled in the method. 
pass in api-version = '2.0' to succeed", + "tags": [ + "ApiVersionLocal Operations" + ], + "parameters": [ + { + "name": "api-version", + "in": "query", + "description": "The api version, which appears in the query, the value is always '2.0'", + "type": "string", + "required": true, + "enum": [ "2.0" ], + "x-ms-api-version": false + } + ], + "responses": { + "200": { + "description": "Successfully received the '2.0' api-version value from the local parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/skipUrlEncoding/method/path/valid/{unencodedPathParam}": { + "get": { + "operationId": "skipUrlEncoding_getMethodPathValid", + "description": "Get method with unencoded path parameter with value 'path1/path2/path3'", + "tags": [ + "SkipUrlEncoding Operations" + ], + "parameters": [ + { + "name": "unencodedPathParam", + "in": "path", + "description": "Unencoded path parameter with value 'path1/path2/path3'", + "type": "string", + "required": true, + "x-ms-skip-url-encoding": true + } + ], + "responses": { + "200": { + "description": "Successfully received the path parameter with value 'path1/path2/path3'" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/skipUrlEncoding/path/path/valid/{unencodedPathParam}": { + "parameters": [ + { + "name": "unencodedPathParam", + "in": "path", + "description": "Unencoded path parameter with value 'path1/path2/path3'", + "type": "string", + "required": true, + "x-ms-skip-url-encoding": true + } + ], + "get": { + "operationId": "skipUrlEncoding_getPathPathValid", + "description": "Get method with unencoded path parameter with value 'path1/path2/path3'", + "tags": [ + "SkipUrlEncoding Operations" + ], + "responses": { + "200": { + "description": "Successfully received the path parameter with value 'path1/path2/path3'" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/skipUrlEncoding/swagger/path/valid/{unencodedPathParam}": { + "get": { + "operationId": "skipUrlEncoding_getSwaggerPathValid", + "description": "Get method with unencoded path parameter with value 'path1/path2/path3'", + "tags": [ + "SkipUrlEncoding Operations" + ], + "parameters": [ + { + "name": "unencodedPathParam", + "in": "path", + "description": "An unencoded path parameter with value 'path1/path2/path3'", + "type": "string", + "required": true, + "enum": [ "path1/path2/path3" ], + "x-ms-skip-url-encoding": true + } + ], + "responses": { + "200": { + "description": "Successfully received the path parameter with value 'path1/path2/path3'" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/skipUrlEncoding/method/query/valid": { + "get": { + "operationId": "skipUrlEncoding_getMethodQueryValid", + "description": "Get method with unencoded query parameter with value 'value1&q2=value2&q3=value3'", + "tags": [ + "SkipUrlEncoding Operations" + ], + "parameters": [ + { + "name": "q1", + "in": "query", + "description": "Unencoded query parameter with value 'value1&q2=value2&q3=value3'", + "type": "string", + "required": true, + "x-ms-skip-url-encoding": true + } + ], + "responses": { + "200": { + "description": "Successfully received the unencoded query parameter with value 'value1&q2=value2&q3=value3'" + }, + "default": { + 
"description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/skipUrlEncoding/method/query/null": { + "get": { + "operationId": "skipUrlEncoding_getMethodQueryNull", + "description": "Get method with unencoded query parameter with value null", + "tags": [ + "SkipUrlEncoding Operations" + ], + "parameters": [ + { + "name": "q1", + "in": "query", + "description": "Unencoded query parameter with value null", + "type": "string", + "x-ms-skip-url-encoding": true + } + ], + "responses": { + "200": { + "description": "Successfully received no query parameter" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/skipUrlEncoding/path/query/valid": { + "parameters": [ + { + "name": "q1", + "in": "query", + "description": "Unencoded query parameter with value 'value1&q2=value2&q3=value3'", + "type": "string", + "required": true, + "x-ms-skip-url-encoding": true + } + ], + "get": { + "operationId": "skipUrlEncoding_getPathQueryValid", + "description": "Get method with unencoded query parameter with value 'value1&q2=value2&q3=value3'", + "tags": [ + "SkipUrlEncoding Operations" + ], + "responses": { + "200": { + "description": "Successfully received the unencoded query parameter with value 'value1&q2=value2&q3=value3'" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/skipUrlEncoding/swagger/query/valid": { + "get": { + "operationId": "skipUrlEncoding_getSwaggerQueryValid", + "description": "Get method with unencoded query parameter with value 'value1&q2=value2&q3=value3'", + "tags": [ + "SkipUrlEncoding Operations" + ], + "parameters": [ + { + "name": "q1", + "in": "query", + "description": "An unencoded query parameter with value 'value1&q2=value2&q3=value3'", + "type": "string", + "enum": [ "value1&q2=value2&q3=value3" ], + "required": true, + "x-ms-skip-url-encoding": true + } + ], + "responses": { + "200": { + "description": "Successfully received the unencoded query parameter with value 'value1&q2=value2&q3=value3'" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/odata/filter": { + "get": { + "operationId": "odata_getWithFilter", + "description": "Specify filter parameter with value '$filter=id gt 5 and name eq 'foo'&$orderby=id&$top=10'", + "tags": [ + "odata Operations" + ], + "parameters": [ + { + "name": "$filter", + "in": "query", + "required": false, + "type": "string", + "description": "The filter parameter with value '$filter=id gt 5 and name eq 'foo''." + }, + { + "name": "$top", + "in": "query", + "required": false, + "type": "integer", + "format": "int32", + "description": "The top parameter with value 10." + }, + { + "name": "$orderby", + "in": "query", + "required": false, + "type": "string", + "description": "The orderby parameter with value id." 
+ } + ], + "responses": { + "200": { + "description": "Successfully received filter parameter with value '$filter=id gt 5 and name eq 'foo'&$orderby=id&$top=10'" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + }, + "x-ms-odata": "#/definitions/OdataFilter" + } + }, + "/azurespecials/customNamedRequestId": { + "post": { + "operationId": "header_customNamedRequestId", + "description": "Send foo-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 in the header of the request", + "parameters": [ + { + "name": "foo-client-request-id", + "in": "header", + "required": true, + "type": "string", + "description": "The fooRequestId", + "x-ms-client-request-id": true + } + ], + "tags": [ + "Header Operations" + ], + "x-ms-request-id": "foo-request-id", + "responses": { + "200": { + "headers": { + "foo-request-id": { + "description": "Gets the foo-request-id.", + "type": "string" + } + }, + "description": "Gets the foo-request-id" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/customNamedRequestIdParamGrouping": { + "post": { + "operationId": "header_customNamedRequestIdParamGrouping", + "description": "Send foo-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 in the header of the request, via a parameter group", + "parameters": [ + { + "name": "foo-client-request-id", + "in": "header", + "required": true, + "type": "string", + "description": "The fooRequestId", + "x-ms-client-request-id": true, + "x-ms-parameter-grouping": { + } + } + ], + "tags": [ + "Header Operations" + ], + "x-ms-request-id": "foo-request-id", + "responses": { + "200": { + "headers": { + "foo-request-id": { + "description": "Gets the foo-request-id.", + "type": "string" + } + }, + "description": "Gets the foo-request-id" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/azurespecials/customNamedRequestIdHead": { + "head": { + "operationId": "header_customNamedRequestIdHead", + "description": "Send foo-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 in the header of the request", + "parameters": [ + { + "name": "foo-client-request-id", + "in": "header", + "required": true, + "type": "string", + "description": "The fooRequestId", + "x-ms-client-request-id": true + } + ], + "tags": [ + "Header Operations" + ], + "x-ms-request-id": "foo-request-id", + "responses": { + "200": { + "headers": { + "foo-request-id": { + "description": "Gets the foo-request-id.", + "type": "string" + } + }, + "description": "Gets the foo-request-id" + }, + "404": { + "description": "Gets the foo-request-id" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + }, + "parameters": { + "globalSubscriptionId": { + "name": "subscriptionId", + "in": "path", + "description": "The subscription id, which appears in the path, always modeled in credentials. 
The value is always '1234-5678-9012-3456'", + "type": "string", + "required": true + }, + "globalApiVersion": { + "name": "api-version", + "in": "query", + "description": "The api version, which appears in the query, the value is always '2015-07-01-preview'", + "type": "string", + "required": true + } + }, + "definitions": { + "Error": { + "type": "object", + "required": ["constantId"], + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "constantId": { + "type": "integer", + "enum": [ 1 ] + }, + "message": { + "type": "string" + } + } + }, + "OdataFilter": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int32" + }, + "name": { + "type": "string" + } + } + } + } +} diff --git a/tests-upgrade/component-param/readme.md b/tests-upgrade/component-param/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/component-param/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/component-param/swagger.json b/tests-upgrade/component-param/swagger.json new file mode 100644 index 00000000000..731b8c5688d --- /dev/null +++ b/tests-upgrade/component-param/swagger.json @@ -0,0 +1,151 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroup/{resourceGroup}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + } + ], + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Gets the workspace.", + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + } + ], + "responses": { + "200": { + "description": "OK - Deleted the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + } + ], + "responses": { + "200": { + "description": "OK - Created or updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "parameters": [ + { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + }, + { + "$ref": "#/parameters/ResourceGroupName" + } + ], + "responses": { + "200": { + "description": "OK - Updated the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "parameters": { + "ResourceGroupName":{ + "name": "resourceGroup", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90 + } + } +} \ No newline at end of file diff --git a/tests-upgrade/databricks/databricks.json b/tests-upgrade/databricks/databricks.json new file mode 100644 index 00000000000..76df17202e9 --- /dev/null +++ b/tests-upgrade/databricks/databricks.json @@ -0,0 +1,1274 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "x-ms-examples": { + "Get a workspace": { + "$ref": "./examples/WorkspaceGet.json" + }, + "Get a workspace with custom parameters": { + "$ref": "./examples/WorkspaceGetParameters.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns the workspace.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Deletes the workspace.", + "x-ms-examples": { + "Delete a workspace": { + "$ref": "./examples/WorkspaceDelete.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "204": { + "description": "NoContent" + }, + "202": { + "description": "Accepted - Returns this status until the asynchronous operation has completed." + }, + "200": { + "description": "OK. The request has succeeded." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "x-ms-examples": { + "Create or update workspace": { + "$ref": "./examples/WorkspaceCreate.json" + }, + "Create or update workspace with custom parameters": { + "$ref": "./examples/WorkspaceCreateWithParameters.json" + }, + "Create a workspace which is ready for Customer-Managed Key (CMK) encryption": { + "$ref": "./examples/PrepareEncryption.json" + }, + "Enable Customer-Managed Key (CMK) encryption on a workspace which is prepared for encryption": { + "$ref": "./examples/EnableEncryption.json" + }, + "Revert Customer-Managed Key (CMK) encryption to Microsoft Managed Keys encryption on a workspace": { + "$ref": "./examples/DisableEncryption.json" + } + }, + "parameters": [ + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/Workspace" + }, + "description": "Parameters supplied to the create or update a workspace." + }, + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Created - Returns information about the workspace, including provisioning status.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "201": { + "description": "Created - Returns information about the workspace, including provisioning status.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "parameters": [ + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/WorkspaceUpdate" + }, + "description": "The update to the workspace." 
+ }, + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Successfully updated the workspace.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + }, + "202": { + "description": "Accepted" + } + }, + "x-ms-long-running-operation": true, + "x-ms-examples": { + "Update a workspace's tags.": { + "$ref": "./examples/WorkspaceUpdate.json" + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListByResourceGroup", + "description": "Gets all the workspaces within a resource group.", + "x-ms-examples": { + "Lists workspaces": { + "$ref": "./examples/WorkspacesListByResourceGroup.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns an array of workspaces.", + "schema": { + "$ref": "#/definitions/WorkspaceListResult" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListBySubscription", + "description": "Gets all the workspaces within a subscription.", + "x-ms-examples": { + "Lists workspaces": { + "$ref": "./examples/WorkspacesListBySubscription.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns an array of workspaces.", + "schema": { + "$ref": "#/definitions/WorkspaceListResult" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}": { + "get": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_Get", + "description": "Gets the workspace vNet Peering.", + "x-ms-examples": { + "Get a workspace with vNet Peering Configured": { + "$ref": "./examples/WorkspaceVirtualNetPeeringGet.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "200": { + "description": "OK. 
The request has succeeded.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + } + }, + "204": { + "description": "OK - NoContent" + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + } + }, + "delete": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_Delete", + "description": "Deletes the workspace vNetPeering.", + "x-ms-examples": { + "Delete a workspace vNet Peering": { + "$ref": "./examples/WorkspaceVirtualNetworkPeeringDelete.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "202": { + "description": "Accepted - Returns this status until the asynchronous operation has completed." + }, + "204": { + "description": "NoContent" + }, + "200": { + "description": "OK. The request has succeeded." + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + }, + "put": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_CreateOrUpdate", + "description": "Creates vNet Peering for workspace.", + "x-ms-examples": { + "Create vNet Peering for Workspace": { + "$ref": "./examples/WorkspaceVirtualNetworkPeeringCreateOrUpdate.json" + } + }, + "parameters": [ + { + "name": "VirtualNetworkPeeringParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + }, + "description": "Parameters supplied to the create workspace vNet Peering." + }, + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "201": { + "description": "Created - Returns information about the workspace vNet peering, including provisioning status.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + } + }, + "200": { + "description": "Update succeeded - Returns information about the workspace vNet peering.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings": { + "get": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_ListByWorkspace", + "description": "Lists the workspace vNet Peerings.", + "x-ms-examples": { + "List all vNet Peerings for the workspace": { + "$ref": "./examples/WorkspaceVirtualNetPeeringList.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "List succeeded. 
Returns the resulting resource objects in response body.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeeringList" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/providers/Microsoft.Databricks/operations": { + "get": { + "tags": [ + "Operations" + ], + "description": "Lists all of the available RP operations.", + "x-ms-examples": { + "Operations": { + "$ref": "./examples/OperationsList.json" + } + }, + "operationId": "Operations_List", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK. The request has succeeded.", + "schema": { + "$ref": "#/definitions/OperationListResult" + } + }, + "default": { + "description": "Resource Provider error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + } + }, + "definitions": { + "Workspace": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WorkspaceProperties", + "description": "The workspace properties." + }, + "sku": { + "$ref": "#/definitions/Sku", + "description": "The SKU of the resource." + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "description": "Information about workspace." + }, + "WorkspaceProperties": { + "properties": { + "managedResourceGroupId": { + "type": "string", + "description": "The managed resource group Id." + }, + "parameters": { + "$ref": "#/definitions/WorkspaceCustomParameters", + "description": "The workspace's custom parameters." + }, + "provisioningState": { + "readOnly": true, + "$ref": "#/definitions/ProvisioningState", + "description": "The workspace provisioning state." + }, + "uiDefinitionUri": { + "type": "string", + "description": "The blob URI where the UI definition file is located." + }, + "authorizations": { + "description": "The workspace provider authorizations.", + "type": "array", + "items": { + "$ref": "#/definitions/WorkspaceProviderAuthorization" + } + }, + "createdBy": { + "description": "Indicates the Object ID, PUID and Application ID of entity that created the workspace.", + "$ref": "#/definitions/CreatedBy" + }, + "updatedBy": { + "description": "Indicates the Object ID, PUID and Application ID of entity that last updated the workspace.", + "$ref": "#/definitions/CreatedBy" + }, + "createdDateTime": { + "description": "Specifies the date and time when the workspace is created.", + "$ref": "#/definitions/CreatedDateTime" + }, + "workspaceId": { + "readOnly": true, + "description": "The unique identifier of the databricks workspace in databricks control plane.", + "type": "string" + }, + "workspaceUrl": { + "readOnly": true, + "description": "The workspace URL which is of the format 'adb-{workspaceId}.{random}.azuredatabricks.net'", + "type": "string" + }, + "storageAccountIdentity": { + "description": "The details of Managed Identity of Storage Account", + "$ref": "#/definitions/ManagedIdentityConfiguration" + } + }, + "required": [ + "managedResourceGroupId" + ], + "description": "The workspace properties." 
+ }, + "TrackedResource": { + "description": "The resource model definition for a ARM tracked top level resource", + "properties": { + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "Resource tags." + }, + "location": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "The geo-location where the resource lives" + } + }, + "required": [ + "location" + ], + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "Resource": { + "description": "The core properties of ARM resources", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts." + } + }, + "x-ms-azure-resource": true + }, + "Sku": { + "properties": { + "name": { + "type": "string", + "description": "The SKU name." + }, + "tier": { + "type": "string", + "description": "The SKU tier." + } + }, + "required": [ + "name" + ], + "description": "SKU for the resource." + }, + "ProvisioningState": { + "description": "Provisioning status of the workspace.", + "readOnly": true, + "enum": [ + "Accepted", + "Running", + "Ready", + "Creating", + "Created", + "Deleting", + "Deleted", + "Canceled", + "Failed", + "Succeeded", + "Updating" + ], + "type": "string", + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": false + } + }, + "PeeringProvisioningState": { + "type": "string", + "readOnly": true, + "description": "The current provisioning state.", + "enum": [ + "Succeeded", + "Updating", + "Deleting", + "Failed" + ], + "x-ms-enum": { + "name": "PeeringProvisioningState", + "modelAsString": false + } + }, + "ManagedIdentityConfiguration": { + "description": "The Managed Identity details for storage account.", + "properties": { + "principalId": { + "type": "string", + "format": "uuid", + "description": "The objectId of the Managed Identity that is linked to the Managed Storage account.", + "readOnly": true + }, + "tenantId": { + "type": "string", + "format": "uuid", + "description": "The tenant Id where the Managed Identity is created.", + "readOnly": true + }, + "type": { + "type": "string", + "description": "The type of Identity created. It can be either SystemAssigned or UserAssigned.", + "readOnly": true + } + } + }, + "WorkspaceCustomParameterType": { + "description": "Provisioning status of the workspace.", + "readOnly": true, + "enum": [ + "Bool", + "Object", + "String" + ], + "type": "string", + "x-ms-enum": { + "name": "CustomParameterType", + "modelAsString": false + } + }, + "WorkspaceCustomBooleanParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "type": "boolean", + "description": "The value which should be used for this field." + } + }, + "required": [ + "value" + ], + "description": "The value which should be used for this field." 
+ }, + "WorkspaceCustomObjectParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "type": "object", + "description": "The value which should be used for this field." + } + }, + "required": [ + "value" + ], + "description": "The value which should be used for this field." + }, + "WorkspaceCustomStringParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "type": "string", + "description": "The value which should be used for this field." + } + }, + "required": [ + "value" + ], + "description": "The Value." + }, + "WorkspaceCustomParameters": { + "properties": { + "amlWorkspaceId": { + "$ref": "#/definitions/WorkspaceCustomStringParameter", + "description": "The ID of a Azure Machine Learning workspace to link with Databricks workspace" + }, + "customVirtualNetworkId": { + "$ref": "#/definitions/WorkspaceCustomStringParameter", + "description": "The ID of a Virtual Network where this Databricks Cluster should be created" + }, + "customPublicSubnetName": { + "$ref": "#/definitions/WorkspaceCustomStringParameter", + "description": "The name of a Public Subnet within the Virtual Network" + }, + "customPrivateSubnetName": { + "$ref": "#/definitions/WorkspaceCustomStringParameter", + "description": "The name of the Private Subnet within the Virtual Network" + }, + "enableNoPublicIp": { + "$ref": "#/definitions/WorkspaceCustomBooleanParameter", + "description": "Should the Public IP be Disabled?" + }, + "prepareEncryption": { + "$ref": "#/definitions/WorkspaceCustomBooleanParameter", + "description": "Prepare the workspace for encryption. Enables the Managed Identity for managed storage account." + }, + "encryption": { + "$ref": "#/definitions/WorkspaceEncryptionParameter", + "description": "Contains the encryption details for Customer-Managed Key (CMK) enabled workspace." + }, + "requireInfrastructureEncryption": { + "$ref": "#/definitions/WorkspaceCustomBooleanParameter", + "description": "A boolean indicating whether or not the DBFS root file system will be enabled with secondary layer of encryption with platform managed keys for data at rest." + } + }, + "description": "Custom Parameters used for Cluster Creation." + }, + "CreatedDateTime": { + "type": "string", + "format": "date-time", + "description": "The date and time stamp when the workspace was created.", + "readOnly": true + }, + "CreatedBy": { + "properties": { + "oid": { + "type": "string", + "format": "uuid", + "description": "The Object ID that created the workspace.", + "readOnly": true + }, + "puid": { + "type": "string", + "description": "The Personal Object ID corresponding to the object ID above", + "readOnly": true + }, + "applicationId": { + "type": "string", + "format": "uuid", + "description": "The application ID of the application that initiated the creation of the workspace. For example, Azure Portal.", + "readOnly": true + } + }, + "description": "Provides details of the entity that created/updated the workspace." + }, + "WorkspaceEncryptionParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "$ref": "#/definitions/Encryption", + "description": "The value which should be used for this field." 
+ } + }, + "description": "The object that contains details of encryption used on the workspace." + }, + "Encryption": { + "properties": { + "keySource": { + "type": "string", + "description": "The encryption keySource (provider). Possible values (case-insensitive): Default, Microsoft.Keyvault", + "enum": [ + "Default", + "Microsoft.Keyvault" + ], + "x-ms-enum": { + "name": "KeySource", + "modelAsString": false + }, + "default": "Default" + }, + "KeyName": { + "type": "string", + "description": "The name of KeyVault key." + }, + "keyversion": { + "type": "string", + "description": "The version of KeyVault key.", + "x-ms-client-name": "KeyVersion" + }, + "keyvaulturi": { + "type": "string", + "description": "The Uri of KeyVault.", + "x-ms-client-name": "KeyVaultUri" + } + }, + "description": "The object that contains details of encryption used on the workspace." + }, + "WorkspaceProviderAuthorization": { + "properties": { + "principalId": { + "type": "string", + "format": "uuid", + "description": "The provider's principal identifier. This is the identity that the provider will use to call ARM to manage the workspace resources." + }, + "roleDefinitionId": { + "type": "string", + "format": "uuid", + "description": "The provider's role definition identifier. This role will define all the permissions that the provider must have on the workspace's container resource group. This role definition cannot have permission to delete the resource group." + } + }, + "required": [ + "principalId", + "roleDefinitionId" + ], + "description": "The workspace provider authorization." + }, + "WorkspaceUpdate": { + "description": "An update to a workspace.", + "type": "object", + "properties": { + "tags": { + "description": "Resource tags.", + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "ErrorDetail": { + "title": "Error details.", + "type": "object", + "properties": { + "code": { + "description": "The error's code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "target": { + "description": "Indicates which property in the request is responsible for the error.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorInfo": { + "title": "The code and message for an error.", + "type": "object", + "properties": { + "code": { + "description": "A machine readable error code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "details": { + "description": "error details.", + "type": "array", + "items": { + "$ref": "#/definitions/ErrorDetail" + } + }, + "innererror": { + "description": "Inner error details if they exist.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorResponse": { + "title": "Error response.", + "description": "Contains details when the response code indicates an error.", + "type": "object", + "properties": { + "error": { + "description": "The error details.", + "$ref": "#/definitions/ErrorInfo" + } + }, + "required": [ + "error" + ] + }, + "WorkspaceListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Workspace" + }, + "description": "The array of workspaces." + }, + "nextLink": { + "type": "string", + "description": "The URL to use for getting the next set of results." + } + }, + "description": "List of workspaces." 
+ }, + "Operation": { + "description": "REST API operation", + "type": "object", + "properties": { + "name": { + "description": "Operation name: {provider}/{resource}/{operation}", + "type": "string" + }, + "display": { + "description": "The object that represents the operation.", + "properties": { + "provider": { + "description": "Service provider: Microsoft.ResourceProvider", + "type": "string" + }, + "resource": { + "description": "Resource on which the operation is performed.", + "type": "string" + }, + "operation": { + "description": "Operation type: Read, write, delete, etc.", + "type": "string" + } + } + } + } + }, + "OperationListResult": { + "description": "Result of the request to list Resource Provider operations. It contains a list of operations and a URL link to get the next set of results.", + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Operation" + }, + "description": "List of Resource Provider operations supported by the Resource Provider resource provider." + }, + "nextLink": { + "type": "string", + "description": "URL to get the next set of operation list results if there are any." + } + } + }, + "VirtualNetworkPeeringList": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/VirtualNetworkPeering" + }, + "description": "List of virtual network peerings on workspace." + }, + "nextLink": { + "type": "string", + "description": "URL to get the next set of virtual network peering list results if there are any." + } + }, + "description": "Gets all virtual network peerings under a workspace." + }, + "VirtualNetworkPeering": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/VirtualNetworkPeeringPropertiesFormat", + "description": "List of properties for vNet Peering" + }, + "name": { + "type": "string", + "description": "Name of the virtual network peering resource", + "readOnly": true + }, + "id": { + "type": "string", + "description": "Resource ID.", + "readOnly": true + }, + "type": { + "type": "string", + "description": "type of the virtual network peering resource", + "readOnly": true + } + }, + "required": [ + "properties" + ], + "description": "Peerings in a VirtualNetwork resource", + "x-ms-azure-resource": true + }, + "VirtualNetworkPeeringPropertiesFormat": { + "properties": { + "allowVirtualNetworkAccess": { + "type": "boolean", + "description": "Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space." + }, + "allowForwardedTraffic": { + "type": "boolean", + "description": "Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network." + }, + "allowGatewayTransit": { + "type": "boolean", + "description": "If gateway links can be used in remote virtual networking to link to this virtual network." + }, + "useRemoteGateways": { + "type": "boolean", + "description": "If remote gateways can be used on this virtual network. If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network already has a gateway." + }, + "databricksVirtualNetwork": { + "properties": { + "id": { + "type": "string", + "description": "The Id of the databricks virtual network." + } + }, + "description": " The remote virtual network should be in the same region. 
See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)." + }, + "databricksAddressSpace": { + "$ref": "#/definitions/AddressSpace", + "description": "The reference to the databricks virtual network address space." + }, + "remoteVirtualNetwork": { + "properties": { + "id": { + "type": "string", + "description": "The Id of the remote virtual network." + } + }, + "description": " The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)." + }, + "remoteAddressSpace": { + "$ref": "#/definitions/AddressSpace", + "description": "The reference to the remote virtual network address space." + }, + "peeringState": { + "type": "string", + "description": "The status of the virtual network peering.", + "enum": [ + "Initiated", + "Connected", + "Disconnected" + ], + "x-ms-enum": { + "name": "peeringState", + "modelAsString": false + }, + "readOnly": true + }, + "provisioningState": { + "readOnly": true, + "$ref": "#/definitions/PeeringProvisioningState", + "description": "The provisioning state of the virtual network peering resource." + } + }, + "required": [ + "remoteVirtualNetwork" + ], + "description": "Properties of the virtual network peering." + }, + "AddressSpace": { + "properties": { + "addressPrefixes": { + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of address blocks reserved for this virtual network in CIDR notation." + } + }, + "description": "AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network." + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to use for this operation." + }, + "ResourceGroupName": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "WorkspaceName": { + "name": "workspaceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace.", + "minLength": 3, + "maxLength": 64, + "x-ms-parameter-location": "method" + }, + "PeeringNameParameter": { + "name": "peeringName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace vNet peering.", + "x-ms-parameter-location": "method" + } + } +} diff --git a/tests-upgrade/databricks/readme.md b/tests-upgrade/databricks/readme.md new file mode 100644 index 00000000000..fd7d8e5b4ef --- /dev/null +++ b/tests-upgrade/databricks/readme.md @@ -0,0 +1,105 @@ + +# Az.Databricks +This directory contains the PowerShell module for the Databricks service. 
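
For orientation, a call into the generated cmdlets might look like the sketch below. The cmdlet and parameter names are assumptions inferred from the `Az.Databricks` module name and the `resourceGroupName`/`workspaceName` parameters in `databricks.json`; they are not an excerpt from the generated module.

``` powershell
# Hypothetical usage sketch - cmdlet and parameter names are assumed, not taken from generated output.
Connect-AzAccount                                   # authentication is supplied by Az.Accounts (see below)
Get-AzDatabricksWorkspace `
    -ResourceGroupName 'my-resource-group' `
    -Name 'my-databricks-workspace'
```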
+ +--- +## Status +[![Az.Databricks](https://img.shields.io/powershellgallery/v/Az.Databricks.svg?style=flat-square&label=Az.Databricks "Az.Databricks")](https://www.powershellgallery.com/packages/Az.Databricks/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.7.4 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.Databricks`, see [how-to.md](how-to.md). + + +--- +## Generation Requirements +Use of the beta version of `autorest.powershell` generator requires the following: +- [NodeJS LTS](https://nodejs.org) (10.15.x LTS preferred) + - **Note**: It *will not work* with Node < 10.x. Using 11.x builds may cause issues as they may introduce instability or breaking changes. +> If you want an easy way to install and update Node, [NVS - Node Version Switcher](../nodejs/installing-via-nvs.md) or [NVM - Node Version Manager](../nodejs/installing-via-nvm.md) is recommended. +- [AutoRest](https://aka.ms/autorest) v3 beta
`npm install -g "@autorest/autorest"`
  +- PowerShell 6.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g pwsh`
  +- .NET Core SDK 2.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g dotnet-sdk-2.2`
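
With the tooling above installed, a quick pre-flight check along these lines confirms everything is on the PATH before generating. This is a minimal sketch; the `autorest --info` call assumes the CLI reports its installed core and extensions.

``` powershell
# Pre-flight check for the generation toolchain (versions in comments are the minimums listed above).
node --version      # expect a 10.x LTS build
autorest --info     # assumed to list the installed AutoRest core and extensions
pwsh -Version       # expect 6.0 or greater
dotnet --version    # expect .NET Core SDK 2.0 or greater
```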
  + +## Run Generation +In this directory, run AutoRest: +> `autorest-beta` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - ./databricks.json + +module-version: 0.0.1 +title: Databricks +subject-prefix: $(service-name) + +inlining-threshold: 40 + +directive: + # Fix the error in swagger, RP actually returns 200 when deletion succeeds + # - from: swagger-document + # where: $ + # transform: return $.replace(/204/g, "200") + # Remove the unexpanded parameter set + - where: + variant: ^Create$|^CreateViaIdentity$|^CreateViaIdentityExpanded$|^Update$|^UpdateViaIdentity$ + remove: true + # Rename the parameter name to follow Azure PowerShell best practice + - where: + parameter-name: SkuName + set: + parameter-name: Sku + - where: + parameter-name: CustomVirtualNetworkIdValue + set: + parameter-name: VirtualNetworkId + - where: + parameter-name: CustomPublicSubnetNameValue + set: + parameter-name: PublicSubnetName + - where: + parameter-name: CustomPrivateSubnetNameValue + set: + parameter-name: PrivateSubnetName + # Remove the set-* cmdlet + - where: + verb: Set + remove: true + # Hide the New-* cmdlet for customization + - where: + verb: New + hide: true + - where: + model-name: Workspace + set: + format-table: + properties: + - Name + - Location + - ManagedResourceGroupId + labels: + ManagedResourceGroupId: Managed Resource Group ID +``` diff --git a/tests-upgrade/datamodels-combineschema/readme.md b/tests-upgrade/datamodels-combineschema/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-combineschema/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-combineschema/swagger.json b/tests-upgrade/datamodels-combineschema/swagger.json new file mode 100644 index 00000000000..9f83a802c31 --- /dev/null +++ b/tests-upgrade/datamodels-combineschema/swagger.json @@ -0,0 +1,105 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/pets": { + "patch": { + "tags": [ + "pets" + ], + "operationId": "Pets_Update", + "description": "Gets pet list.", + "parameters": [ + { + "name": "pets", + "in": "body", + "required": true, + "description": "The pets update.", + "schema": { + "$ref":"#/definitions/Dog" + } + } + ], + "responses": { + "200": { + "description": "OK-Return workspace." 
+ } + } + } + } + }, + "definitions": { + "Pet": { + "type": "object", + "properties": { + "petType": { + "type": "string" + } + } + }, + "Dog":{ + "type": "object", + "properties": { + "bark": { + "type": "boolean" + }, + "breed": { + "type": "string" + } + } + }, + "Cat": { + "allOf": [ + { + "$ref": "#/definitions/Pet" + + }, + { + "type": "object", + "properties": { + "hunts": { + "type": "boolean" + }, + "age": { + "type": "integer" + } + } + } + ] + + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-array/readme.md b/tests-upgrade/datamodels-datatypes-array/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-array/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-array/swagger.json b/tests-upgrade/datamodels-datatypes-array/swagger.json new file mode 100644 index 00000000000..6e69cbf3baa --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-array/swagger.json @@ -0,0 +1,102 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace.", + "schema":{ + "$ref": "#/definitions/Workspace" + } + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + }, + "definitions": { + "ErrorDetail": { + "type": "array", + "items":{ + "type": "integer" + }, + "uniqueItems": true + }, + "ErrorList": { + "type": "array", + "items": { + "$ref": "#/definitions/ErrorDetail" + }, + "minItems": 1, + "maxItems": 10 + }, + "ErrorCode": { + "type": "array", + "items": { + "type":"array", + "items": { + "type": "integer" + } + } + }, + "Workspace":{ + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "sku": { + "type": "string" + }, + "location": { + "type": "string" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-boolean-quirks/readme.md b/tests-upgrade/datamodels-datatypes-boolean-quirks/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean-quirks/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-boolean-quirks/swagger.json b/tests-upgrade/datamodels-datatypes-boolean-quirks/swagger.json new file mode 100644 index 00000000000..c5aa1f4a374 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean-quirks/swagger.json @@ -0,0 +1,200 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/bool/true": { + "get": { + "operationId": "bool_getTrue", + "description": "Get true Boolean value", + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The true Boolean value", + "schema": { + "type": "boolean" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "put": { + "operationId": "bool_putTrue", + "description": "Set Boolean value true", + "parameters": [ + { + "name": "boolBody", + "in": "body", + "schema" : { + "type": "boolean" + }, + "required": true + } + ], + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "Empty Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/bool/false": { + "get": { + "operationId": "bool_getFalse", + "description": "Get false Boolean value", + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The false Boolean value", + "schema": { + "type": "boolean" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "put": { + "operationId": "bool_putFalse", + "description": "Set Boolean value false", + "parameters": [ + { + "name": "boolBody", + "in": "body", + "schema" : { + "type": "boolean" + }, + "required": true + } + ], + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "Empty 
Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/bool/null": { + "get": { + "operationId": "bool_getNull", + "description": "Get null Boolean value", + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The null Boolean value", + "schema": { + "type": "boolean" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/bool/invalid": { + "get": { + "operationId": "bool_getInvalid", + "description": "Get invalid Boolean value", + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The invalid Boolean value", + "schema": { + "type": "boolean" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } +}, +"definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } +} +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getFalse.json b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getFalse.json new file mode 100644 index 00000000000..0bc3b35e600 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getFalse.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": false + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getNull.json b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getNull.json new file mode 100644 index 00000000000..e24179d12d9 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getNull.json @@ -0,0 +1,6 @@ +{ + "parameters": {}, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getTrue.json b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getTrue.json new file mode 100644 index 00000000000..235cd259450 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_getTrue.json @@ -0,0 +1,9 @@ +{ + "parameters": { + }, + "responses": { + "200": { + "body": true + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-boolean/examples/bool_putFalse.json b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_putFalse.json new file mode 100644 index 00000000000..ab9238e5832 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_putFalse.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "boolBody": false + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-boolean/examples/bool_putTrue.json b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_putTrue.json new file mode 100644 index 00000000000..d6adf6c69f7 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean/examples/bool_putTrue.json @@ -0,0 +1,9 @@ +{ + "parameters": { + "boolBody": true + }, + "responses": { + "200": { + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-boolean/readme.md b/tests-upgrade/datamodels-datatypes-boolean/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - 
$(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-boolean/swagger.json b/tests-upgrade/datamodels-datatypes-boolean/swagger.json new file mode 100644 index 00000000000..d5e03f5e06c --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-boolean/swagger.json @@ -0,0 +1,230 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/bool/true": { + "get": { + "operationId": "bool_getTrue", + "description": "Get true Boolean value", + "x-ms-examples": { + "bool_getTrue": { + "$ref": "./examples/bool_getTrue.json" + } + }, + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The true Boolean value", + "schema": { + "type": "boolean", + "enum": [true] + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "put": { + "operationId": "bool_putTrue", + "description": "Set Boolean value true", + "x-ms-examples": { + "bool_putTrue": { + "$ref": "./examples/bool_putTrue.json" + } + }, + "parameters": [ + { + "name": "boolBody", + "in": "body", + "schema" : { + "type": "boolean", + "enum": [true] + }, + "required": true + } + ], + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "Empty Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/bool/false": { + "get": { + "operationId": "bool_getFalse", + "description": "Get false Boolean value", + "x-ms-examples": { + "bool_getFalse": { + "$ref": "./examples/bool_getFalse.json" + } + }, + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The false Boolean value", + "schema": { + "type": "boolean", + "enum": [false] + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "put": { + "operationId": "bool_putFalse", + "description": "Set Boolean value false", + "x-ms-examples": { + "bool_putFalse": { + "$ref": "./examples/bool_putFalse.json" + } + }, + "parameters": [ + { + "name": "boolBody", + "in": "body", + "schema" : { + "type": "boolean", + "enum": [false] + }, + "required": true + } + ], + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "Empty Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/bool/null": { + "get": { + "operationId": "bool_getNull", + "description": "Get null Boolean value", + "x-ms-examples": { + "bool_getNull": { + "$ref": "./examples/bool_getNull.json" + } + }, + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The null Boolean value", + "schema": { + "type": "boolean", + "x-nullable": true + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": 
"#/definitions/Error" + } + } + } + } + }, + "/bool/invalid": { + "get": { + "operationId": "bool_getInvalid", + "description": "Get invalid Boolean value", + "tags": [ + "Bool Operations" + ], + "responses": { + "200": { + "description": "The invalid Boolean value", + "schema": { + "type": "boolean" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } +}, +"definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } +} +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-byte/examples/byte_getEmpty.json b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getEmpty.json new file mode 100644 index 00000000000..2b04d0f8a9f --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getEmpty.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-byte/examples/byte_getInvalid.json b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getInvalid.json new file mode 100644 index 00000000000..f47ede114d9 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getInvalid.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "::::SWAGGER::::" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-byte/examples/byte_getNonAscii.json b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getNonAscii.json new file mode 100644 index 00000000000..4fe17d43890 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getNonAscii.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "//79/Pv6+fj39g==" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-byte/examples/byte_getNull.json b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getNull.json new file mode 100644 index 00000000000..e24179d12d9 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-byte/examples/byte_getNull.json @@ -0,0 +1,6 @@ +{ + "parameters": {}, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-byte/examples/byte_putNonAscii.json b/tests-upgrade/datamodels-datatypes-byte/examples/byte_putNonAscii.json new file mode 100644 index 00000000000..367268e0f3e --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-byte/examples/byte_putNonAscii.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "byteBody": "//79/Pv6+fj39g==" + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-byte/readme.md b/tests-upgrade/datamodels-datatypes-byte/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-byte/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-byte/swagger.json b/tests-upgrade/datamodels-datatypes-byte/swagger.json new file mode 100644 index 00000000000..aa5d7a1e4ec --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-byte/swagger.json @@ -0,0 +1,188 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + 
"description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/byte/null": { + "get": { + "operationId": "byte_getNull", + "description": "Get null byte value", + "x-ms-examples": { + "byte_getNull": { + "$ref": "./examples/byte_getNull.json" + } + }, + "tags": [ + "Byte Operations" + ], + "responses": { + "200": { + "description": "The null byte value", + "schema": { + "description": "The null byte value", + "type": "string", + "format": "byte" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/byte/empty": { + "get": { + "operationId": "byte_getEmpty", + "description": "Get empty byte value ''", + "tags": [ + "Byte Operations" + ], + "responses": { + "200": { + "description": "The empty byte value ''", + "schema": { + "description": "The empty byte value ''", + "type": "string", + "format": "byte" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/byte/nonAscii": { + "get": { + "operationId": "byte_getNonAscii", + "description": "Get non-ascii byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)", + "tags": [ + "Byte Operations" + ], + "responses": { + "200": { + "description": "Non-ascii base-64 encoded byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)", + "schema": { + "description": "Non-ascii base-64 encoded byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)", + "type": "string", + "format": "byte" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "put": { + "operationId": "byte_putNonAscii", + "description": "Put non-ascii byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)", + "parameters": [ + { + "name": "byteBody", + "in": "body", + "description": "Base64-encoded non-ascii byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)", + "schema" : { + "description": "Base64-encoded non-ascii byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)", + "type": "string", + "format": "byte" + }, + "required": true + } + ], + "tags": [ + "Byte Operations" + ], + "responses": { + "200": { + "description": "Empty Response" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/byte/invalid": { + "get": { + "operationId": "byte_getInvalid", + "description": "Get invalid byte value ':::SWAGGER::::'", + "tags": [ + "Byte Operations" + ], + "responses": { + "200": { + "description": "The invalid byte value '::::SWAGGER::::'", + "schema": { + "description": "The invalid byte value '::::SWAGGER::::'", + "type": "string", + "format": "byte" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } +}, +"definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } +} +} \ No newline at end of file diff --git 
a/tests-upgrade/datamodels-datatypes-date/examples/date_getMaxDate.json b/tests-upgrade/datamodels-datatypes-date/examples/date_getMaxDate.json new file mode 100644 index 00000000000..80c8e23555b --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-date/examples/date_getMaxDate.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "9999-12-31" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-date/examples/date_getMinDate.json b/tests-upgrade/datamodels-datatypes-date/examples/date_getMinDate.json new file mode 100644 index 00000000000..e6e618742a7 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-date/examples/date_getMinDate.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "0001-01-01" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-date/examples/date_getNull.json b/tests-upgrade/datamodels-datatypes-date/examples/date_getNull.json new file mode 100644 index 00000000000..2b04d0f8a9f --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-date/examples/date_getNull.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-date/examples/date_putMaxDate.json b/tests-upgrade/datamodels-datatypes-date/examples/date_putMaxDate.json new file mode 100644 index 00000000000..52b4c3b0c02 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-date/examples/date_putMaxDate.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "dateBody": "9999-12-31" + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-date/examples/date_putMinDate.json b/tests-upgrade/datamodels-datatypes-date/examples/date_putMinDate.json new file mode 100644 index 00000000000..4818e46a278 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-date/examples/date_putMinDate.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "dateBody": "0001-01-01" + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-date/readme.md b/tests-upgrade/datamodels-datatypes-date/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-date/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-date/swagger.json b/tests-upgrade/datamodels-datatypes-date/swagger.json new file mode 100644 index 00000000000..2ad4c3d55fa --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-date/swagger.json @@ -0,0 +1,259 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/date/null": { + "get": { + "operationId": "date_getNull", + 
"description": "Get null date value", + "x-ms-examples": { + "date_getNull": { + "$ref": "./examples/date_getNull.json" + } + }, + "responses": { + "200": { + "description": "The null date value", + "schema": { + "type": "string", + "format": "date", + "x-nullable": true + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/date/invaliddate": { + "get": { + "operationId": "date_getInvalidDate", + "description": "Get invalid date value", + "responses": { + "200": { + "description": "The invalid date value", + "schema": { + "type": "string", + "format": "date" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/date/overflowdate": { + "get": { + "operationId": "date_getOverflowDate", + "description": "Get overflow date value", + "responses": { + "200": { + "description": "The overflow date value", + "schema": { + "type": "string", + "format": "date" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/date/underflowdate": { + "get": { + "operationId": "date_getUnderflowDate", + "description": "Get underflow date value", + "responses": { + "200": { + "description": "The underflow date value", + "schema": { + "type": "string", + "format": "date" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/date/max": { + "put": { + "operationId": "date_putMaxDate", + "description": "Put max date value 9999-12-31", + "x-ms-examples": { + "date_putMaxDate": { + "$ref": "./examples/date_putMaxDate.json" + } + }, + "parameters": [ + { + "name": "dateBody", + "description": "date body", + "in": "body", + "schema": { + "type": "string", + "format": "date" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The max date value" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "date_getMaxDate", + "description": "Get max date value 9999-12-31", + "x-ms-examples": { + "date_getMaxDate": { + "$ref": "./examples/date_getMaxDate.json" + } + }, + "responses": { + "200": { + "description": "The max date value", + "schema": { + "type": "string", + "format": "date" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/date/min": { + "put": { + "operationId": "date_putMinDate", + "description": "Put min date value 0000-01-01", + "x-ms-examples": { + "date_putMinDate": { + "$ref": "./examples/date_putMinDate.json" + } + }, + "parameters": [ + { + "name": "dateBody", + "description": "date body", + "in": "body", + "schema": { + "type": "string", + "format": "date" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The min date value" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "date_getMinDate", + "description": "Get min date value 0000-01-01", + "x-ms-examples": { + "date_getMinDate": { + "$ref": "./examples/date_getMinDate.json" + } + }, + "responses": { + "200": { + "description": "The min date value 0000-01-01", + "schema": { + "type": "string", + "format": "date", + "enum": ["0000-01-01"] + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": 
"#/definitions/Error" + } + } + } + } + } + }, + "definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalNegativeOffsetMinDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalNegativeOffsetMinDateTime.json new file mode 100644 index 00000000000..52d3a74d665 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalNegativeOffsetMinDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "0001-01-01T00:00:00-14:00" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalNoOffsetMinDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalNoOffsetMinDateTime.json new file mode 100644 index 00000000000..93f7fa64dba --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalNoOffsetMinDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "0001-01-01T00:00:00" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalPositiveOffsetLowercaseMaxDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalPositiveOffsetLowercaseMaxDateTime.json new file mode 100644 index 00000000000..45d97ca792c --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalPositiveOffsetLowercaseMaxDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "9999-12-31t23:59:59.999+14:00" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalPositiveOffsetUppercaseMaxDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalPositiveOffsetUppercaseMaxDateTime.json new file mode 100644 index 00000000000..7017f3eb651 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getLocalPositiveOffsetUppercaseMaxDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "9999-12-31T23:59:59.999+14:00" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getNull.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getNull.json new file mode 100644 index 00000000000..2b04d0f8a9f --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getNull.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcLowercaseMaxDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcLowercaseMaxDateTime.json new file mode 100644 index 00000000000..65c7dbe7f73 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcLowercaseMaxDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "9999-12-31t23:59:59.999z" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcMinDateTime.json 
b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcMinDateTime.json new file mode 100644 index 00000000000..88cbd30a2a8 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcMinDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "0001-01-01T00:00:00Z" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcUppercaseMaxDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcUppercaseMaxDateTime.json new file mode 100644 index 00000000000..a66b21645bd --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_getUtcUppercaseMaxDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": {}, + "responses": { + "200": { + "body": "9999-12-31T23:59:59.999Z" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putLocalNegativeOffsetMinDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putLocalNegativeOffsetMinDateTime.json new file mode 100644 index 00000000000..0c1722ad7e6 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putLocalNegativeOffsetMinDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "datetimeBody": "0001-01-01T00:00:00-14:00" + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putLocalPositiveOffsetMaxDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putLocalPositiveOffsetMaxDateTime.json new file mode 100644 index 00000000000..30d6a937fb2 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putLocalPositiveOffsetMaxDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "datetimeBody": "9999-12-31T23:59:59.999+14:00" + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putUtcMaxDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putUtcMaxDateTime.json new file mode 100644 index 00000000000..2791778a24a --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putUtcMaxDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "datetimeBody": "9999-12-31T23:59:59.999Z" + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putUtcMinDateTime.json b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putUtcMinDateTime.json new file mode 100644 index 00000000000..64cf4e57c2b --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/examples/datetime_putUtcMinDateTime.json @@ -0,0 +1,8 @@ +{ + "parameters": { + "datetimeBody": "0001-01-01T00:00:00Z" + }, + "responses": { + "200": {} + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-datetime/readme.md b/tests-upgrade/datamodels-datatypes-datetime/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-datetime/swagger.json b/tests-upgrade/datamodels-datatypes-datetime/swagger.json new file mode 
100644 index 00000000000..eee87438506 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-datetime/swagger.json @@ -0,0 +1,636 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/datetime/null": { + "get": { + "operationId": "datetime_getNull", + "description": "Get null datetime value", + "x-ms-examples": { + "datetime_getNull": { + "$ref": "./examples/datetime_getNull.json" + } + }, + "responses": { + "200": { + "description": "The null datetime value", + "schema": { + "type": "string", + "format": "date-time", + "x-nullable": true + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/invalid": { + "get": { + "operationId": "datetime_getInvalid", + "description": "Get invalid datetime value", + "responses": { + "200": { + "description": "The invalid datetime value", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/overflow": { + "get": { + "operationId": "datetime_getOverflow", + "description": "Get overflow datetime value", + "responses": { + "200": { + "description": "The overflow datetime value", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/underflow": { + "get": { + "operationId": "datetime_getUnderflow", + "description": "Get underflow datetime value", + "responses": { + "200": { + "description": "The underflow datetime value", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/utc": { + "put": { + "operationId": "datetime_putUtcMaxDateTime", + "description": "Put max datetime value 9999-12-31T23:59:59.999Z", + "x-ms-examples": { + "datetime_putUtcMaxDateTime": { + "$ref": "./examples/datetime_putUtcMaxDateTime.json" + } + }, + "parameters": [ + { + "name": "datetimeBody", + "description": "datetime body", + "in": "body", + "schema": { + "type": "string", + "format": "date-time" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The max datetime value 9999-12-31T23:59:59.999Z" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/utc7ms": { + "put": { + "operationId": "datetime_putUtcMaxDateTime7Digits", + "description": "This is against the recommendation that asks for 3 digits, but allow to test what happens in that scenario", + "summary": "Put max datetime value 9999-12-31T23:59:59.9999999Z", + "parameters": [ + { + "name": "datetimeBody", + "description": "datetime body", + "in": "body", + "schema": { + "type": 
"string", + "format": "date-time" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The max datetime value 9999-12-31T23:59:59.9999999Z" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/utc/lowercase": { + "get": { + "operationId": "datetime_getUtcLowercaseMaxDateTime", + "description": "Get max datetime value 9999-12-31t23:59:59.999z", + "x-ms-examples": { + "datetime_getUtcLowercaseMaxDateTime": { + "$ref": "./examples/datetime_getUtcLowercaseMaxDateTime.json" + } + }, + "responses": { + "200": { + "description": "The max datetime value 9999-12-31t23:59:59.999z", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/utc/uppercase": { + "get": { + "operationId": "datetime_getUtcUppercaseMaxDateTime", + "description": "Get max datetime value 9999-12-31T23:59:59.999Z", + "x-ms-examples": { + "datetime_getUtcUppercaseMaxDateTime": { + "$ref": "./examples/datetime_getUtcUppercaseMaxDateTime.json" + } + }, + "responses": { + "200": { + "description": "The max datetime value 9999-12-31T23:59:59.999Z", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/utc7ms/uppercase": { + "get": { + "operationId": "datetime_getUtcUppercaseMaxDateTime7Digits", + "description": "This is against the recommendation that asks for 3 digits, but allow to test what happens in that scenario", + "summary": "Get max datetime value 9999-12-31T23:59:59.9999999Z", + "responses": { + "200": { + "description": "The max datetime value 9999-12-31T23:59:59.9999999Z", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/localpositiveoffset": { + "put": { + "operationId": "datetime_putLocalPositiveOffsetMaxDateTime", + "description": "Put max datetime value with positive numoffset 9999-12-31t23:59:59.999+14:00", + "x-ms-examples": { + "datetime_putLocalPositiveOffsetMaxDateTime": { + "$ref": "./examples/datetime_putLocalPositiveOffsetMaxDateTime.json" + } + }, + "parameters": [ + { + "name": "datetimeBody", + "description": "datetime body", + "in": "body", + "schema": { + "type": "string", + "format": "date-time" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The max datetime value with positive num offset 9999-12-31T23:59:59.999+14:00" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/localpositiveoffset/lowercase": { + "get": { + "operationId": "datetime_getLocalPositiveOffsetLowercaseMaxDateTime", + "description": "Get max datetime value with positive num offset 9999-12-31t23:59:59.999+14:00", + "x-ms-examples": { + "datetime_getLocalPositiveOffsetLowercaseMaxDateTime": { + "$ref": "./examples/datetime_getLocalPositiveOffsetLowercaseMaxDateTime.json" + } + }, + "responses": { + "200": { + "description": "The max datetime value 9999-12-31t23:59:59.999+14:00", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + 
"/datetime/max/localpositiveoffset/uppercase": { + "get": { + "operationId": "datetime_getLocalPositiveOffsetUppercaseMaxDateTime", + "description": "Get max datetime value with positive num offset 9999-12-31T23:59:59.999+14:00", + "x-ms-examples": { + "datetime_getLocalPositiveOffsetUppercaseMaxDateTime": { + "$ref": "./examples/datetime_getLocalPositiveOffsetUppercaseMaxDateTime.json" + } + }, + "responses": { + "200": { + "description": "The max datetime value 9999-12-31T23:59:59.999+14:00", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/localnegativeoffset": { + "put": { + "operationId": "datetime_putLocalNegativeOffsetMaxDateTime", + "description": "Put max datetime value with positive numoffset 9999-12-31t23:59:59.999-14:00", + "parameters": [ + { + "name": "datetimeBody", + "description": "datetime body", + "in": "body", + "schema": { + "type": "string", + "format": "date-time" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The max datetime value with positive num offset 9999-12-31T23:59:59.999+14:00" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/localnegativeoffset/uppercase": { + "get": { + "operationId": "datetime_getLocalNegativeOffsetUppercaseMaxDateTime", + "description": "Get max datetime value with positive num offset 9999-12-31T23:59:59.999-14:00", + "responses": { + "200": { + "description": "The max datetime value 9999-12-31T23:59:59.999-14:00", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/max/localnegativeoffset/lowercase": { + "get": { + "operationId": "datetime_getLocalNegativeOffsetLowercaseMaxDateTime", + "description": "Get max datetime value with positive num offset 9999-12-31t23:59:59.999-14:00", + "responses": { + "200": { + "description": "The max datetime value 9999-12-31t23:59:59.999-14:00", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/min/utc": { + "put": { + "operationId": "datetime_putUtcMinDateTime", + "description": "Put min datetime value 0001-01-01T00:00:00Z", + "x-ms-examples": { + "datetime_putUtcMinDateTime": { + "$ref": "./examples/datetime_putUtcMinDateTime.json" + } + }, + "parameters": [ + { + "name": "datetimeBody", + "description": "datetime body", + "in": "body", + "schema": { + "type": "string", + "format": "date-time" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The min datetime value 0001-01-01T00:00:00Z" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "datetime_getUtcMinDateTime", + "description": "Get min datetime value 0001-01-01T00:00:00Z", + "x-ms-examples": { + "datetime_getUtcMinDateTime": { + "$ref": "./examples/datetime_getUtcMinDateTime.json" + } + }, + "responses": { + "200": { + "description": "The min datetime value 0001-01-01T00:00:00Z", + "schema": { + "type": "string", + "format": "date-time", + "enum": [ + "0001-01-01T00:00:00Z" + ] + } + }, + "default": { + "description": "Unexpected error", + "schema": { + 
"$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/min/localpositiveoffset": { + "put": { + "operationId": "datetime_putLocalPositiveOffsetMinDateTime", + "description": "Put min datetime value 0001-01-01T00:00:00+14:00", + "parameters": [ + { + "name": "datetimeBody", + "description": "datetime body", + "in": "body", + "schema": { + "type": "string", + "format": "date-time" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The min datetime value 0001-01-01T00:00:00+14:00" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "datetime_getLocalPositiveOffsetMinDateTime", + "description": "Get min datetime value 0001-01-01T00:00:00+14:00", + "responses": { + "200": { + "description": "The min datetime value 0001-01-01T00:00:00+14:00", + "schema": { + "type": "string", + "format": "date-time", + "enum": [ + "0001-01-01t00:00:00+14:00" + ] + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/min/localnegativeoffset": { + "put": { + "operationId": "datetime_putLocalNegativeOffsetMinDateTime", + "description": "Put min datetime value 0001-01-01T00:00:00-14:00", + "x-ms-examples": { + "datetime_putLocalNegativeOffsetMinDateTime": { + "$ref": "./examples/datetime_putLocalNegativeOffsetMinDateTime.json" + } + }, + "parameters": [ + { + "name": "datetimeBody", + "description": "datetime body", + "in": "body", + "schema": { + "type": "string", + "format": "date-time" + }, + "required": true + } + ], + "responses": { + "200": { + "description": "The min datetime value 0001-01-01T00:00:00+14:00" + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "get": { + "operationId": "datetime_getLocalNegativeOffsetMinDateTime", + "description": "Get min datetime value 0001-01-01T00:00:00-14:00", + "x-ms-examples": { + "datetime_getLocalNegativeOffsetMinDateTime": { + "$ref": "./examples/datetime_getLocalNegativeOffsetMinDateTime.json" + } + }, + "responses": { + "200": { + "description": "The min datetime value 0001-01-01T00:00:00-14:00", + "schema": { + "type": "string", + "format": "date-time", + "enum": [ + "0001-01-01t00:00:00-14:00" + ] + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/datetime/min/localnooffset": { + "get": { + "operationId": "datetime_getLocalNoOffsetMinDateTime", + "description": "Get min datetime value 0001-01-01T00:00:00", + "x-ms-examples": { + "datetime_getLocalNoOffsetMinDateTime": { + "$ref": "./examples/datetime_getLocalNoOffsetMinDateTime.json" + } + }, + "responses": { + "200": { + "description": "The min datetime value 0001-01-01T00:00:00", + "schema": { + "type": "string", + "format": "date-time" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + }, + "definitions": { + "Error": { + "type": "object", + "properties": { + "status": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-file/readme.md b/tests-upgrade/datamodels-datatypes-file/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-file/readme.md @@ -0,0 +1,10 @@ +### AutoRest 
Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-file/swagger.json b/tests-upgrade/datamodels-datatypes-file/swagger.json new file mode 100644 index 00000000000..88f1a2f3320 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-file/swagger.json @@ -0,0 +1,64 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." + } + } + } + } + }, + "definitions": { + "FilesModel": { + "type": "object", + "properties": { + "file": { + "type": "string", + "format": "binary" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-integer/readme.md b/tests-upgrade/datamodels-datatypes-integer/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-integer/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-integer/swagger.json b/tests-upgrade/datamodels-datatypes-integer/swagger.json new file mode 100644 index 00000000000..a08d8d9e3c8 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-integer/swagger.json @@ -0,0 +1,81 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/BasicErrorModel" + } + } + } + } + } + }, + "definitions": { + "BasicErrorModel": { + "type": "object", + "required":[ + "message", + "code" + ], + "properties": { + "message": { + "type": "string" + }, + "code": { + "type": "integer", + "minimum": 99, + "maximum": 600, + "exclusiveMinimum": true, + "exclusiveMaximum": true, + "multipleOf": 10 + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-mixedtypes/readme.md b/tests-upgrade/datamodels-datatypes-mixedtypes/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-mixedtypes/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-mixedtypes/swagger.json b/tests-upgrade/datamodels-datatypes-mixedtypes/swagger.json new file mode 100644 index 00000000000..a9dca898e4e --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-mixedtypes/swagger.json @@ -0,0 +1,83 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/BasicErrorModel" + } + } + } + } + } + }, + "definitions": { + "BasicErrorModel": { + "type": "object", + "required":[ + "message", + "code" + ], + "properties": { + "message": { + "type": "string" + }, + "code": { + "oneOf": [ + { + "type": "integer" + }, + { + "type": "string" + } + ] + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-nullable/readme.md b/tests-upgrade/datamodels-datatypes-nullable/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-nullable/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-nullable/swagger.json b/tests-upgrade/datamodels-datatypes-nullable/swagger.json new file mode 100644 index 00000000000..e3832914d01 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-nullable/swagger.json @@ -0,0 +1,87 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/BasicErrorModel" + } + } + } + } + } + }, + "definitions": { + "BasicErrorModel": { + "type": "object", + "required":[ + "message", + "code" + ], + "properties": { + "datetime": { + "type": "string", + "format": "date-time" + }, + "host": { + "type": "string", + "pattern":"^\\d{3}-\\d{2}-\\d{4}$", + "nullable": true + }, + "message": { + "type": "string", + "minLength": 3, + "maxLength": 300 + }, + "code": { + "type": "integer" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-object/readme.md b/tests-upgrade/datamodels-datatypes-object/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-object/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-object/swagger.json b/tests-upgrade/datamodels-datatypes-object/swagger.json new file mode 100644 index 00000000000..1f3304195bd --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-object/swagger.json @@ -0,0 +1,125 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." 
+ } + } + } + } + }, + "definitions": { + "Model": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "code": { + "type": "integer" + } + } + }, + "Model2": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "username": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": [ + "id", + "username" + ] + }, + "Model3": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "readOnly": true + }, + "password": { + "type": "string" + } + }, + "required": [ + "id", + "username" + ] + }, + "ContactInfo": { + "type": "object", + "properties": { + "email": { + "type": "string", + "format": "email" + }, + "phone": { + "type": "string" + } + } + }, + "User": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "contact_info": { + "$ref": "#/definitions/ContactInfo" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-string/readme.md b/tests-upgrade/datamodels-datatypes-string/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-string/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-string/swagger.json b/tests-upgrade/datamodels-datatypes-string/swagger.json new file mode 100644 index 00000000000..f1f9d58cb9d --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-string/swagger.json @@ -0,0 +1,86 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/BasicErrorModel" + } + } + } + } + } + }, + "definitions": { + "BasicErrorModel": { + "type": "object", + "required":[ + "message", + "code" + ], + "properties": { + "datetime": { + "type": "string", + "format": "date-time" + }, + "host": { + "type": "string", + "pattern":"^\\d{3}-\\d{2}-\\d{4}$" + }, + "message": { + "type": "string", + "minLength": 3, + "maxLength": 300 + }, + "code": { + "type": "integer" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-datatypes-unixtime/readme.md b/tests-upgrade/datamodels-datatypes-unixtime/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-unixtime/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-datatypes-unixtime/swagger.json b/tests-upgrade/datamodels-datatypes-unixtime/swagger.json new file mode 100644 index 00000000000..27427d16396 --- /dev/null +++ b/tests-upgrade/datamodels-datatypes-unixtime/swagger.json @@ -0,0 +1,76 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." 
+ }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/BasicErrorModel" + } + } + } + } + } + }, + "definitions": { + "BasicErrorModel": { + "type": "object", + "required": [ + "message", + "code" + ], + "properties": { + "message": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "unixtime" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/datamodels-enums/readme.md b/tests-upgrade/datamodels-enums/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/datamodels-enums/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/datamodels-enums/swagger.json b/tests-upgrade/datamodels-enums/swagger.json new file mode 100644 index 00000000000..efa7111df69 --- /dev/null +++ b/tests-upgrade/datamodels-enums/swagger.json @@ -0,0 +1,73 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup/{resourceGroup}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + } + ], + "responses": { + "200": { + "description": "OK-Return workspace." 
+ } + } + } + } + }, + "parameters": { + "ResourceGroupName":{ + "in": "path", + "name": "resourceGroup", + "description": "Resource group name", + "required": true, + "schema": { + "type": "string", + "enum": [ + "azure cloud", + "azure prod" + ] + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/directive-aliasremoval/readme.md b/tests-upgrade/directive-aliasremoval/readme.md new file mode 100644 index 00000000000..0f616b88835 --- /dev/null +++ b/tests-upgrade/directive-aliasremoval/readme.md @@ -0,0 +1,14 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +directive: + - where: + parameter-name: ResourceGroupName + clear-alias: true +``` diff --git a/tests-upgrade/directive-aliasremoval/swagger.json b/tests-upgrade/directive-aliasremoval/swagger.json new file mode 100644 index 00000000000..53ed81c131f --- /dev/null +++ b/tests-upgrade/directive-aliasremoval/swagger.json @@ -0,0 +1,87 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/ResourceGroupName/{ResourceGroupName}": { + "get": { + "tags": [ + "ClearRename" + ], + "operationId": "ClearRename_Get", + "description": "rename the Model", + "parameters": [ + {"$ref": "#/parameters/ResourceGroup"} + ], + "responses": { + "200": { + "description": "OK" + } + } + } + } + }, + "parameters": { + "ResourceGroup": { + "name": "ResourceGroupName", + "in": "path", + "type": "string", + "required": true, + "description": "Resource Group Name." + } + }, + "definitions": { + "ResourceGroupName": { + "description": "The core properties of ARM resources", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts." 
+ } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/directive-cmdlet/readme.md b/tests-upgrade/directive-cmdlet/readme.md new file mode 100644 index 00000000000..ba2e53e840b --- /dev/null +++ b/tests-upgrade/directive-cmdlet/readme.md @@ -0,0 +1,53 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +directive: + - where: + verb: Get + subject: VirtualMachine + set: + subject: VM + - where: + subject: (^Configuration)(.*) + set: + subject: Config$2 + - select: command + where: + verb: Get + subject: Object + parameter-name: Id + set: + subject: obj + - where: + verb: Get + subject: ResourceGroup + set: + alias: Get-RG + - where: + verb: Get + subject: MultipleAlias + set: + alias: + - Get-MA1 + - Get-MA2 + - where: + verb: patch + subject: CmdletHide + hide: true + - where: + subject: ConfigHide.* + hide: true + - where: + verb: Get + subject: Remove + remove: true + - where: + subject: RegexRemove.* + remove: true +``` diff --git a/tests-upgrade/directive-cmdlet/swagger.json b/tests-upgrade/directive-cmdlet/swagger.json new file mode 100644 index 00000000000..d6941714ff6 --- /dev/null +++ b/tests-upgrade/directive-cmdlet/swagger.json @@ -0,0 +1,188 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/VirtualMachine": { + "get": { + "tags": [ + "VirtualMachine" + ], + "operationId": "VirtualMachine_Get", + "description": "rename the cmdlet 'Get-VirtualMachine' to 'Get-VM'", + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/ConfigurationStore": { + "get": { + "tags": [ + "ConfigurationStore" + ], + "operationId": "ConfigurationStore_Get", + "description": "This will change every cmdlet where the subject starts with 'Configuration'", + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/ObjectId/Id/{Id}": { + "get": { + "tags": [ + "Object" + ], + "operationId": "Object_Get", + "description": "rename the cmdlet.", + "parameters": [ + {"$ref": "#/parameters/Id"} + ], + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/ResourceGroup": { + "get": { + "tags": [ + "ResourceGroup" + ], + "operationId": "ResourceGroup_Get", + "description": "rename the cmdlet.", + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/MultipleAlias": { + "get": { + "tags": [ + "MultipleAlias" + ], + "operationId": "MultipleAlias_Get", + "description": "rename the cmdlet.", + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/CmdletHide": { + "get": { + "tags": [ + "CmdletHide" + ], + "operationId": "CmdletHide_Get", + "description": "Hide the cmdlet.", + "responses": { + "200": { + "description": "OK" + } + } + }, + "patch": { + "tags": [ + "CmdletHide" + ], + "operationId": "CmdletHide_Update", + "description": "Hide the cmdlet.", + "responses": { + "200": { + 
"description": "OK" + } + } + } + }, + "/HideRegexCmdlet": { + "get": { + "tags": [ + "HideRegexCmdlet" + ], + "operationId": "ConfigHide_Get", + "description": "Hide the cmdlet.", + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/Remove": { + "get": { + "tags": [ + "Remove" + ], + "operationId": "Remove_Get", + "description": "Remove the cmdlet.", + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/RegexRemove": { + "get": { + "tags": [ + "RegexRemove" + ], + "operationId": "RegexRemoveTest_Get", + "description": "Regex Remove the cmdlet.", + "responses": { + "200": { + "description": "OK" + } + } + } + } + }, + "parameters": { + "Id": { + "name": "Id", + "in": "path", + "type": "string", + "required": true + } + } +} \ No newline at end of file diff --git a/tests-upgrade/directive-enum/readme.md b/tests-upgrade/directive-enum/readme.md new file mode 100644 index 00000000000..071d8f56550 --- /dev/null +++ b/tests-upgrade/directive-enum/readme.md @@ -0,0 +1,16 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +directive: + - where: + enum-name: provisioningState + enum-value-name: Upgrading + set: + enum-value-name: Update +``` diff --git a/tests-upgrade/directive-enum/swagger.json b/tests-upgrade/directive-enum/swagger.json new file mode 100644 index 00000000000..ccb13464b7b --- /dev/null +++ b/tests-upgrade/directive-enum/swagger.json @@ -0,0 +1,78 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/resourceGroup": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK-Return workspace." 
+ } + } + } + } + }, + "definitions": { + "CanonicalSupportPlanProperties": { + "type": "object", + "description": "The properties of the Canonical support plan.", + "properties": { + "provisioningState": { + "type": "string", + "description": "The provisioning state of the resource.", + "enum": [ + "Succeeded", + "Failed", + "Cancelled", + "Purchasing", + "Downgrading", + "Cancelling", + "Upgrading" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/directive-model/readme.md b/tests-upgrade/directive-model/readme.md new file mode 100644 index 00000000000..600c44c596b --- /dev/null +++ b/tests-upgrade/directive-model/readme.md @@ -0,0 +1,28 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +directive: + - where: + model-name: Resource + set: + model-name: AzureResource + - where: + model-name: ^KeyValue(.*) + set: + model-name: kv$1 + - where: + model-name: VirtualMachine + property-name: VirtualMachineName + set: + property-name: Name + - where: + property-name: (.*)Azure + set: + property-name: Az +``` diff --git a/tests-upgrade/directive-model/swagger.json b/tests-upgrade/directive-model/swagger.json new file mode 100644 index 00000000000..949c05c3ec9 --- /dev/null +++ b/tests-upgrade/directive-model/swagger.json @@ -0,0 +1,135 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/ModelRename": { + "get": { + "tags": [ + "ModelRename" + ], + "operationId": "ModelRename_Get", + "description": "rename the Model", + "responses": { + "200": { + "description": "OK" + } + } + } + } + }, + "definitions": { + "Resource": { + "description": "The core properties of ARM resources", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts." + } + } + }, + "KeyValue": { + "description": "The core properties Key value", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified Key value Id for the Key value." + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the Key value" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the Key value. 
" + } + } + }, + "VirtualMachine": { + "description": "The core properties of VirtualMachine", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified VirtualMachine Id for the VirtualMachine." + }, + "VirtualMachineName": { + "readOnly": true, + "type": "string", + "description": "The name of the VirtualMachine" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the VirtualMachine." + } + } + }, + "AzureEnv": { + "description": "The core properties of AzureEnv", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified AzureEnv Id for the AzureEnv." + }, + "Name": { + "readOnly": true, + "type": "string", + "description": "The name of the AzureEnv" + }, + "typeAzure": { + "readOnly": true, + "type": "string", + "description": "The type of the AzureEnv." + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/directive-parameter/readme.md b/tests-upgrade/directive-parameter/readme.md new file mode 100644 index 00000000000..149198a96df --- /dev/null +++ b/tests-upgrade/directive-parameter/readme.md @@ -0,0 +1,38 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +directive: + - where: + parameter-name: Sku + set: + parameter-name: SkuName + - where: + verb: Get + subject: VirtualMachineSize_Get + parameter-name: VirtualMachineSize + set: + parameter-name: VMSize + - where: + parameter-name: VirtualMachine + set: + alias: + - VM + - VMachine + - where: + parameter-name: ResourceGroup + set: + parameter-description: This is resource group name. + - where: + parameter-name: SubscriptionId + set: + default: + name: SubscriptionId Default + description: Gets the SubscriptionId from the current context. + script: '(Get-AzContext).Subscription.Id' +``` diff --git a/tests-upgrade/directive-parameter/swagger.json b/tests-upgrade/directive-parameter/swagger.json new file mode 100644 index 00000000000..14a301398f7 --- /dev/null +++ b/tests-upgrade/directive-parameter/swagger.json @@ -0,0 +1,185 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/SkuName/{Sku}": { + "get": { + "tags": [ + "Sku" + ], + "operationId": "Sku_Get", + "description": "Gets the Sku.", + "parameters": [ + {"$ref": "#/parameters/Sku"} + ], + "responses": { + "200": { + "description": "OK - Returns the Sku." + } + } + } + }, + "/VirtualMachineSize/{VirtualMachineSize}": { + "get": { + "tags": [ + "VirtualMachine" + ], + "operationId": "VirtualMachineSize_Get", + "description": "Get the Virtual Machine.", + "parameters": [ + {"$ref": "#/parameters/VirtualMachineSize"} + ], + "responses": { + "200": { + "description": "OK - Returns the Virtual Machine." 
+ } + } + } + }, + "/VirtualMachine/{VirtualMachine}": { + "get": { + "tags": [ + "VirtualMachine" + ], + "operationId": "VirtualMachine_Get", + "description": "Get the Virtual Machine.", + "parameters": [ + {"$ref": "#/parameters/VirtualMachine"} + ], + "responses": { + "200": { + "description": "OK - Returns the Virtual Machine." + } + } + } + }, + "/Workspace/WorksapceName/{WorkspaceName}": { + "get": { + "tags":[ + "Workspace" + ], + "operationId": "Workspace_Get", + "description": "Get workspace", + "parameters": [ + {"$ref": "#/parameters/WorkspaceName"} + ], + "responses": { + "200": { + "description": "OK - Returns the Workspace." + } + } + } + }, + "/ResourceGroup/{ResourceGroup}": { + "get": { + "tags":[ + "ResourceGroup" + ], + "operationId": "ResourceGroup_Get", + "description": "Get ResourceGroup", + "parameters": [ + {"$ref": "#/parameters/ResourceGroup"} + ], + "responses": { + "200": { + "description": "OK - Returns the Resource Group." + } + } + } + }, + "/SubscriptionId/{SubscriptionId}": { + "get": { + "tags":[ + "Subscription" + ], + "operationId": "Subscription_Get", + "description": "Get SubscriptionId", + "parameters": [ + {"$ref": "#/parameters/SubscriptionId"} + ], + "responses": { + "200": { + "description": "OK - Returns the Subscription." + } + } + } + } + }, + "parameters": { + "Sku": { + "name": "Sku", + "in": "path", + "type": "string", + "required": true, + "description": "SKU for the resource." + }, + "VirtualMachineSize": { + "name": "VirtualMachineSize", + "in": "path", + "type": "string", + "required": true, + "description": "Virtual machine size." + }, + "VirtualMachine": { + "name": "VirtualMachine", + "in": "path", + "type": "string", + "required": true, + "description": "Virtual machine size." + }, + "WorkspaceName": { + "name": "WorkspaceName", + "in": "path", + "type": "string", + "required": true, + "description": "Workspace Name." + }, + "ResourceGroup": { + "name": "ResourceGroup", + "in": "path", + "type": "string", + "required": true, + "description": "Resource Group Name." + }, + "SubscriptionId": { + "name": "SubscriptionId", + "in": "path", + "type": "string", + "required": true, + "description": "Subscription Id." 
+ } + } +} \ No newline at end of file diff --git a/tests-upgrade/directive-tableformat/readme.md b/tests-upgrade/directive-tableformat/readme.md new file mode 100644 index 00000000000..d09ec83598a --- /dev/null +++ b/tests-upgrade/directive-tableformat/readme.md @@ -0,0 +1,23 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +directive: + - where: + model-name: VirtualMachine + set: + format-table: + properties: + - Name + - ResourceGroup + labels: + ResourceGroup: Resource Group + width: + Name: 60 + ResourceGroup: 80 +``` diff --git a/tests-upgrade/directive-tableformat/swagger.json b/tests-upgrade/directive-tableformat/swagger.json new file mode 100644 index 00000000000..5df9d161802 --- /dev/null +++ b/tests-upgrade/directive-tableformat/swagger.json @@ -0,0 +1,75 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/TableFormatting": { + "get": { + "tags": [ + "TableFormatting" + ], + "operationId": "TableFormatting_Get", + "description": "Table Formatting", + "responses": { + "200": { + "description": "OK" + } + } + } + } + }, + "definitions": { + "VirtualMachine": { + "description": "The core properties of ARM resources", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified VirtualMachine Id for the VirtualMachine. " + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the VirtualMachine" + }, + "ResourceGroup": { + "readOnly": true, + "type": "string", + "description": "The resource group of the resource." + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/examples/OperationsList.json b/tests-upgrade/examples/OperationsList.json new file mode 100644 index 00000000000..4da476627d5 --- /dev/null +++ b/tests-upgrade/examples/OperationsList.json @@ -0,0 +1,65 @@ +{ + "parameters": { + "api-version": "2018-04-01" + }, + "responses": { + "200": { + "body": [ + { + "name": "Microsoft.Databricks/workspaces/read", + "display": { + "provider": "Microsoft Databricks", + "resource": "Workspace", + "operation": "List Workspaces", + "description": "Retrieves a list of workspaces." + } + }, + { + "name": "Microsoft.Databricks/workspaces/write", + "display": { + "provider": "Microsoft Databricks", + "resource": "Workspace", + "operation": "Create Workspace", + "description": "Creates a workspace." + } + }, + { + "name": "Microsoft.Databricks/workspaces/delete", + "display": { + "provider": "Microsoft Databricks", + "resource": "Workspace", + "operation": "Remove Workspace", + "description": "Removes a workspace." 
+ } + }, + { + "name": "Microsoft.Databricks/workspaces/virtualNetworkPeerings/read", + "display": { + "provider": "Microsoft Databricks", + "resource": "Virtual Network Peering", + "operation": "Get Virtual Network Peering", + "description": "Gets the virtual network peering." + } + }, + { + "name": "Microsoft.Databricks/workspaces/virtualNetworkPeerings/write", + "display": { + "provider": "Microsoft Databricks", + "resource": "Virtual Network Peering", + "operation": "Add Virtual Network Peering", + "description": "Add or modify virtual network peering" + } + }, + { + "name": "Microsoft.Databricks/workspaces/virtualNetworkPeerings/delete", + "display": { + "provider": "Microsoft Databricks", + "resource": "Virtual Network Peering", + "operation": "Delete Virtual Network Peering", + "description": "Deletes a virtual network peering" + } + } + ] + } + } +} diff --git a/tests-upgrade/examples/RemoteDefinitions.json b/tests-upgrade/examples/RemoteDefinitions.json new file mode 100644 index 00000000000..547c57f8a9b --- /dev/null +++ b/tests-upgrade/examples/RemoteDefinitions.json @@ -0,0 +1,103 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + }, + "definitions": { + "ErrorDetail": { + "title": "Error details.", + "type": "object", + "properties": { + "code": { + "description": "The error's code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "target": { + "description": "Indicates which property in the request is responsible for the error.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorInfo": { + "title": "The code and message for an error.", + "type": "object", + "properties": { + "code": { + "description": "A machine readable error code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "details": { + "description": "error details.", + "type": "string" + }, + "innererror": { + "description": "Inner error details if they exist.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorResponse": { + "title": "Error response.", + "description": "Contains details when the response code indicates an error.", + "type": "object", + "properties": { + "error": { + "description": "The error details.", + "type": "string" + } + }, + "required": [ + "error" + ] + } + } +} diff --git a/tests-upgrade/examples/WorkspaceCreate.json b/tests-upgrade/examples/WorkspaceCreate.json new file mode 100644 index 00000000000..2c296267716 --- /dev/null +++ b/tests-upgrade/examples/WorkspaceCreate.json @@ -0,0 +1,89 @@ +{ + "parameters": { + "subscriptionId": "subid", + "resourceGroupName": "rg", + "workspaceName": "myWorkspace", + "api-version": "2018-04-01", + "parameters": { + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG" + }, + "location": 
"westus" + } + }, + "responses": { + "200": { + "body": { + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": null, + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Accepted", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "5555555555555555", + "workspaceUrl": "adb-5555555555555555.19.azuredatabricks.net" + } + } + }, + "201": { + "body": { + "name": "myWorkspace", + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": { + "environment": "dev" + }, + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Accepted", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z" + } + } + } + } +} diff --git a/tests-upgrade/examples/WorkspaceCreateWithParameters.json b/tests-upgrade/examples/WorkspaceCreateWithParameters.json new file mode 100644 index 00000000000..4a6d36ecbcf --- /dev/null +++ b/tests-upgrade/examples/WorkspaceCreateWithParameters.json @@ -0,0 +1,114 @@ +{ + "parameters": { + "subscriptionId": "subid", + "resourceGroupName": "rg", + "workspaceName": "myWorkspace", + "api-version": "2018-04-01", + "parameters": { + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": { + "customVirtualNetworkId": { + "value": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/myNetwork" + }, + "customPublicSubnetName": { + "value": "myPublicSubnet" + }, + "customPrivateSubnetName": { + "value": "myPrivateSubnet" + } + } + }, + "location": "westus" + } + }, + "responses": { + "200": { + "headers": {}, + "body": { + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": null, + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": { + "customPrivateSubnetName": { + "type": "String", + "value": "myPrivateSubnet" + }, + 
"customPublicSubnetName": { + "type": "String", + "value": "myPublicSubnet" + }, + "customVirtualNetworkId": { + "type": "String", + "value": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/myNetwork" + } + }, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "5555555555555555", + "workspaceUrl": "adb-5555555555555555.19.azuredatabricks.net" + } + } + }, + "201": { + "headers": {}, + "body": { + "name": "myWorkspace", + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": { + "environment": "dev" + }, + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "provisioningState": "Accepted", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z" + } + } + } + } +} diff --git a/tests-upgrade/examples/WorkspaceDelete.json b/tests-upgrade/examples/WorkspaceDelete.json new file mode 100644 index 00000000000..affad060724 --- /dev/null +++ b/tests-upgrade/examples/WorkspaceDelete.json @@ -0,0 +1,13 @@ +{ + "parameters": { + "workspaceName": "myWorkspace", + "resourceGroupName": "rg", + "api-version": "2018-04-01", + "subscriptionId": "subid" + }, + "responses": { + "200": {}, + "202": {}, + "204": {} + } +} diff --git a/tests-upgrade/examples/WorkspaceGet.json b/tests-upgrade/examples/WorkspaceGet.json new file mode 100644 index 00000000000..48fe6d0b499 --- /dev/null +++ b/tests-upgrade/examples/WorkspaceGet.json @@ -0,0 +1,47 @@ +{ + "parameters": { + "subscriptionId": "subid", + "resourceGroupName": "rg", + "workspaceName": "myWorkspace", + "api-version": "2018-04-01" + }, + "responses": { + "200": { + "headers": {}, + "body": { + "name": "myWorkspace", + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace", + "type": "Microsoft.Databricks/workspaces", + "sku": { + "name": "premium" + }, + "location": "East US 2", + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": 
"22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "5555555555555555", + "workspaceUrl": "adb-5555555555555555.19.azuredatabricks.net" + } + } + } + } +} diff --git a/tests-upgrade/examples/WorkspaceGetParameters.json b/tests-upgrade/examples/WorkspaceGetParameters.json new file mode 100644 index 00000000000..7babe97ee78 --- /dev/null +++ b/tests-upgrade/examples/WorkspaceGetParameters.json @@ -0,0 +1,60 @@ +{ + "parameters": { + "subscriptionId": "subid", + "resourceGroupName": "rg", + "workspaceName": "myWorkspace", + "api-version": "2018-04-01" + }, + "responses": { + "200": { + "headers": {}, + "body": { + "name": "myWorkspace", + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": { + "customPrivateSubnetName": { + "type": "String", + "value": "PrivateBob" + }, + "customPublicSubnetName": { + "type": "String", + "value": "PublicSarah" + }, + "customVirtualNetworkId": { + "type": "String", + "value": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/myNetwork" + } + }, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "5555555555555555", + "workspaceUrl": "adb-5555555555555555.19.azuredatabricks.net" + } + } + } + } +} diff --git a/tests-upgrade/examples/WorkspaceUpdate.json b/tests-upgrade/examples/WorkspaceUpdate.json new file mode 100644 index 00000000000..766bbcd7872 --- /dev/null +++ b/tests-upgrade/examples/WorkspaceUpdate.json @@ -0,0 +1,55 @@ +{ + "parameters": { + "subscriptionId": "subid", + "resourceGroupName": "rg", + "workspaceName": "myWorkspace", + "api-version": "2018-04-01", + "parameters": { + "tags": { + "mytag1": "myvalue1" + } + } + }, + "responses": { + "200": { + "headers": {}, + "body": { + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": { + "mytag1": "myvalue1" + }, + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + 
"applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "6666666666666666", + "workspaceUrl": "adb-6666666666666666.19.azuredatabricks.net" + } + } + }, + "202": {} + } +} diff --git a/tests-upgrade/examples/WorkspacesListByResourceGroup.json b/tests-upgrade/examples/WorkspacesListByResourceGroup.json new file mode 100644 index 00000000000..f8fad446745 --- /dev/null +++ b/tests-upgrade/examples/WorkspacesListByResourceGroup.json @@ -0,0 +1,86 @@ +{ + "parameters": { + "subscriptionId": "subid", + "resourceGroupName": "rg", + "api-version": "2018-04-01" + }, + "responses": { + "200": { + "headers": {}, + "body": { + "value": [ + { + "name": "myWorkspace1", + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace1", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": null, + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "5555555555555555", + "workspaceUrl": "adb-5555555555555555.19.azuredatabricks.net" + } + }, + { + "name": "myWorkspace2", + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace2", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": null, + "sku": { + "name": "standard" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "6666666666666666", + "workspaceUrl": "adb-6666666666666666.19.azuredatabricks.net" + } + } + ] + } + } + } +} diff --git a/tests-upgrade/examples/WorkspacesListBySubscription.json b/tests-upgrade/examples/WorkspacesListBySubscription.json new file mode 100644 index 00000000000..de92bdaeaf1 --- /dev/null +++ b/tests-upgrade/examples/WorkspacesListBySubscription.json @@ -0,0 +1,85 @@ +{ + "parameters": { + "subscriptionId": "subid", + "api-version": "2018-04-01" + }, + "responses": { + "200": { + "headers": {}, + "body": { + "value": [ + { + "name": "myWorkspace1", + 
"id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace1", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": null, + "sku": { + "name": "premium" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "5555555555555555", + "workspaceUrl": "adb-5555555555555555.19.azuredatabricks.net" + } + }, + { + "name": "myWorkspace2", + "id": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Databricks/workspaces/myWorkspace2", + "type": "Microsoft.Databricks/workspaces", + "location": "East US 2", + "tags": null, + "sku": { + "name": "standard" + }, + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": null, + "provisioningState": "Succeeded", + "uiDefinitionUri": "https://path/to/workspaceCreateUiDefinition.json", + "authorizations": [ + { + "principalId": "00000000-0000-0000-0000-000000000000", + "roleDefinitionId": "11111111-1111-1111-1111-111111111111" + } + ], + "createdBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "updatedBy": { + "oid": "22222222-2222-2222-2222-222222222222", + "puid": "33333333", + "applicationId": "44444444-4444-4444-4444-444444444444" + }, + "createdDateTime": "2020-02-20T00:10:29.2858439Z", + "workspaceId": "6666666666666666", + "workspaceUrl": "adb-6666666666666666.19.azuredatabricks.net" + } + } + ] + } + } + } +} diff --git a/tests-upgrade/extension-ms-azureresource/readme.md b/tests-upgrade/extension-ms-azureresource/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-azureresource/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-azureresource/swagger.json b/tests-upgrade/extension-ms-azureresource/swagger.json new file mode 100644 index 00000000000..abd0e6ce8c1 --- /dev/null +++ b/tests-upgrade/extension-ms-azureresource/swagger.json @@ -0,0 +1,79 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + 
} + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "definitions": { + "Resource": { + "description": "The core properties of ARM resources", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts." + } + }, + "x-ms-azure-resource": true + } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-clientflatten/readme.md b/tests-upgrade/extension-ms-clientflatten/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-clientflatten/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-clientflatten/swagger.json b/tests-upgrade/extension-ms-clientflatten/swagger.json new file mode 100644 index 00000000000..b5be373a859 --- /dev/null +++ b/tests-upgrade/extension-ms-clientflatten/swagger.json @@ -0,0 +1,80 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "definitions": { + "Workspace": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WorkspaceProperties", + "description": "The workspace properties." + } + }, + "required": [ + "properties" + ], + "description": "Information about workspace." + }, + "WorkspaceProperties": { + "properties": { + "managedResourceGroupId": { + "type": "string", + "description": "The managed resource group Id." + } + }, + "description": "The workspace properties." 
+ } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-clientname/readme.md b/tests-upgrade/extension-ms-clientname/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-clientname/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-clientname/swagger.json b/tests-upgrade/extension-ms-clientname/swagger.json new file mode 100644 index 00000000000..ecf7132608d --- /dev/null +++ b/tests-upgrade/extension-ms-clientname/swagger.json @@ -0,0 +1,78 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "definitions": { + "Encryption": { + "properties": { + "KeyName": { + "type": "string", + "description": "The name of KeyVault key.", + "x-ms-client-name": "KeyName" + }, + "keyversion": { + "type": "string", + "description": "The version of KeyVault key.", + "x-ms-client-name": "KeyVersion" + }, + "keyvaulturi": { + "type": "string", + "description": "The Uri of KeyVault.", + "x-ms-client-name": "KeyVaultUri" + } + }, + "description": "The object that contains details of encryption used on the workspace." 
+ } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-discriminatorvalue/readme.md b/tests-upgrade/extension-ms-discriminatorvalue/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-discriminatorvalue/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-discriminatorvalue/swagger.json b/tests-upgrade/extension-ms-discriminatorvalue/swagger.json new file mode 100644 index 00000000000..fc0749d9999 --- /dev/null +++ b/tests-upgrade/extension-ms-discriminatorvalue/swagger.json @@ -0,0 +1,66 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + }, + "definitions": { + "SecureString": { + "x-ms-discriminator-value": "SecureString", + "properties": { + "value": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-enum/readme.md b/tests-upgrade/extension-ms-enum/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-enum/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-enum/swagger.json b/tests-upgrade/extension-ms-enum/swagger.json new file mode 100644 index 00000000000..c94b2b31512 --- /dev/null +++ b/tests-upgrade/extension-ms-enum/swagger.json @@ -0,0 +1,80 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + }, + "definitions": { + "ProvisioningState": { + "description": "Provisioning status of the workspace.", + "readOnly": true, + "enum": [ + "Accepted", + "Running", + "Ready", + "Creating", + "Created", + "Deleting", + "Deleted", + "Canceled", + "Failed", + "Succeeded", + "Updating" + ], + "type": "string", + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-examples/readme.md b/tests-upgrade/extension-ms-examples/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-examples/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-examples/swagger.json b/tests-upgrade/extension-ms-examples/swagger.json new file mode 100644 index 00000000000..457c0601be6 --- /dev/null +++ b/tests-upgrade/extension-ms-examples/swagger.json @@ -0,0 +1,64 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "x-ms-examples": { + "Get a workspace": { + "$ref": "../examples/WorkspaceGet.json" + }, + "Get a workspace with custom parameters": { + "$ref": "../examples/WorkspaceGetParameters.json" + } + }, + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." 
+ } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-longruningoperation/readme.md b/tests-upgrade/extension-ms-longruningoperation/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-longruningoperation/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-longruningoperation/swagger.json b/tests-upgrade/extension-ms-longruningoperation/swagger.json new file mode 100644 index 00000000000..02cd91a8e2b --- /dev/null +++ b/tests-upgrade/extension-ms-longruningoperation/swagger.json @@ -0,0 +1,60 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Deletes the workspace.", + "responses": { + "204": { + "description": "NoContent" + }, + "202": { + "description": "Accepted - Returns this status until the asynchronous operation has completed." + }, + "200": { + "description": "OK. The request has succeeded." 
+ } + }, + "x-ms-long-running-operation": true + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-mutability/readme.md b/tests-upgrade/extension-ms-mutability/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-mutability/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-mutability/swagger.json b/tests-upgrade/extension-ms-mutability/swagger.json new file mode 100644 index 00000000000..2e028ce2a9b --- /dev/null +++ b/tests-upgrade/extension-ms-mutability/swagger.json @@ -0,0 +1,86 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "definitions": { + "TrackedResource": { + "description": "The resource model definition for a ARM tracked top level resource", + "properties": { + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "Resource tags." 
+ }, + "location": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "The geo-location where the resource lives" + } + }, + "required": [ + "location" + ] + } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-pageable/readme.md b/tests-upgrade/extension-ms-pageable/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-pageable/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-pageable/swagger.json b/tests-upgrade/extension-ms-pageable/swagger.json new file mode 100644 index 00000000000..3fca89e36ab --- /dev/null +++ b/tests-upgrade/extension-ms-pageable/swagger.json @@ -0,0 +1,71 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListByResourceGroup", + "description": "Gets all the workspaces within a resource group.", + "responses": { + "200": { + "description": "OK - Returns an array of workspaces.", + "schema": { + "$ref": "#/definitions/WorkspaceList" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + } + }, + "definitions": { + "WorkspaceList": { + "properties": { + "value": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + } +} \ No newline at end of file diff --git a/tests-upgrade/extension-ms-paramlocation/readme.md b/tests-upgrade/extension-ms-paramlocation/readme.md new file mode 100644 index 00000000000..cd6b9c0ac24 --- /dev/null +++ b/tests-upgrade/extension-ms-paramlocation/readme.md @@ -0,0 +1,10 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(this-folder)/swagger.json + +``` diff --git a/tests-upgrade/extension-ms-paramlocation/swagger.json b/tests-upgrade/extension-ms-paramlocation/swagger.json new file mode 100644 index 00000000000..d42eebb553c --- /dev/null +++ b/tests-upgrade/extension-ms-paramlocation/swagger.json @@ -0,0 +1,73 @@ +{ + + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + 
"user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "parameters": [ + { + "$ref": "#/parameters/WorkspaceName" + } + ], + "responses": { + "200": { + "description": "OK - Returns the workspace." + }, + "default": { + "description": "Error response describing why the operation failed." + } + } + } + } + }, + "parameters": { + "WorkspaceName": { + "name": "workspaceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace.", + "minLength": 3, + "maxLength": 64, + "x-ms-parameter-location": "method" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/functions/helpers/AppInsights/readme.md b/tests-upgrade/functions/helpers/AppInsights/readme.md new file mode 100644 index 00000000000..211e1297df0 --- /dev/null +++ b/tests-upgrade/functions/helpers/AppInsights/readme.md @@ -0,0 +1,114 @@ + +# Az.AppInsights +This directory contains the PowerShell module for the AppInsights service. + +--- +## Status +[![Az.AppInsights](https://img.shields.io/powershellgallery/v/Az.AppInsights.svg?style=flat-square&label=Az.AppInsights "Az.AppInsights")](https://www.powershellgallery.com/packages/Az.AppInsights/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.6.0 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.AppInsights`, see [how-to.md](how-to.md). + + +# Internal +This directory contains a module to handle *internal only* cmdlets. Cmdlets that you **hide** in configuration are created here. For more information on hiding, see [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression). The cmdlets in this directory are generated at **build-time**. Do not put any custom code, files, cmdlets, etc. into this directory. Please use `..\custom` for all custom implementation. + +## Info +- Modifiable: no +- Generated: all +- Committed: no +- Packaged: yes + +## Details +The `Az.Storage.internal.psm1` file is generated to this folder. This module file handles the hidden cmdlets. These cmdlets will not be exported by `Az.Storage`. Instead, this sub-module is imported by the `..\custom\Az.Storage.custom.psm1` module, allowing you to use hidden cmdlets in your custom, exposed cmdlets. To call these cmdlets in your custom scripts, simply use [module-qualified calls](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_command_precedence?view=powershell-6#qualified-names). For example, `Az.Storage.internal\Get-Example` would call an internal cmdlet named `Get-Example`. + +## Purpose +This allows you to include REST specifications for services that you *do not wish to expose from your module*, but simply want to call within custom cmdlets. 
For example, if you want to make a custom cmdlet that uses `Storage` services, you could include a simplified `Storage` REST specification that has only the operations you need. When you run the generator and build this module, note the generated `Storage` cmdlets. Then, in your readme configuration, use [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression) on the `Storage` cmdlets and they will *only be exposed to the custom cmdlets* you want to write, and not be exported as part of `Az.Storage`. + +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +#output-folder: . +#subject-prefix: '' +#title: AppInsights +#module-version: 4.0.0 +apprepo: https://github.com/Azure/azure-rest-api-specs/blob/resource-hybrid-profile +require: + - $(this-folder)/../../readme.azure.md +``` + +## Multi-API/Profile support for AutoRest v3 generators + +AutoRest V3 generators require the use of `--tag=all-api-versions` to select api files. + +This block is updated by an automatic script. Edits may be lost! + +``` yaml $(tag) == 'all-api-versions' /* autogenerated */ +# include the azure profile definitions from the standard location +require: $(apprepo)/profiles/readme.md +appinsights: $(apprepo)/specification/applicationinsights/resource-manager + +# all the input files across all versions +input-file: + - $(appinsights)/Microsoft.Insights/stable/2015-05-01/componentApiKeys_API.json + - $(appinsights)/Microsoft.Insights/stable/2015-05-01/components_API.json +``` + +# Directives +``` yaml +directive: + - where: + verb: Clear|Remove|Set + subject: ^Component$ + remove: true + - where: + verb: Get + subject: ^ComponentPurgeStatus$ + remove: true + - where: + verb: New|Remove + subject: ^ApiKey$ + remove: true + - where: + verb: Update + subject: ^ComponentTag$ + remove: true + - where: + subject: ^Component$ + set: + subject: AppInsights + - where: + subject: ^ApiKey$ + set: + subject: AppInsightsApiKey + - where: + verb: Get|New + subject: ^AppInsights$ + hide: true + - where: + verb: Get + subject: ^AppInsightsApiKey$ + hide: true +``` diff --git a/tests-upgrade/functions/helpers/AppInsights/readme.noprofile.md b/tests-upgrade/functions/helpers/AppInsights/readme.noprofile.md new file mode 100644 index 00000000000..635fa81cba0 --- /dev/null +++ b/tests-upgrade/functions/helpers/AppInsights/readme.noprofile.md @@ -0,0 +1,119 @@ + +# Az.AppInsights +This directory contains the PowerShell module for the AppInsights service. + +--- +## Status +[![Az.AppInsights](https://img.shields.io/powershellgallery/v/Az.AppInsights.svg?style=flat-square&label=Az.AppInsights "Az.AppInsights")](https://www.powershellgallery.com/packages/Az.AppInsights/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.6.0 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.AppInsights`, see [how-to.md](how-to.md). 
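+
+As a quick orientation alongside [how-to.md](how-to.md), a minimal local loop might look like the sketch below; the build script, manifest path, and cmdlet names are illustrative and depend on what the generator actually emits for this configuration.
+
+``` powershell
+# Regenerate the module from this configuration, then build it.
+autorest readme.noprofile.md
+./build-module.ps1            # script emitted by the generator; name may differ
+
+# Authentication comes from Az.Accounts, so sign in first,
+# then import the freshly built module and inspect its surface.
+Connect-AzAccount
+Import-Module ./Az.AppInsights.psd1
+Get-Command -Module Az.AppInsights
+```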
+ + +# Internal +This directory contains a module to handle *internal only* cmdlets. Cmdlets that you **hide** in configuration are created here. For more information on hiding, see [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression). The cmdlets in this directory are generated at **build-time**. Do not put any custom code, files, cmdlets, etc. into this directory. Please use `..\custom` for all custom implementation. + +## Info +- Modifiable: no +- Generated: all +- Committed: no +- Packaged: yes + +## Details +The `Az.Storage.internal.psm1` file is generated to this folder. This module file handles the hidden cmdlets. These cmdlets will not be exported by `Az.Storage`. Instead, this sub-module is imported by the `..\custom\Az.Storage.custom.psm1` module, allowing you to use hidden cmdlets in your custom, exposed cmdlets. To call these cmdlets in your custom scripts, simply use [module-qualified calls](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_command_precedence?view=powershell-6#qualified-names). For example, `Az.Storage.internal\Get-Example` would call an internal cmdlet named `Get-Example`. + +## Purpose +This allows you to include REST specifications for services that you *do not wish to expose from your module*, but simply want to call within custom cmdlets. For example, if you want to make a custom cmdlet that uses `Storage` services, you could include a simplified `Storage` REST specification that has only the operations you need. When you run the generator and build this module, note the generated `Storage` cmdlets. Then, in your readme configuration, use [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression) on the `Storage` cmdlets and they will *only be exposed to the custom cmdlets* you want to write, and not be exported as part of `Az.Storage`. + +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +#output-folder: . +#subject-prefix: '' +#title: AppInsights +#module-version: 4.0.0 +require: + - $(this-folder)/../../../readme.azure.noprofile.md +``` + +## Multi-API/Profile support for AutoRest v3 generators + +AutoRest V3 generators require the use of `--tag=all-api-versions` to select api files. + +This block is updated by an automatic script. Edits may be lost! 
+ +``` yaml +apprepo: https://github.com/Azure/azure-rest-api-specs/blob/resource-hybrid-profile +# include the azure profile definitions from the standard location +appinsights: $(apprepo)/specification/applicationinsights/resource-manager + +# all the input files across all versions +input-file: + - $(appinsights)/Microsoft.Insights/stable/2015-05-01/componentApiKeys_API.json + - $(appinsights)/Microsoft.Insights/stable/2015-05-01/components_API.json + +subject-prefix: '' +``` + +# Directives +``` yaml +directive: + - where: + verb: Clear|Remove|Set + subject: ^Component$ + remove: true + - where: + verb: Get + subject: ^ComponentPurgeStatus$ + remove: true + - where: + verb: New|Remove + subject: ^ApiKey$ + remove: true + - where: + verb: Update + subject: ^ComponentTag$ + remove: true + - where: + subject: ^Component$ + set: + subject: AppInsights + - where: + subject: ^ApiKey$ + set: + subject: AppInsightsApiKey + - where: + verb: Get|New + subject: ^AppInsights$ + hide: true + - where: + verb: Get + subject: ^AppInsightsApiKey$ + hide: true + - where: + subject: ^AppInsights.* + set: + subject-prefix: '' +``` diff --git a/tests-upgrade/functions/helpers/KeyVault/readme.noprofile.md b/tests-upgrade/functions/helpers/KeyVault/readme.noprofile.md new file mode 100644 index 00000000000..114f3c1d6f2 --- /dev/null +++ b/tests-upgrade/functions/helpers/KeyVault/readme.noprofile.md @@ -0,0 +1,19 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../../../readme.azure.noprofile.md +input-file: + - $(repo)/specification/keyvault/resource-manager/Microsoft.KeyVault/stable/2016-10-01/keyvault.json + +# subject-prefix: '' + +directive: + # hide all cmdlets + - where: + subject: ^VaultDeleted$|^Vault$|^VaultNameAvailability$|^VaultAccessPolicy$ + hide: true + set: + subject-prefix: '' +``` diff --git a/tests-upgrade/functions/helpers/ManagedIdentity/readme.noprofile.md b/tests-upgrade/functions/helpers/ManagedIdentity/readme.noprofile.md new file mode 100644 index 00000000000..cc028e4031b --- /dev/null +++ b/tests-upgrade/functions/helpers/ManagedIdentity/readme.noprofile.md @@ -0,0 +1,27 @@ +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../../../readme.azure.noprofile.md +input-file: + - $(repo)/specification/msi/resource-manager/Microsoft.ManagedIdentity/stable/2018-11-30/ManagedIdentity.json + +subject-prefix: '' + +directive: + # Remove unnedded cmdlets + - where: + subject: ^Operation$ + remove: true + + # Hide Storage Account cmdlets + - where: + subject: ^UserAssignedIdentity.* + hide: true + - where: + subject: ^UserAssignedIdentity.* + set: + subject-prefix: '' + +``` diff --git a/tests-upgrade/functions/helpers/Storage/readme.md b/tests-upgrade/functions/helpers/Storage/readme.md new file mode 100644 index 00000000000..400c26b59a9 --- /dev/null +++ b/tests-upgrade/functions/helpers/Storage/readme.md @@ -0,0 +1,194 @@ + +# Az.Storage +This directory contains the PowerShell module for the Storage service. + +--- +## Status +[![Az.Storage](https://img.shields.io/powershellgallery/v/Az.Storage.svg?style=flat-square&label=Az.Storage "Az.Storage")](https://www.powershellgallery.com/packages/Az.Storage/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. 
+ +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.6.0 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.Storage`, see [how-to.md](how-to.md). + + +# Internal +This directory contains a module to handle *internal only* cmdlets. Cmdlets that you **hide** in configuration are created here. For more information on hiding, see [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression). The cmdlets in this directory are generated at **build-time**. Do not put any custom code, files, cmdlets, etc. into this directory. Please use `..\custom` for all custom implementation. + +## Info +- Modifiable: no +- Generated: all +- Committed: no +- Packaged: yes + +## Details +The `Az.Storage.internal.psm1` file is generated to this folder. This module file handles the hidden cmdlets. These cmdlets will not be exported by `Az.Storage`. Instead, this sub-module is imported by the `..\custom\Az.Storage.custom.psm1` module, allowing you to use hidden cmdlets in your custom, exposed cmdlets. To call these cmdlets in your custom scripts, simply use [module-qualified calls](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_command_precedence?view=powershell-6#qualified-names). For example, `Az.Storage.internal\Get-Example` would call an internal cmdlet named `Get-Example`. + +## Purpose +This allows you to include REST specifications for services that you *do not wish to expose from your module*, but simply want to call within custom cmdlets. For example, if you want to make a custom cmdlet that uses `Storage` services, you could include a simplified `Storage` REST specification that has only the operations you need. When you run the generator and build this module, note the generated `Storage` cmdlets. Then, in your readme configuration, use [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression) on the `Storage` cmdlets and they will *only be exposed to the custom cmdlets* you want to write, and not be exported as part of `Az.Storage`. 
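+
+As an illustration of that flow, a custom cmdlet kept under `..\custom` can reach a hidden cmdlet through a module-qualified call along the lines of the sketch below; the wrapper name, the internal cmdlet it calls, and the property names are hypothetical stand-ins for whatever your configuration actually hides.
+
+``` powershell
+# custom/Get-StorageAccountSummary.ps1 (illustrative)
+function Get-StorageAccountSummary {
+    param(
+        [Parameter(Mandatory)][string]$ResourceGroupName,
+        [Parameter(Mandatory)][string]$Name
+    )
+    # Call the hidden cmdlet via its module-qualified name, then shape the output.
+    $account = Az.Storage.internal\Get-AzStorageAccount -ResourceGroupName $ResourceGroupName -Name $Name
+    [pscustomobject]@{
+        Name     = $account.Name
+        Location = $account.Location
+        Sku      = $account.SkuName
+    }
+}
+```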
+ +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../../readme.azure.md + - $(repo)/specification/storage/resource-manager/readme.md + +subject-prefix: '' +# title: Storage +# module-version: 4.0.0 +# skip-model-cmdlets: true + +directive: + # Remove unnedded cmdlets + - where: + subject: ^BlobContainerLegalHold$ + remove: true + - where: + subject: ^BlobContainer$ + remove: true + - where: + subject: ^BlobContainerImmutabilityPolicy$ + remove: true + - where: + subject: ^BlobService$ + remove: true + - where: + subject: ^BlobServiceProperty$ + remove: true + - where: + subject: ^FileService$ + remove: true + - where: + subject: ^FileServiceProperty$ + remove: true + - where: + subject: ^FileShare$ + remove: true + - where: + subject: ^ManagementPolicy$ + remove: true + - where: + subject: ^Operation$ + remove: true + - where: + subject: ^Sku$ + remove: true + - where: + subject: ^StorageAccountProperty$ + remove: true + - where: + subject: ^Usage$ + remove: true + - where: + subject: ^ExtendBlobContainerImmutabilityPolicy$ + remove: true + - where: + subject: ^LeaseBlobContainer$ + remove: true + - where: + subject: ^StorageAccountFailover$ + remove: true + - where: + subject: ^ContainerImmutabilityPolicy$ + remove: true + - where: + subject: ^StorageAccountUserDelegationKey$ + remove: true + - where: + subject: ^StorageAccountNameAvailability$ + remove: true + - where: + verb: Set|New|Remove|Update + subject: ^StorageAccount$ + remove: true + - where: + verb: Get + subject: ^StorageAccountServiceSas$ + remove: true + - where: + verb: Get + subject: ^StorageAccountSas$ + remove: true + - where: + verb: New + subject: ^StorageAccountKey$ + remove: true + + + # Hide Storage Account cmdlets + - where: + subject: ^StorageAccount.* + hide: true + - where: + subject: ^StorageAccount.* + set: + subject-prefix: '' + + # StorageAccount + - where: + subject: StorageAccount.* + parameter-name: AccountName + set: + parameter-name: Name + - where: + subject: StorageAccount + parameter-name: CustomDomainUseSubDomainName + set: + parameter-name: UseSubDomain + - where: + subject: StorageAccount + parameter-name: NetworkAcls(.*) + set: + parameter-name: NetworkRuleSet$1 + - where: + subject: StorageAccount + parameter-name: BlobEnabled + set: + parameter-name: EncryptBlobService + - where: + subject: StorageAccount + parameter-name: FileEnabled + set: + parameter-name: EncryptFileService + - where: + subject: StorageAccount + parameter-name: QueueEnabled + set: + parameter-name: EncryptQueueService + - where: + subject: StorageAccount + parameter-name: TableEnabled + set: + parameter-name: EncryptTableService + - where: + subject: ^StorageAccount$ + parameter-name: Keyvaultproperty(.*) + set: + parameter-name: $1 + - where: + subject: ^StorageAccount$ + parameter-name: IsHnsEnabled + set: + parameter-name: EnableHierarchicalNamespace +``` diff --git a/tests-upgrade/functions/helpers/Storage/readme.noprofile.md b/tests-upgrade/functions/helpers/Storage/readme.noprofile.md new file mode 100644 index 00000000000..a2bbb2b9471 --- /dev/null +++ b/tests-upgrade/functions/helpers/Storage/readme.noprofile.md @@ -0,0 +1,196 @@ + +# Az.Storage +This directory contains the PowerShell module for the Storage service. 
+ +--- +## Status +[![Az.Storage](https://img.shields.io/powershellgallery/v/Az.Storage.svg?style=flat-square&label=Az.Storage "Az.Storage")](https://www.powershellgallery.com/packages/Az.Storage/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.6.0 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.Storage`, see [how-to.md](how-to.md). + + +# Internal +This directory contains a module to handle *internal only* cmdlets. Cmdlets that you **hide** in configuration are created here. For more information on hiding, see [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression). The cmdlets in this directory are generated at **build-time**. Do not put any custom code, files, cmdlets, etc. into this directory. Please use `..\custom` for all custom implementation. + +## Info +- Modifiable: no +- Generated: all +- Committed: no +- Packaged: yes + +## Details +The `Az.Storage.internal.psm1` file is generated to this folder. This module file handles the hidden cmdlets. These cmdlets will not be exported by `Az.Storage`. Instead, this sub-module is imported by the `..\custom\Az.Storage.custom.psm1` module, allowing you to use hidden cmdlets in your custom, exposed cmdlets. To call these cmdlets in your custom scripts, simply use [module-qualified calls](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_command_precedence?view=powershell-6#qualified-names). For example, `Az.Storage.internal\Get-Example` would call an internal cmdlet named `Get-Example`. + +## Purpose +This allows you to include REST specifications for services that you *do not wish to expose from your module*, but simply want to call within custom cmdlets. For example, if you want to make a custom cmdlet that uses `Storage` services, you could include a simplified `Storage` REST specification that has only the operations you need. When you run the generator and build this module, note the generated `Storage` cmdlets. Then, in your readme configuration, use [cmdlet hiding](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md#cmdlet-hiding-exportation-suppression) on the `Storage` cmdlets and they will *only be exposed to the custom cmdlets* you want to write, and not be exported as part of `Az.Storage`. 
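+
+After a build, a quick way to see the effect of hiding is to compare the exported surface with the internal sub-module; the paths and the cmdlet name below are illustrative and assume the layout this generator typically produces.
+
+``` powershell
+# Only the non-hidden cmdlets surface from the main module.
+Import-Module ./Az.Storage.psd1
+Get-Command -Module Az.Storage | Sort-Object Name
+
+# Hidden cmdlets still live in the internal sub-module and stay reachable
+# through module-qualified calls from custom code.
+Import-Module ./internal/Az.Storage.internal.psm1
+Az.Storage.internal\Get-AzStorageAccount -ResourceGroupName rg -Name myaccount
+```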
+ +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../../../readme.azure.noprofile.md +input-file: + - $(repo)/specification/storage/resource-manager/Microsoft.Storage/stable/2019-04-01/storage.json + - $(repo)/specification/storage/resource-manager/Microsoft.Storage/stable/2019-04-01/blob.json + +subject-prefix: '' +# title: Storage +# module-version: 4.0.0 +# skip-model-cmdlets: true + +directive: + # Remove unnedded cmdlets + - where: + subject: ^BlobContainerLegalHold$ + remove: true + - where: + subject: ^BlobContainer$ + remove: true + - where: + subject: ^BlobContainerImmutabilityPolicy$ + remove: true + - where: + subject: ^BlobService$ + remove: true + - where: + subject: ^BlobServiceProperty$ + remove: true + - where: + subject: ^FileService$ + remove: true + - where: + subject: ^FileServiceProperty$ + remove: true + - where: + subject: ^FileShare$ + remove: true + - where: + subject: ^ManagementPolicy$ + remove: true + - where: + subject: ^Operation$ + remove: true + - where: + subject: ^Sku$ + remove: true + - where: + subject: ^StorageAccountProperty$ + remove: true + - where: + subject: ^Usage$ + remove: true + - where: + subject: ^ExtendBlobContainerImmutabilityPolicy$ + remove: true + - where: + subject: ^LeaseBlobContainer$ + remove: true + - where: + subject: ^StorageAccountFailover$ + remove: true + - where: + subject: ^ContainerImmutabilityPolicy$ + remove: true + - where: + subject: ^StorageAccountUserDelegationKey$ + remove: true + - where: + subject: ^StorageAccountNameAvailability$ + remove: true + - where: + verb: Set|New|Remove|Update + subject: ^StorageAccount$ + remove: true + - where: + verb: Get + subject: ^StorageAccountServiceSas$ + remove: true + - where: + verb: Get + subject: ^StorageAccountSas$ + remove: true + - where: + verb: New + subject: ^StorageAccountKey$ + remove: true + + + # Hide Storage Account cmdlets + - where: + subject: ^StorageAccount.* + hide: true + - where: + subject: ^StorageAccount.* + set: + subject-prefix: '' + + # StorageAccount + - where: + subject: StorageAccount.* + parameter-name: AccountName + set: + parameter-name: Name + - where: + subject: StorageAccount + parameter-name: CustomDomainUseSubDomainName + set: + parameter-name: UseSubDomain + - where: + subject: StorageAccount + parameter-name: NetworkAcls(.*) + set: + parameter-name: NetworkRuleSet$1 + - where: + subject: StorageAccount + parameter-name: BlobEnabled + set: + parameter-name: EncryptBlobService + - where: + subject: StorageAccount + parameter-name: FileEnabled + set: + parameter-name: EncryptFileService + - where: + subject: StorageAccount + parameter-name: QueueEnabled + set: + parameter-name: EncryptQueueService + - where: + subject: StorageAccount + parameter-name: TableEnabled + set: + parameter-name: EncryptTableService + - where: + subject: ^StorageAccount$ + parameter-name: Keyvaultproperty(.*) + set: + parameter-name: $1 + - where: + subject: ^StorageAccount$ + parameter-name: IsHnsEnabled + set: + parameter-name: EnableHierarchicalNamespace +``` diff --git a/tests-upgrade/functions/readme.md b/tests-upgrade/functions/readme.md new file mode 100644 index 00000000000..7b297123629 --- /dev/null +++ b/tests-upgrade/functions/readme.md @@ -0,0 +1,506 @@ + +# Az.Functions +This directory contains the PowerShell module for the Functions service. 
+ +--- +## Status +[![Az.Functions](https://img.shields.io/powershellgallery/v/Az.Functions.svg?style=flat-square&label=Az.Functions "Az.Functions")](https://www.powershellgallery.com/packages/Az.Functions/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.7.4 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.Functions`, see [how-to.md](how-to.md). + + +--- +## Generation Requirements +Use of the beta version of `autorest.powershell` generator requires the following: +- [NodeJS LTS](https://nodejs.org) (10.15.x LTS preferred) + - **Note**: It *will not work* with Node < 10.x. Using 11.x builds may cause issues as they may introduce instability or breaking changes. +> If you want an easy way to install and update Node, [NVS - Node Version Switcher](../nodejs/installing-via-nvs.md) or [NVM - Node Version Manager](../nodejs/installing-via-nvm.md) is recommended. +- [AutoRest](https://aka.ms/autorest) v3 beta
`npm install -g autorest@beta`
+- PowerShell 6.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g pwsh`
+- .NET Core SDK 2.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g dotnet-sdk-2.2`
  + +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +### Suppression + +``` yaml +directive: + - suppress: XmsResourceInPutResponse + from: WebApps.json + where: $.paths["/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/functions/{functionName}/keys/{keyName}"].put + reason: Model type is not an Azure resource + - suppress: RequiredPropertiesMissingInResourceModel + from: WebApps.json + where: $.definitions.KeyInfo + reason: Model type is not an Azure resource + - suppress: BodyTopLevelProperties + from: WebApps.json + where: $.definitions.KeyInfo.properties + reason: Model type is not an Azure resource +``` + +``` yaml +branch: powershell-function +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - $(repo)/specification/web/resource-manager/Microsoft.CertificateRegistration/stable/2019-08-01/AppServiceCertificateOrders.json + - $(repo)/specification/web/resource-manager/Microsoft.CertificateRegistration/stable/2019-08-01/CertificateRegistrationProvider.json + - $(repo)/specification/web/resource-manager/Microsoft.DomainRegistration/stable/2019-08-01/Domains.json + - $(repo)/specification/web/resource-manager/Microsoft.DomainRegistration/stable/2019-08-01/TopLevelDomains.json + - $(repo)/specification/web/resource-manager/Microsoft.DomainRegistration/stable/2019-08-01/DomainRegistrationProvider.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/Certificates.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/CommonDefinitions.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/DeletedWebApps.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/Diagnostics.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/Provider.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/Recommendations.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/ResourceProvider.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/WebApps.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/StaticSites.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/AppServiceEnvironments.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/AppServicePlans.json + - $(repo)/specification/web/resource-manager/Microsoft.Web/stable/2019-08-01/ResourceHealthMetadata.json +module-version: 1.0.1 +title: Functions +subject-prefix: '' + +metadata: + authors: Microsoft Corporation + owners: Microsoft Corporation + description: 'Microsoft Azure PowerShell - Azure Functions service cmdlets for Azure Resource Manager in Windows PowerShell and PowerShell Core.\n\nFor information on Azure Functions, please visit the following: https://docs.microsoft.com/azure/azure-functions/' + copyright: Microsoft Corporation. All rights reserved. 
+ tags: Azure ResourceManager ARM PSModule Functions + companyName: Microsoft Corporation + requireLicenseAcceptance: true + licenseUri: https://aka.ms/azps-license + projectUri: https://github.com/Azure/azure-powershell + +directive: + - from: WebApps.json + where: $.paths["/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/privateEndpointConnections/{privateEndpointConnectionName}"].delete.responses.200 + transform: delete $.schema + - from: WebApps.json + where: $.paths["/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/privateEndpointConnections/{privateEndpointConnectionName}"].delete.responses.202 + transform: delete $.schema + - from: WebApps.json + where: $.paths["/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{name}/privateEndpointConnections/{privateEndpointConnectionName}"].delete.responses.204 + transform: delete $.schema + - from: Diagnostics.json + where: $.paths["/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/analyses/{analysisName}/execute"].post + transform: delete $."x-ms-examples" + - from: swagger-document + where: $..produces + #transform: $ = $.filter( each => each === 'application/json'); + transform: $ = ["application/json"]; + reason: this spec adds produces application/xml and text/json erronously. + - where: + subject: Operation + hide: true + - where: $.definitions.Identifier.properties + suppress: R3019 +# Cmdlet renames + - where: + verb: Backup|Get|New|Remove|Restart|Restore|Publish|Set|Start|Stop|Update + subject: WebApp + variant: (.*) + set: + subject: FunctionApp + hide: true + - where: + subject: WebAppFunction + set: + subject: Function + hide: true + - where: + subject: GeoRegion + set: + subject: FunctionAppAvailableLocation + hide: true + - where: + subject: AppServicePlan + set: + subject: FunctionAppPlan + hide: true + # Formats.ps1xml + - where: + model-name: Site + set: + suppress-format: true + - where: + model-name: GeoRegion + set: + format-table: + properties: + - Name + - where: + model-name: AppServicePlan + set: + suppress-format: true + # Parameter renames + - where: + subject: Function + parameter-name: Name + set: + parameter-name: FunctionAppName + hide: true + - where: + verb: New + subject: Connection + parameter-name: Name + clear-alias: true + - where: + verb: Set + subject: Connection + parameter-name: Name + clear-alias: true +# Cmdlets to hide + - where: + subject: (.*)WebAppApplicationSetting(.*) + hide: true + - where: + subject: (.*)AzWebAppSlot(.*) + hide: true + - where: + subject: (.*)NameAvailability(.*) + hide: true + - where: + subject: (.*)WebAppConfiguration(.*) + hide: true +# Cmdlets to remove + - where: + subject: WebAppPremierAddOn(.*) + remove: true + - where: + subject: WebAppVnetConnection(.*) + remove: true + - where: + subject: WebAppSwiftVirtualNetworkConnection(.*) + remove: true + - where: + subject: WebAppRelayServiceConnection(.*) + remove: true + - where: + subject: WebAppPremierAddOnSlot(.*) + remove: true + - where: + subject: WebAppHybridConnection(.*) + remove: true + - where: + subject: WebAppDomainOwnershipIdentifier(.*) + remove: true + - where: + subject: SiteVnetConnection(.*) + remove: true + - where: + subject: SiteRelayServiceConnection(.*) + remove: true + - where: + subject: (.*)Domain(.*) + remove: true + - where: + subject: 
(.*)Certificate(.*) + remove: true + - where: + subject: AppServicePlanVnetRoute(.*) + remove: true + - where: + subject: AppServiceEnvironmentWorkerPool(.*) + remove: true + - where: + subject: AppServiceEnvironmentMultiRolePool(.*) + remove: true + - where: + subject: WebAppCustomHostname(.*) + remove: true + - where: + subject: HostingEnvironmentVnet(.*) + remove: true + - where: + subject: GlobalDomainRegistrationDomainPurchase(.*) + remove: true + - where: + subject: WebAppWebSiteNetworkTrace(.*) + remove: true + - where: + subject: WebAppWebSiteNetworkTraceSlot(.*) + remove: true + - where: + subject: WebAppNetworkTrace(.*) + remove: true + - where: + subject: WebAppPublicCertificate(.*) + remove: true + - where: + subject: WebAppDiagnosticLog(.*) + remove: true + - where: + subject: WebAppPerfMonCounter(.*) + remove: true + - where: + subject: WebAppMigrateMySqlStatus(.*) + remove: true + - where: + subject: WebAppMetric(.*) + remove: true + - where: + subject: SiteNetworkFeature(.*) + remove: true + - where: + subject: ResourceHealthMetadata(.*) + remove: true + - where: + subject: (.*)MultiRolePoolInstanceMetric(.*) + remove: true + - where: + subject: (.*)MultiRoleMetricDefinition(.*) + remove: true + - where: + subject: (.*)PremierAddOn(.*) + remove: true + - where: + subject: (.*)WebAppSlot(.*) + remove: true + - where: + subject: (.*)ConnectionConsent(.*) + remove: true + - where: + subject: (.*)WebAppBackup(.*) + remove: true + - where: + subject: (.*)AppServiceEnvironment(.*) + remove: true + - where: + subject: (.*)AppServicePlanHybridConnection(.*) + remove: true + - where: + subject: (.*)AppServicePlanMetric(.*) + remove: true + - where: + subject: (.*)BillingMeter(.*) + remove: true + - where: + subject: (.*)DeletedWebApp(.*) + remove: true + - where: + subject: (.*)DiagnosticSite(.*) + remove: true + - where: + subject: (.*)Global(.*) + remove: true + - where: + subject: (.*)Recommendation(.*) + remove: true + - where: + subject: (.*)ManagedApi(.*) + remove: true + - where: + subject: (.*)ManagedHosting(.*) + remove: true + - where: + subject: (.*)Provider(.*) + remove: true + - where: + subject: (.*)ServerFarm(.*) + remove: true + - where: + subject: (.*)SiteInstance(.*) + remove: true + - where: + subject: (.*)SiteOperation(.*) + remove: true + - where: + subject: (.*)SourceControl(.*) + remove: true + - where: + subject: (.*)SubscriptionDeployment(.*) + remove: true + - where: + subject: (.*)WebAppAzureStorage(.*) + remove: true + - where: + subject: (.*)WebAppConnection(.*) + remove: true + - where: + subject: (.*)WebAppContainer(.*) + remove: true + - where: + subject: (.*)WebAppContinuou(.*) + remove: true + - where: + subject: (.*)WebAppDeployment(.*) + remove: true + - where: + subject: (.*)WebAppInstance(.*) + remove: true + - where: + subject: (.*)WebAppMetadata(.*) + remove: true + - where: + subject: (.*)WebAppMS(.*) + remove: true + - where: + subject: (.*)WebAppNetwork(.*) + remove: true + - where: + subject: (.*)WebAppPrivate(.*) + remove: true + - where: + subject: (.*)WebAppPublishing(.*) + remove: true + - where: + subject: (.*)WebAppSite(.*) + remove: true + - where: + subject: (.*)WebAppSnapshot(.*) + remove: true + - where: + subject: (.*)WebAppSourceControl(.*) + remove: true + - where: + subject: (.*)WebAppSyncFunction(.*) + remove: true + - where: + subject: (.*)WebAppTriggered(.*) + remove: true + - where: + subject: (.*)WebAppUsage(.*) + remove: true + - where: + subject: (.*)AzWebAppWeb(.*) + remove: true + - where: + subject: (.*)Execute(.*) 
+ remove: true + - where: + subject: (.*)WebAppMySql(.*) + remove: true + - where: + subject: (.*)WebAppStorage(.*) + remove: true + - where: + subject: (.*)Connection(.*) + remove: true + - where: + subject: (.*)WebAppDeployment(.*) + remove: true + - where: + subject: (.*)WebAppHost(.*) + remove: true + - where: + subject: (.*)ManagedHosting(.*) + remove: true + - where: + subject: (.*)WebAppFrom(.*) + remove: true + - where: + subject: (.*)WebAppAuthSetting(.*) + remove: true + - where: + subject: (.*)AppServicePlan(.*) + remove: true + - where: + subject: (.*)ClassicMobile(.*) + remove: true + - where: + subject: (.*)Hosting(.*) + remove: true + - where: + subject: (.*)PublishingUser(.*) + remove: true + - where: + subject: (.*)SiteIdentifier(.*) + remove: true + - where: + subject: (.*)WebAppFunctionAdmin(.*) + remove: true + - where: + subject: (.*)WebAppFunctionSecret(.*) + remove: true + - where: + subject: (.*)WebAppProcess(.*) + remove: true + - where: + subject: (.*)WebAppWebJob(.*) + remove: true + - where: + subject: (.*)WebAppWebSite(.*) + remove: true + - where: + subject: (.*)WebAppNewSite(.*) + remove: true + - where: + subject: (.*)WebAppClone(.*) + remove: true + - where: + subject: Move(.*) + remove: true + - where: + subject: (.*)WebAppRepository(.*) + remove: true + - where: + subject: (.*)WebAppFunctionTrigger(.*) + remove: true + - where: + subject: AppServicePlanWebApp + remove: true + - where: + subject: (.*)WebAppSwift(.*) + remove: true + - where: + subject: (.*)WebAppProduction(.*) + remove: true + - where: + subject: (.*)WebAppCloneable(.*) + remove: true + - where: + subject: (.*)ContainerSetting(.*) + remove: true + - where: + subject: (.*)StaticSite(.*) + remove: true + - from: source-file-csharp + where: $ + transform: $ = $.replace(/sb.AppendLine\(\$@\"\{Indent\}FormatsToProcess = \{formatList\}\"\);/, 'sb.AppendLine\(\$@\"\{Indent\}FormatsToProcess = \{formatList\}\"\);\r\nsb.AppendLine\(\$@\"\{Indent\}TypesToProcess = \'./custom/Functions.types.ps1xml\'{Environment.NewLine}\{Indent\}ScriptsToProcess = \'./custom/HelperFunctions.ps1\'{Environment.NewLine}\{Indent\}FunctionsToExport = \'Get-AzFunctionApp\', \'Get-AzFunctionAppAvailableLocation\', \'Get-AzFunctionAppPlan\', \'Get-AzFunctionAppSetting\', \'New-AzFunctionApp\', \'New-AzFunctionAppPlan\', \'Remove-AzFunctionApp\', \'Remove-AzFunctionAppPlan\', \'Remove-AzFunctionAppSetting\', \'Restart-AzFunctionApp\', \'Start-AzFunctionApp\', \'Stop-AzFunctionApp\', \'Update-AzFunctionApp\', \'Update-AzFunctionAppPlan\', \'Update-AzFunctionAppSetting\'\"\);'); + - from: source-file-csharp + where: $ + transform: $ = $.replace(/sb.AppendLine\(\$@\"\{Indent\}AliasesToExport = \{aliasesList\}\"\);/, '') + - from: source-file-csharp + where: $ + transform: $ = $.replace(/sb.AppendLine\(\$@\"\{Indent\}FunctionsToExport = \{cmdletsList\}\"\);/, '') +``` + +``` yaml + +# Add Storage and AppInsights cmdlet subset +require: + - $(this-folder)/helpers/Storage/readme.noprofile.md + - $(this-folder)/helpers/AppInsights/readme.noprofile.md + - $(this-folder)/helpers/ManagedIdentity/readme.noprofile.md + +``` diff --git a/tests-upgrade/kubconf/kubernetesconfiguration.json b/tests-upgrade/kubconf/kubernetesconfiguration.json new file mode 100644 index 00000000000..77717c096ce --- /dev/null +++ b/tests-upgrade/kubconf/kubernetesconfiguration.json @@ -0,0 +1,664 @@ +{ + "swagger": "2.0", + "info": { + "version": "2019-11-01-preview", + "title": "SourceControlConfigurationClient", + "description": "Use these APIs to create 
Source Control Configuration resources through ARM, for Kubernetes Clusters." + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations/{sourceControlConfigurationName}": { + "get": { + "tags": [ + "SourceControlConfiguration" + ], + "description": "Gets details of the Source Control Configuration.", + "operationId": "SourceControlConfigurations_Get", + "x-ms-examples": { + "Get Source Control Configuration": { + "$ref": "./examples/GetSourceControlConfiguration.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ClusterRpParameter" + }, + { + "$ref": "#/parameters/ClusterResourceNameParameter" + }, + { + "$ref": "#/parameters/ClusterNameParameter" + }, + { + "$ref": "#/parameters/SourceControlConfigurationNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/SourceControlConfiguration" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + } + }, + "put": { + "tags": [ + "SourceControlConfiguration" + ], + "description": "Create a new Kubernetes Source Control Configuration.", + "operationId": "SourceControlConfigurations_CreateOrUpdate", + "x-ms-examples": { + "Create Source Control Configuration": { + "$ref": "./examples/CreateSourceControlConfiguration.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ClusterRpParameter" + }, + { + "$ref": "#/parameters/ClusterResourceNameParameter" + }, + { + "$ref": "#/parameters/ClusterNameParameter" + }, + { + "$ref": "#/parameters/SourceControlConfigurationNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "sourceControlConfiguration", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/SourceControlConfiguration" + }, + "description": "Properties necessary to Create KubernetesConfiguration." 
+ } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/SourceControlConfiguration" + } + }, + "201": { + "description": "Created.", + "schema": { + "$ref": "#/definitions/SourceControlConfiguration" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + } + }, + "delete": { + "tags": [ + "SourceControlConfiguration" + ], + "description": "This will delete the YAML file used to set up the Source control configuration, thus stopping future sync from the source repo.", + "operationId": "SourceControlConfigurations_Delete", + "x-ms-examples": { + "Delete Source Control Configuration": { + "$ref": "./examples/DeleteSourceControlConfiguration.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ClusterRpParameter" + }, + { + "$ref": "#/parameters/ClusterResourceNameParameter" + }, + { + "$ref": "#/parameters/ClusterNameParameter" + }, + { + "$ref": "#/parameters/SourceControlConfigurationNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK. The request has been completed successfully." + }, + "204": { + "description": "No Content. The request has been accepted but the configuration was not found." + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/sourceControlConfigurations": { + "get": { + "tags": [ + "SourceControlConfiguration" + ], + "description": "List all Source Control Configurations.", + "operationId": "SourceControlConfigurations_List", + "x-ms-examples": { + "List Source Control Configuration": { + "$ref": "./examples/ListSourceControlConfiguration.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ClusterRpParameter" + }, + { + "$ref": "#/parameters/ClusterResourceNameParameter" + }, + { + "$ref": "#/parameters/ClusterNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/SourceControlConfigurationList" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/providers/Microsoft.KubernetesConfiguration/operations": { + "get": { + "tags": [ + "Operations" + ], + "operationId": "Operations_List", + "x-ms-examples": { + "BatchAccountDelete": { + "$ref": "./examples/OperationsList.json" + } + }, + "description": "List all the available operations the KubernetesConfiguration resource provider supports.", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK response definition.", + "schema": { + "$ref": "#/definitions/ResourceProviderOperationList" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + } + }, + "definitions": { + 
"Resource": { + "description": "The Resource model definition.", + "type": "object", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Resource Id" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "Resource name" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "Resource type" + } + }, + "x-ms-azure-resource": true + }, + "ProxyResource": { + "description": "ARM proxy resource.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ], + "properties": {} + }, + "Result": { + "description": "Sample result definition", + "properties": { + "sampleProperty": { + "type": "string", + "description": "Sample property of type string" + } + } + }, + "ErrorResponse": { + "description": "Error response.", + "properties": { + "error": { + "description": "Error definition.", + "$ref": "#/definitions/ErrorDefinition" + } + } + }, + "ErrorDefinition": { + "description": "Error definition.", + "properties": { + "code": { + "description": "Service specific error code which serves as the substatus for the HTTP error code.", + "type": "string", + "readOnly": true + }, + "message": { + "description": "Description of the error.", + "type": "string", + "readOnly": true + }, + "details": { + "description": "Internal error details.", + "type": "array", + "items": { + "$ref": "#/definitions/ErrorDefinition" + }, + "readOnly": true + } + } + }, + "ComplianceStatus": { + "description": "Compliance Status details", + "type": "object", + "readOnly": true, + "properties": { + "complianceState": { + "description": "The compliance state of the configuration.", + "$ref": "#/definitions/ComplianceState", + "readOnly": true + }, + "lastConfigApplied": { + "description": "Datetime the configuration was last applied.", + "type": "string", + "format": "date-time" + }, + "message": { + "description": "Message from when the configuration was applied.", + "type": "string" + }, + "messageLevel": { + "description": "Level of the message.", + "type": "string", + "enum": [ + "Error", + "Warning", + "Information" + ], + "x-ms-enum": { + "name": "messageLevel", + "modelAsString": true + } + } + } + }, + "ComplianceState": { + "description": "The compliance state of the configuration.", + "type": "string", + "readOnly": true, + "x-ms-enum": { + "name": "complianceState", + "modelAsString": true + }, + "enum": [ + "Pending", + "Compliant", + "Noncompliant", + "Installed", + "Failed" + ] + }, + "ChartVersion": { + "description": "Version of the operator Helm chart.", + "type": "string" + }, + "ChartValues": { + "description": "Values override for the operator Helm chart.", + "type": "string" + }, + "HelmOperatorProperties": { + "description": "Properties for Helm operator.", + "type": "object", + "properties": { + "chartVersion": { + "description": "Version of the operator Helm chart.", + "$ref": "#/definitions/ChartVersion" + }, + "chartValues": { + "description": "Values override for the operator Helm chart.", + "$ref": "#/definitions/ChartValues" + } + } + }, + "SourceControlConfiguration": { + "description": "The SourceControl Configuration object.", + "properties": { + "properties": { + "type": "object", + "x-ms-client-flatten": true, + "description": "Properties to create a Source Control Configuration resource", + "properties": { + "repositoryUrl": { + "type": "string", + "description": "Url of the SourceControl Repository." + }, + "operatorNamespace": { + "description": "The namespace to which this operator is installed to. 
Maximum of 253 lower case alphanumeric characters, hyphen and period only.", + "type": "string", + "default": "default" + }, + "operatorInstanceName": { + "description": "Instance name of the operator - identifying the specific configuration.", + "type": "string" + }, + "operatorType": { + "description": "Type of the operator", + "type": "string", + "enum": [ + "Flux" + ], + "x-ms-enum": { + "name": "operatorType", + "modelAsString": true + } + }, + "operatorParams": { + "description": "Any Parameters for the Operator instance in string format.", + "type": "string" + }, + "operatorScope": { + "description": "Scope at which the operator will be installed.", + "type": "string", + "enum": [ + "cluster", + "namespace" + ], + "default": "cluster", + "x-ms-enum": { + "name": "operatorScope", + "modelAsString": true + } + }, + "repositoryPublicKey": { + "description": "Public Key associated with this SourceControl configuration (either generated within the cluster or provided by the user).", + "type": "string", + "readOnly": true + }, + "enableHelmOperator": { + "description": "Option to enable Helm Operator for this git configuration.", + "type": "string", + "enum": [ + "true", + "false" + ], + "x-ms-enum": { + "name": "enableHelmOperator", + "modelAsString": true + } + }, + "helmOperatorProperties": { + "description": "Properties for Helm operator.", + "type": "object", + "$ref": "#/definitions/HelmOperatorProperties" + }, + "provisioningState": { + "type": "string", + "description": "The provisioning state of the resource provider.", + "readOnly": true, + "x-ms-enum": { + "modelAsString": true, + "name": "ProvisioningState" + }, + "enum": [ + "Accepted", + "Deleting", + "Running", + "Succeeded", + "Failed" + ] + }, + "complianceStatus": { + "type": "object", + "description": "Compliance Status of the Configuration", + "readOnly": true, + "$ref": "#/definitions/ComplianceStatus" + } + } + } + }, + "allOf": [ + { + "$ref": "#/definitions/ProxyResource" + } + ] + }, + "SourceControlConfigurationList": { + "description": "Result of the request to list Source Control Configurations. It contains a list of SourceControlConfiguration objects and a URL link to get the next set of results.", + "properties": { + "value": { + "type": "array", + "readOnly": true, + "items": { + "$ref": "#/definitions/SourceControlConfiguration" + }, + "description": "List of Source Control Configurations within a Kubernetes cluster." + }, + "nextLink": { + "type": "string", + "readOnly": true, + "description": "URL to get the next set of configuration objects, if any." 
+ } + } + }, + "ResourceProviderOperation": { + "description": "Supported operation of this resource provider.", + "readOnly": true, + "properties": { + "name": { + "description": "Operation name, in format of {provider}/{resource}/{operation}", + "type": "string" + }, + "display": { + "description": "Display metadata associated with the operation.", + "properties": { + "provider": { + "description": "Resource provider: Microsoft KubernetesConfiguration.", + "type": "string" + }, + "resource": { + "description": "Resource on which the operation is performed.", + "type": "string" + }, + "operation": { + "description": "Type of operation: get, read, delete, etc.", + "type": "string" + }, + "description": { + "description": "Description of this operation.", + "type": "string" + } + } + } + } + }, + "ResourceProviderOperationList": { + "description": "Result of the request to list operations.", + "readOnly": true, + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/ResourceProviderOperation" + }, + "description": "List of operations supported by this resource provider." + }, + "nextLink": { + "type": "string", + "readOnly": true, + "description": "URL to the next set of results, if any." + } + } + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The Azure subscription ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000)" + }, + "ResourceGroupNameParameter": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group.", + "x-ms-parameter-location": "method" + }, + "ClusterRpParameter": { + "name": "clusterRp", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "Microsoft.ContainerService", + "Microsoft.Kubernetes" + ], + "description": "The Kubernetes cluster RP - either Microsoft.ContainerService (for AKS clusters) or Microsoft.Kubernetes (for OnPrem K8S clusters).", + "x-ms-parameter-location": "method" + }, + "ClusterResourceNameParameter": { + "name": "clusterResourceName", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "managedClusters", + "connectedClusters" + ], + "description": "The Kubernetes cluster resource name - either managedClusters (for AKS clusters) or connectedClusters (for OnPrem K8S clusters).", + "x-ms-parameter-location": "method" + }, + "ClusterNameParameter": { + "name": "clusterName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the kubernetes cluster.", + "x-ms-parameter-location": "method" + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to be used with the HTTP request." + }, + "SourceControlConfigurationNameParameter": { + "name": "sourceControlConfigurationName", + "in": "path", + "description": "Name of the Source Control Configuration.", + "required": true, + "type": "string", + "x-ms-parameter-location": "method" + } + } +} diff --git a/tests-upgrade/kubconf/readme.md b/tests-upgrade/kubconf/readme.md new file mode 100644 index 00000000000..d38a81ae602 --- /dev/null +++ b/tests-upgrade/kubconf/readme.md @@ -0,0 +1,86 @@ + +# Az.KubernetesConfiguration +This directory contains the PowerShell module for the KubernetesConfiguration service. 
+ +--- +## Status +[![Az.KubernetesConfiguration](https://img.shields.io/powershellgallery/v/Az.KubernetesConfiguration.svg?style=flat-square&label=Az.KubernetesConfiguration "Az.KubernetesConfiguration")](https://www.powershellgallery.com/packages/Az.KubernetesConfiguration/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.7.4 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.KubernetesConfiguration`, see [how-to.md](how-to.md). + + +--- +## Generation Requirements +Use of the beta version of the `autorest.powershell` generator requires the following (a quick version check is sketched after this list): +- [NodeJS LTS](https://nodejs.org) (10.15.x LTS preferred) + - **Note**: It *will not work* with Node < 10.x. Using 11.x builds may cause issues as they may introduce instability or breaking changes. +> If you want an easy way to install and update Node, [NVS - Node Version Switcher](../nodejs/installing-via-nvs.md) or [NVM - Node Version Manager](../nodejs/installing-via-nvm.md) is recommended. +- [AutoRest](https://aka.ms/autorest) v3 beta
`npm install -g autorest@beta`
  +- PowerShell 6.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g pwsh`
  +- .NET Core SDK 2.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g dotnet-sdk-2.2`
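+
+A minimal sketch for sanity-checking these prerequisites from a shell (assumes the tools are already on `PATH`; the versions in the comments are the minimums listed above):
+
+``` powershell
+node --version                              # expect a 10.x LTS build
+dotnet --version                            # expect SDK 2.0 or greater
+pwsh -Command '$PSVersionTable.PSVersion'   # expect 6.0 or greater
+```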
  + +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - ./kubernetesconfiguration.json + +title: KubernetesConfiguration +module-version: 0.1.0 +subject-prefix: '' + +identity-correction-for-post: true + +directive: + - where: + variant: ^Create$|^CreateViaIdentity$|^CreateViaIdentityExpanded$|^Update$|^UpdateViaIdentity$ + remove: true + - where: + subject: SourceControlConfiguration + set: + subject: KubernetesConfiguration + - where: + parameter-name: ClusterResourceName + set: + parameter-name: ClusterType + - where: + verb: Set + subject: KubernetesConfiguration + set: + verb: Update + - where: + verb: New|Remove + subject: KubernetesConfiguration + hide: true + - where: + verb: Update + subject: KubernetesConfiguration + remove: true +``` diff --git a/tests-upgrade/mysql/.gitattributes b/tests-upgrade/mysql/.gitattributes new file mode 100644 index 00000000000..2125666142e --- /dev/null +++ b/tests-upgrade/mysql/.gitattributes @@ -0,0 +1 @@ +* text=auto \ No newline at end of file diff --git a/tests-upgrade/mysql/.gitignore b/tests-upgrade/mysql/.gitignore new file mode 100644 index 00000000000..649721c69ce --- /dev/null +++ b/tests-upgrade/mysql/.gitignore @@ -0,0 +1,14 @@ +bin +obj +.vs +generated +internal +exports +custom/*.psm1 +test/*-TestResults.xml +/*.ps1 +/*.ps1xml +/*.psm1 +/*.snk +/*.csproj +/*.nuspec \ No newline at end of file diff --git a/tests-upgrade/mysql/common-types/resource-management/v1/privatelinks.json b/tests-upgrade/mysql/common-types/resource-management/v1/privatelinks.json new file mode 100644 index 00000000000..91e640b03fe --- /dev/null +++ b/tests-upgrade/mysql/common-types/resource-management/v1/privatelinks.json @@ -0,0 +1,174 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0", + "title": "Common types" + }, + "paths": {}, + "definitions": { + "PrivateEndpoint": { + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "The ARM identifier for Private Endpoint" + } + }, + "description": "The Private Endpoint resource." + }, + "PrivateEndpointConnection": { + "properties": { + "properties": { + "$ref": "#/definitions/PrivateEndpointConnectionProperties", + "x-ms-client-flatten": true, + "description": "Resource properties." + } + }, + "allOf": [ + { + "$ref": "./types.json#/definitions/Resource" + } + ], + "description": "The Private Endpoint Connection resource." + }, + "PrivateEndpointConnectionProperties": { + "properties": { + "privateEndpoint": { + "$ref": "#/definitions/PrivateEndpoint", + "description": "The resource of private end point." + }, + "privateLinkServiceConnectionState": { + "$ref": "#/definitions/PrivateLinkServiceConnectionState", + "description": "A collection of information about the state of the connection between service consumer and provider." + }, + "provisioningState": { + "$ref": "#/definitions/PrivateEndpointConnectionProvisioningState", + "description": "The provisioning state of the private endpoint connection resource." + } + }, + "required": [ + "privateLinkServiceConnectionState" + ], + "description": "Properties of the PrivateEndpointConnectProperties." + }, + "PrivateLinkServiceConnectionState": { + "properties": { + "status": { + "$ref": "#/definitions/PrivateEndpointServiceConnectionStatus", + "description": "Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service." 
+ }, + "description": { + "type": "string", + "description": "The reason for approval/rejection of the connection." + }, + "actionsRequired": { + "type": "string", + "description": "A message indicating if changes on the service provider require any updates on the consumer." + } + }, + "description": "A collection of information about the state of the connection between service consumer and provider." + }, + "PrivateEndpointServiceConnectionStatus": { + "type": "string", + "description": "The private endpoint connection status.", + "enum": [ + "Pending", + "Approved", + "Rejected" + ], + "x-ms-enum": { + "name": "PrivateEndpointServiceConnectionStatus", + "modelAsString": true + } + }, + "PrivateEndpointConnectionProvisioningState": { + "type": "string", + "readOnly": true, + "description": "The current provisioning state.", + "enum": [ + "Succeeded", + "Creating", + "Deleting", + "Failed" + ], + "x-ms-enum": { + "name": "PrivateEndpointConnectionProvisioningState", + "modelAsString": true + } + }, + "PrivateLinkResource": { + "properties": { + "properties": { + "$ref": "#/definitions/PrivateLinkResourceProperties", + "description": "Resource properties.", + "x-ms-client-flatten": true + } + }, + "allOf": [ + { + "$ref": "./types.json#/definitions/Resource" + } + ], + "description": "A private link resource" + }, + "PrivateLinkResourceProperties": { + "properties": { + "groupId": { + "description": "The private link resource group id.", + "type": "string", + "readOnly": true + }, + "requiredMembers": { + "description": "The private link resource required member names.", + "type": "array", + "items": { + "type": "string" + }, + "readOnly": true + }, + "requiredZoneNames": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The private link resource Private link DNS zone name." + } + }, + "description": "Properties of a private link resource." + }, + "PrivateEndpointConnectionListResult": { + "properties": { + "value": { + "type": "array", + "description": "Array of private endpoint connections", + "items": { + "$ref": "#/definitions/PrivateEndpointConnection" + } + } + }, + "description": "List of private endpoint connection associated with the specified storage account" + }, + "PrivateLinkResourceListResult": { + "properties": { + "value": { + "type": "array", + "description": "Array of private link resources", + "items": { + "$ref": "#/definitions/PrivateLinkResource" + } + } + }, + "description": "A list of private link resources" + } + }, + "parameters": { + "PrivateEndpointConnectionName": { + "name": "privateEndpointConnectionName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the private endpoint connection associated with the Azure resource", + "x-ms-parameter-location": "method" + } + } +} diff --git a/tests-upgrade/mysql/common-types/resource-management/v1/types.json b/tests-upgrade/mysql/common-types/resource-management/v1/types.json new file mode 100644 index 00000000000..bf32e064a31 --- /dev/null +++ b/tests-upgrade/mysql/common-types/resource-management/v1/types.json @@ -0,0 +1,467 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0", + "title": "Common types" + }, + "paths": {}, + "definitions": { + "Resource": { + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified resource Id for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts." + } + }, + "x-ms-azure-resource": true + }, + "AzureEntityResource": { + "x-ms-client-name": "AzureEntityResource", + "description": "The resource model definition for a Azure Resource Manager resource with an etag.", + "properties": { + "etag": { + "type": "string", + "readOnly": true, + "description": "Resource Etag." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "TrackedResource": { + "description": "The resource model definition for a ARM tracked top level resource", + "properties": { + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "Resource tags." + }, + "location": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "The geo-location where the resource lives" + } + }, + "required": [ + "location" + ], + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "ProxyResource": { + "description": "The resource model definition for a ARM proxy resource. It will have everything other than required location and tags", + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "ResourceModelWithAllowedPropertySet": { + "description": "The resource model definition containing the full set of allowed properties for a resource. Except properties bag, there cannot be a top level property outside of this set.", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "x-ms-mutability": [ + "read" + ], + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "x-ms-mutability": [ + "read" + ], + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.." + }, + "location": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "The geo-location where the resource lives" + }, + "managedBy": { + "type": "string", + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "The fully qualified resource ID of the resource that manages this resource. Indicates if this resource is managed by another azure resource. If this is present, complete mode deployment will not delete the resource if it is removed from the template since it is managed by another resource." + }, + "kind": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, the resource provider must validate and persist this value.", + "pattern": "^[-\\w\\._,\\(\\)]+$" + }, + "etag": { + "readOnly": true, + "type": "string", + "description": "The etag field is *not* required. 
If it is provided in the response body, it must also be provided as a header per the normal etag convention. Entity tags are used for comparing two or more entities from the same requested resource. HTTP/1.1 uses entity tags in the etag (section 14.19), If-Match (section 14.24), If-None-Match (section 14.26), and If-Range (section 14.27) header fields. " + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "Resource tags." + }, + "identity": { + "allOf": [ + { + "$ref": "#/definitions/Identity" + } + ] + }, + "sku": { + "allOf": [ + { + "$ref": "#/definitions/Sku" + } + ] + }, + "plan": { + "allOf": [ + { + "$ref": "#/definitions/Plan" + } + ] + } + }, + "x-ms-azure-resource": true + }, + "Sku": { + "description": "The resource model definition representing SKU", + "properties": { + "name": { + "type": "string", + "description": "The name of the SKU. Ex - P3. It is typically a letter+number code" + }, + "tier": { + "type": "string", + "enum": [ + "Free", + "Basic", + "Standard", + "Premium" + ], + "x-ms-enum": { + "name": "SkuTier", + "modelAsString": false + }, + "description": "This field is required to be implemented by the Resource Provider if the service has more than one tier, but is not required on a PUT." + }, + "size": { + "type": "string", + "description": "The SKU size. When the name field is the combination of tier and some other value, this would be the standalone code. " + }, + "family": { + "type": "string", + "description": "If the service has different generations of hardware, for the same SKU, then that can be captured here." + }, + "capacity": { + "type": "integer", + "format": "int32", + "description": "If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted." + } + }, + "required": [ + "name" + ] + }, + "Identity": { + "description": "Identity for the resource.", + "properties": { + "principalId": { + "readOnly": true, + "type": "string", + "description": "The principal ID of resource identity." + }, + "tenantId": { + "readOnly": true, + "type": "string", + "description": "The tenant ID of resource." + }, + "type": { + "type": "string", + "description": "The identity type.", + "enum": [ + "SystemAssigned" + ], + "x-ms-enum": { + "name": "ResourceIdentityType", + "modelAsString": false + } + } + } + }, + "Plan": { + "properties": { + "name": { + "type": "string", + "description": "A user defined name of the 3rd Party Artifact that is being procured." + }, + "publisher": { + "type": "string", + "description": "The publisher of the 3rd Party Artifact that is being bought. E.g. NewRelic" + }, + "product": { + "type": "string", + "description": "The 3rd Party artifact that is being procured. E.g. NewRelic. Product maps to the OfferID specified for the artifact at the time of Data Market onboarding. " + }, + "promotionCode": { + "type": "string", + "description": "A publisher provided promotion code as provisioned in Data Market for the said product/artifact." + }, + "version": { + "type": "string", + "description": "The version of the desired product/artifact." + } + }, + "description": "Plan for the resource.", + "required": [ + "name", + "publisher", + "product" + ] + }, + "ErrorResponse": { + "properties": { + "code": { + "readOnly": true, + "type": "string", + "description": "The error code." 
+ }, + "message": { + "readOnly": true, + "type": "string", + "description": "The error message." + }, + "target": { + "readOnly": true, + "type": "string", + "description": "The error target." + }, + "details": { + "readOnly": true, + "type": "array", + "items": { + "$ref": "#/definitions/ErrorResponse" + }, + "description": "The error details." + }, + "additionalInfo": { + "readOnly": true, + "type": "array", + "items": { + "$ref": "#/definitions/ErrorAdditionalInfo" + }, + "description": "The error additional info." + } + }, + "description": "The resource management error response." + }, + "ErrorAdditionalInfo": { + "properties": { + "type": { + "readOnly": true, + "type": "string", + "description": "The additional info type." + }, + "info": { + "readOnly": true, + "type": "object", + "description": "The additional info." + } + }, + "description": "The resource management error additional info." + }, + "locationData": { + "description": "Metadata pertaining to the geographic location of the resource.", + "type": "object", + "properties": { + "name": { + "type": "string", + "maxLength": 256, + "description": "A canonical name for the geographic or physical location." + }, + "city": { + "type": "string", + "description": "The city or locality where the resource is located." + }, + "district": { + "type": "string", + "description": "The district, state, or province where the resource is located." + }, + "countryOrRegion": { + "type": "string", + "description": "The country or region where the resource is located" + } + }, + "required": [ + "name" + ] + }, + "systemData": { + "description": "Metadata pertaining to creation and last modification of the resource.", + "type": "object", + "readOnly": true, + "properties": { + "createdBy": { + "type": "string", + "description": "The identity that created the resource." + }, + "createdByType": { + "type": "string", + "description": "The type of identity that created the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The timestamp of resource creation (UTC)." + }, + "lastModifiedBy": { + "type": "string", + "description": "The identity that last modified the resource." + }, + "lastModifiedByType": { + "type": "string", + "description": "The type of identity that last modified the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "lastModifiedAt": { + "type": "string", + "format": "date-time", + "description": "The type of identity that last modified the resource." + } + } + }, + "encryptionProperties": { + "description": "Configuration of key for data encryption", + "type": "object", + "properties": { + "status": { + "description": "Indicates whether or not the encryption is enabled for container registry.", + "enum": [ + "enabled", + "disabled" + ], + "type": "string", + "x-ms-enum": { + "name": "EncryptionStatus", + "modelAsString": true + } + }, + "keyVaultProperties": { + "$ref": "#/definitions/KeyVaultProperties", + "description": "Key vault properties." 
+ } + } + }, + "KeyVaultProperties": { + "type": "object", + "properties": { + "keyIdentifier": { + "description": "Key vault uri to access the encryption key.", + "type": "string" + }, + "identity": { + "description": "The client id of the identity which will be used to access key vault.", + "type": "string" + } + } + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription.", + "minLength": 1 + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to use for this operation.", + "minLength": 1 + }, + "ResourceGroupNameParameter": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/mysql/common-types/resource-management/v2/types.json b/tests-upgrade/mysql/common-types/resource-management/v2/types.json new file mode 100644 index 00000000000..7ee7093af3e --- /dev/null +++ b/tests-upgrade/mysql/common-types/resource-management/v2/types.json @@ -0,0 +1,473 @@ +{ + "swagger": "2.0", + "info": { + "version": "2.0", + "title": "Common types" + }, + "paths": {}, + "definitions": { + "Resource": { + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts." + } + }, + "x-ms-azure-resource": true + }, + "AzureEntityResource": { + "x-ms-client-name": "AzureEntityResource", + "description": "The resource model definition for a Azure Resource Manager resource with an etag.", + "properties": { + "etag": { + "type": "string", + "readOnly": true, + "description": "Resource Etag." + } + }, + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "TrackedResource": { + "description": "The resource model definition for a ARM tracked top level resource", + "properties": { + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "Resource tags." + }, + "location": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "The geo-location where the resource lives" + } + }, + "required": [ + "location" + ], + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "ProxyResource": { + "description": "The resource model definition for a ARM proxy resource. It will have everything other than required location and tags", + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "ResourceModelWithAllowedPropertySet": { + "description": "The resource model definition containing the full set of allowed properties for a resource. 
Except properties bag, there cannot be a top level property outside of this set.", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "x-ms-mutability": [ + "read" + ], + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "x-ms-mutability": [ + "read" + ], + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.." + }, + "location": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "The geo-location where the resource lives" + }, + "managedBy": { + "type": "string", + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "The fully qualified resource ID of the resource that manages this resource. Indicates if this resource is managed by another azure resource. If this is present, complete mode deployment will not delete the resource if it is removed from the template since it is managed by another resource." + }, + "kind": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, the resource provider must validate and persist this value.", + "pattern": "^[-\\w\\._,\\(\\)]+$" + }, + "etag": { + "readOnly": true, + "type": "string", + "description": "The etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal etag convention. Entity tags are used for comparing two or more entities from the same requested resource. HTTP/1.1 uses entity tags in the etag (section 14.19), If-Match (section 14.24), If-None-Match (section 14.26), and If-Range (section 14.27) header fields. " + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "Resource tags." + }, + "identity": { + "allOf": [ + { + "$ref": "#/definitions/Identity" + } + ] + }, + "sku": { + "allOf": [ + { + "$ref": "#/definitions/Sku" + } + ] + }, + "plan": { + "allOf": [ + { + "$ref": "#/definitions/Plan" + } + ] + } + }, + "x-ms-azure-resource": true + }, + "Sku": { + "description": "The resource model definition representing SKU", + "properties": { + "name": { + "type": "string", + "description": "The name of the SKU. Ex - P3. It is typically a letter+number code" + }, + "tier": { + "type": "string", + "enum": [ + "Free", + "Basic", + "Standard", + "Premium" + ], + "x-ms-enum": { + "name": "SkuTier", + "modelAsString": false + }, + "description": "This field is required to be implemented by the Resource Provider if the service has more than one tier, but is not required on a PUT." + }, + "size": { + "type": "string", + "description": "The SKU size. When the name field is the combination of tier and some other value, this would be the standalone code. " + }, + "family": { + "type": "string", + "description": "If the service has different generations of hardware, for the same SKU, then that can be captured here." 
+ }, + "capacity": { + "type": "integer", + "format": "int32", + "description": "If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted." + } + }, + "required": [ + "name" + ] + }, + "Identity": { + "description": "Identity for the resource.", + "properties": { + "principalId": { + "readOnly": true, + "type": "string", + "description": "The principal ID of resource identity." + }, + "tenantId": { + "readOnly": true, + "type": "string", + "description": "The tenant ID of resource." + }, + "type": { + "type": "string", + "description": "The identity type.", + "enum": [ + "SystemAssigned" + ], + "x-ms-enum": { + "name": "ResourceIdentityType", + "modelAsString": false + } + } + } + }, + "Plan": { + "properties": { + "name": { + "type": "string", + "description": "A user defined name of the 3rd Party Artifact that is being procured." + }, + "publisher": { + "type": "string", + "description": "The publisher of the 3rd Party Artifact that is being bought. E.g. NewRelic" + }, + "product": { + "type": "string", + "description": "The 3rd Party artifact that is being procured. E.g. NewRelic. Product maps to the OfferID specified for the artifact at the time of Data Market onboarding. " + }, + "promotionCode": { + "type": "string", + "description": "A publisher provided promotion code as provisioned in Data Market for the said product/artifact." + }, + "version": { + "type": "string", + "description": "The version of the desired product/artifact." + } + }, + "description": "Plan for the resource.", + "required": [ + "name", + "publisher", + "product" + ] + }, + "ErrorResponse": { + "properties": { + "error": { + "type": "object", + "description": "The error object.", + "properties": { + "code": { + "readOnly": true, + "type": "string", + "description": "The error code." + }, + "message": { + "readOnly": true, + "type": "string", + "description": "The error message." + }, + "target": { + "readOnly": true, + "type": "string", + "description": "The error target." + }, + "details": { + "readOnly": true, + "type": "array", + "items": { + "$ref": "#/definitions/ErrorResponse" + }, + "description": "The error details." + }, + "additionalInfo": { + "readOnly": true, + "type": "array", + "items": { + "$ref": "#/definitions/ErrorAdditionalInfo" + }, + "description": "The error additional info." + } + } + } + }, + "description": "The resource management error response." + }, + "ErrorAdditionalInfo": { + "properties": { + "type": { + "readOnly": true, + "type": "string", + "description": "The additional info type." + }, + "info": { + "readOnly": true, + "type": "object", + "description": "The additional info." + } + }, + "description": "The resource management error additional info." + }, + "locationData": { + "description": "Metadata pertaining to the geographic location of the resource.", + "type": "object", + "properties": { + "name": { + "type": "string", + "maxLength": 256, + "description": "A canonical name for the geographic or physical location." + }, + "city": { + "type": "string", + "description": "The city or locality where the resource is located." + }, + "district": { + "type": "string", + "description": "The district, state, or province where the resource is located." 
+ }, + "countryOrRegion": { + "type": "string", + "description": "The country or region where the resource is located" + } + }, + "required": [ + "name" + ] + }, + "systemData": { + "description": "Metadata pertaining to creation and last modification of the resource.", + "type": "object", + "readOnly": true, + "properties": { + "createdBy": { + "type": "string", + "description": "The identity that created the resource." + }, + "createdByType": { + "type": "string", + "description": "The type of identity that created the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "createdAt": { + "type": "string", + "format": "date-time", + "description": "The timestamp of resource creation (UTC)." + }, + "lastModifiedBy": { + "type": "string", + "description": "The identity that last modified the resource." + }, + "lastModifiedByType": { + "type": "string", + "description": "The type of identity that last modified the resource.", + "enum": [ + "User", + "Application", + "ManagedIdentity", + "Key" + ], + "x-ms-enum": { + "name": "createdByType", + "modelAsString": true + } + }, + "lastModifiedAt": { + "type": "string", + "format": "date-time", + "description": "The type of identity that last modified the resource." + } + } + }, + "encryptionProperties": { + "description": "Configuration of key for data encryption", + "type": "object", + "properties": { + "status": { + "description": "Indicates whether or not the encryption is enabled for container registry.", + "enum": [ + "enabled", + "disabled" + ], + "type": "string", + "x-ms-enum": { + "name": "EncryptionStatus", + "modelAsString": true + } + }, + "keyVaultProperties": { + "$ref": "#/definitions/KeyVaultProperties", + "description": "Key vault properties." + } + } + }, + "KeyVaultProperties": { + "type": "object", + "properties": { + "keyIdentifier": { + "description": "Key vault uri to access the encryption key.", + "type": "string" + }, + "identity": { + "description": "The client id of the identity which will be used to access key vault.", + "type": "string" + } + } + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription.", + "minLength": 1 + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to use for this operation.", + "minLength": 1 + }, + "ResourceGroupNameParameter": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + } + } +} diff --git a/tests-upgrade/mysql/common-types/rfcs/rfc7517.json b/tests-upgrade/mysql/common-types/rfcs/rfc7517.json new file mode 100644 index 00000000000..f856d152f36 --- /dev/null +++ b/tests-upgrade/mysql/common-types/rfcs/rfc7517.json @@ -0,0 +1,104 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0", + "title": "Common types" + }, + "paths": {}, + "definitions": { + "JSONWebKey": { + "type": "object", + "required": [ + "use", + "kty", + "kid", + "alg" + ], + "properties": { + "alg": { + "description": "The \"alg\" (algorithm) parameter identifies the algorithm intended for\nuse with the key. 
The values used should either be registered in the\nIANA \"JSON Web Signature and Encryption Algorithms\" registry\nestablished by [JWA] or be a value that contains a Collision-\nResistant Name.", + "type": "string" + }, + "crv": { + "description": "The \"crv\" (curve) parameter identifies the curve type", + "type": "string" + }, + "d": { + "description": "RSA private exponent or ECC private key", + "type": "string" + }, + "dp": { + "description": "RSA Private Key Parameter", + "type": "string" + }, + "dq": { + "description": "RSA Private Key Parameter", + "type": "string" + }, + "e": { + "description": "RSA public exponent, in Base64", + "type": "string" + }, + "k": { + "description": "Symmetric key", + "type": "string" + }, + "kid": { + "description": "The \"kid\" (key ID) parameter is used to match a specific key. This\nis used, for instance, to choose among a set of keys within a JWK Set\nduring key rollover. The structure of the \"kid\" value is\nunspecified. When \"kid\" values are used within a JWK Set, different\nkeys within the JWK Set SHOULD use distinct \"kid\" values. (One\nexample in which different keys might use the same \"kid\" value is if\nthey have different \"kty\" (key type) values but are considered to be\nequivalent alternatives by the application using them.) The \"kid\"\nvalue is a case-sensitive string.", + "type": "string" + }, + "kty": { + "description": "The \"kty\" (key type) parameter identifies the cryptographic algorithm\nfamily used with the key, such as \"RSA\" or \"EC\". \"kty\" values should\neither be registered in the IANA \"JSON Web Key Types\" registry\nestablished by [JWA] or be a value that contains a Collision-\nResistant Name. The \"kty\" value is a case-sensitive string.", + "type": "string" + }, + "n": { + "description": "RSA modulus, in Base64", + "type": "string" + }, + "p": { + "description": "RSA secret prime", + "type": "string" + }, + "q": { + "description": "RSA secret prime, with p < q", + "type": "string" + }, + "qi": { + "description": "RSA Private Key Parameter", + "type": "string" + }, + "use": { + "description": "Use (\"public key use\") identifies the intended use of\nthe public key. The \"use\" parameter is employed to indicate whether\na public key is used for encrypting data or verifying the signature\non data. Values are commonly \"sig\" (signature) or \"enc\" (encryption).", + "type": "string" + }, + "x": { + "description": "X coordinate for the Elliptic Curve point", + "type": "string" + }, + "x5c": { + "description": "The \"x5c\" (X.509 certificate chain) parameter contains a chain of one\nor more PKIX certificates [RFC5280]. The certificate chain is\nrepresented as a JSON array of certificate value strings. Each\nstring in the array is a base64-encoded (Section 4 of [RFC4648] --\nnot base64url-encoded) DER [ITU.X690.1994] PKIX certificate value.\nThe PKIX certificate containing the key value MUST be the first\ncertificate.", + "type": "array", + "items": { + "type": "string" + } + }, + "y": { + "description": "Y coordinate for the Elliptic Curve point", + "type": "string" + } + } + }, + "JSONWebKeySet": { + "type": "object", + "properties": { + "keys": { + "description": "The value of the \"keys\" parameter is an array of JWK values. 
By\ndefault, the order of the JWK values within the array does not imply\nan order of preference among them, although applications of JWK Sets\ncan choose to assign a meaning to the order for their purposes, if\ndesired.", + "type": "array", + "items": { + "$ref": "#/definitions/JSONWebKey" + } + } + } + } + } +} diff --git a/tests-upgrade/mysql/custom/readme.md b/tests-upgrade/mysql/custom/readme.md new file mode 100644 index 00000000000..f8336dda153 --- /dev/null +++ b/tests-upgrade/mysql/custom/readme.md @@ -0,0 +1,41 @@ +# Custom +This directory contains custom implementation for non-generated cmdlets for the `Az.MySql` module. Both scripts (`.ps1`) and C# files (`.cs`) can be implemented here. They will be used during the build process in `build-module.ps1`, and create cmdlets into the `..\exports` folder. The only generated file into this folder is the `Az.MySql.custom.psm1`. This file should not be modified. + +## Info +- Modifiable: yes +- Generated: partial +- Committed: yes +- Packaged: yes + +## Details +For `Az.MySql` to use custom cmdlets, it does this two different ways. We **highly recommend** creating script cmdlets, as they are easier to write and allow access to the other exported cmdlets. C# cmdlets *cannot access exported cmdlets*. + +For C# cmdlets, they are compiled with the rest of the generated low-level cmdlets into the `./bin/Az.MySql.private.dll`. The names of the cmdlets (methods) and files must follow the `[cmdletName]_[variantName]` syntax used for generated cmdlets. The `variantName` is used as the `ParameterSetName`, so use something appropriate that doesn't clash with already created variant or parameter set names. You cannot use the `ParameterSetName` property in the `Parameter` attribute on C# cmdlets. Each cmdlet must be separated into variants using the same pattern as seen in the `generated/cmdlets` folder. + +For script cmdlets, these are loaded via the `Az.MySql.custom.psm1`. Then, during the build process, this module is loaded and processed in the same manner as the C# cmdlets. The fundemental difference is the script cmdlets use the `ParameterSetName` attribute and C# cmdlets do not. To create a script cmdlet variant of a generated cmdlet, simply decorate all parameters in the script with the new `ParameterSetName` in the `Parameter` attribute. This will appropriately treat each parameter set as a separate variant when processed to be exported during the build. + +## Purpose +This allows the modules to have cmdlets that were not defined in the REST specification. It also allows combining logic using generated cmdlets. This is a level of customization beyond what can be done using the [readme configuration options](https://github.com/Azure/autorest/blob/master/docs/powershell/options.md) that are currently available. These custom cmdlets are then referenced by the cmdlets created at build-time in the `..\exports` folder. + +## Usage +The easiest way currently to start developing custom cmdlets is to copy an existing cmdlet. For C# cmdlets, copy one from the `generated/cmdlets` folder. For script cmdlets, build the project using `build-module.ps1` and copy one of the scripts from the `..\exports` folder. After that, if you want to add new parameter sets, follow the guidelines in the `Details` section above. 
When implementing a new cmdlet, at minimum, please keep these parameters:
+- Break
+- DefaultProfile
+- HttpPipelineAppend
+- HttpPipelinePrepend
+- Proxy
+- ProxyCredential
+- ProxyUseDefaultCredentials
+
+These provide functionality to our HTTP pipeline and other useful features. In script, you can forward these parameters using `$PSBoundParameters` to the other cmdlets you're calling within `Az.MySql`. For C#, follow the usage seen in the `ProcessRecordAsync` method.
+
+### Attributes
+For processing the cmdlets, we've created some additional attributes:
+- `Microsoft.Azure.PowerShell.Cmdlets.MySql.Models.DescriptionAttribute`
+  - Used in C# cmdlets to provide a high-level description of the cmdlet. This is propagated to reference documentation via [help comments](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_comment_based_help) in the exported scripts.
+- `Microsoft.Azure.PowerShell.Cmdlets.MySql.Models.DoNotExportAttribute`
+  - Used in C# and script cmdlets to suppress creating an exported cmdlet at build-time. These cmdlets will *not be exposed* by `Az.MySql`.
+- `Microsoft.Azure.PowerShell.Cmdlets.MySql.Models.InternalExportAttribute`
+  - Used in C# cmdlets to route exported cmdlets to the `..\internal` folder, which are *not exposed* by `Az.MySql`. For more information, see [readme.md](../internal/readme.md) in the `..\internal` folder.
+- `Microsoft.Azure.PowerShell.Cmdlets.MySql.Models.ProfileAttribute`
+  - Used in C# and script cmdlets to define which Azure profiles the cmdlet supports. This is only supported for Azure (`--azure`) modules.
\ No newline at end of file
diff --git a/tests-upgrade/mysql/docs/readme.md b/tests-upgrade/mysql/docs/readme.md
new file mode 100644
index 00000000000..62fb790569c
--- /dev/null
+++ b/tests-upgrade/mysql/docs/readme.md
@@ -0,0 +1,11 @@
+# Docs
+This directory contains the documentation of the cmdlets for the `Az.MySql` module. To run documentation generation, use the `generate-help.ps1` script at the root module folder. Files in this folder will *always be overridden on regeneration*. To update documentation examples, please use the `..\examples` folder.
+
+## Info
+- Modifiable: no
+- Generated: all
+- Committed: yes
+- Packaged: yes
+
+## Details
+The process of documentation generation loads `Az.MySql` and analyzes the exported cmdlets from the module. It recognizes the [help comments](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_comment_based_help) that are generated into the scripts in the `..\exports` folder. Additionally, when writing custom cmdlets in the `..\custom` folder, you can use the help comments syntax, which decorates the exported scripts at build-time. The documentation examples are taken from the `..\examples` folder.
\ No newline at end of file
diff --git a/tests-upgrade/mysql/examples/readme.md b/tests-upgrade/mysql/examples/readme.md
new file mode 100644
index 00000000000..ac871d71fc7
--- /dev/null
+++ b/tests-upgrade/mysql/examples/readme.md
@@ -0,0 +1,11 @@
+# Examples
+This directory contains examples from the exported cmdlets of the module. When `build-module.ps1` is run, example stub files will be generated here. If your module supports Azure Profiles, the example stubs will be in individual profile folders. These example stubs should be updated to show how the cmdlet is used. The examples are imported into the documentation when `generate-help.ps1` is run.
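To make the script-cmdlet guidance in `custom/readme.md` above concrete, here is a minimal, hypothetical sketch of a custom variant. The cmdlet name, parameter set name, and output shaping are illustrative only and are not part of the generated module; the pattern it shows is the one the readme describes: a distinct `ParameterSetName` on every parameter, the recommended pipeline parameters kept, and `$PSBoundParameters` forwarded to a generated cmdlet (assumed here to be `Get-AzMySqlServer`).

```powershell
# Hypothetical custom/Get-MySqlServerSummary.ps1 -- a sketch only, not generated content.
function Get-MySqlServerSummary {
    [CmdletBinding(DefaultParameterSetName = 'Summary')]
    param(
        # Every parameter carries the new ParameterSetName so the build treats this as one variant.
        [Parameter(ParameterSetName = 'Summary', Mandatory)]
        [string]
        $ResourceGroupName,

        # Keep the recommended parameters from custom/readme.md (Break, DefaultProfile,
        # HttpPipelineAppend, HttpPipelinePrepend, Proxy, ProxyCredential,
        # ProxyUseDefaultCredentials); two are shown here, the rest follow the same shape.
        [Parameter(ParameterSetName = 'Summary')]
        [System.Management.Automation.PSObject]
        $DefaultProfile,

        [Parameter(ParameterSetName = 'Summary', DontShow)]
        [switch]
        $Break
    )

    process {
        # Forward everything that was bound (including the pipeline parameters) to a
        # generated cmdlet, then shape the output for the caller.
        Get-AzMySqlServer @PSBoundParameters | Select-Object Name, Location
    }
}
```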
+
+## Info
+- Modifiable: yes
+- Generated: partial
+- Committed: yes
+- Packaged: no
+
+## Purpose
+This separates the example documentation details from the generated documentation information provided directly from the generated cmdlets. Since the cmdlets don't have examples from the REST spec, this provides a means to add examples easily. The example stubs provide the markdown format that is required. The three core elements are: the name of the example, the code information of the example, and the description of the example. That information, if the markdown format is followed, will be available to documentation generation and be part of the documents in the `..\docs` folder.
\ No newline at end of file
diff --git a/tests-upgrade/mysql/how-to.md b/tests-upgrade/mysql/how-to.md
new file mode 100644
index 00000000000..8b7309794ce
--- /dev/null
+++ b/tests-upgrade/mysql/how-to.md
@@ -0,0 +1,55 @@
+# How-To
+This document describes how to develop for `Az.MySql`.
+
+## Building `Az.MySql`
+To build, run the `build-module.ps1` script at the root of the module directory. This will generate the proxy script cmdlets that are the cmdlets being exported by this module. After the build completes, the proxy script cmdlets will be output to the `exports` folder. To read more about the proxy script cmdlets, look at the [readme.md](exports/readme.md) in the `exports` folder.
+
+## Creating custom cmdlets
+To add cmdlets that were not generated by the REST specification, use the `custom` folder. This folder allows you to add handwritten `.ps1` and `.cs` files. Currently, we support using `.ps1` scripts as new cmdlets or as additional low-level variants (via `ParameterSet`), and `.cs` files as low-level (variant) cmdlets that the exported script cmdlets call. We do not support exporting any `.cs` (dll) cmdlets directly. To read more about custom cmdlets, look at the [readme.md](custom/readme.md) in the `custom` folder.
+
+## Generating documentation
+Documentation generation is now integrated into the `build-module.ps1` script. If you don't want to run this process as part of `build-module.ps1`, you can provide the `-NoDocs` switch. If you want to run documentation generation after the build process, you may still run the `generate-help.ps1` script. Overall, the process will look at the documentation comments in the generated and custom cmdlets and types, and create `.md` files in the `docs` folder. Additionally, this pulls in any examples from the `examples` folder and adds them to the generated help markdown documents. To read more about examples, look at the [readme.md](examples/readme.md) in the `examples` folder. To read more about documentation, look at the [readme.md](docs/readme.md) in the `docs` folder.
+
+## Testing `Az.MySql`
+To test the cmdlets, we use [Pester](https://github.com/pester/Pester). Test scripts (`.ps1`) should be added to the `test` folder. To execute the Pester tests, run the `test-module.ps1` script. This will run all tests in `playback` mode within the `test` folder. To read more about testing cmdlets, look at the [readme.md](test/readme.md) in the `test` folder.
+
+## Packing `Az.MySql`
+To pack `Az.MySql` for distribution, run the `pack-module.ps1` script. This will take the contents of multiple directories and certain root-folder files to create a `.nupkg`. The structure of the `.nupkg` is created so it can be loaded as part of a [PSRepository](https://docs.microsoft.com/en-us/powershell/module/powershellget/register-psrepository).
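Pulling the sections above together, a typical inner loop while developing the module might look like the following sketch. Only the script names and behaviors already described in this how-to are assumed; paths are relative to the module root.

```powershell
# Build the DLL, regenerate the exported proxy cmdlets, and write the docs.
./build-module.ps1

# Load the freshly built Az.MySql in an isolated PowerShell session to try the cmdlets.
./run-module.ps1

# Run the Pester tests from the test folder (playback mode).
./test-module.ps1

# Package the module into a .nupkg for a PSRepository or the PSGallery.
./pack-module.ps1
```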
Additionally, this package is in a format for distribution to the [PSGallery](https://www.powershellgallery.com/). For signing an Azure module, please contact the [Azure PowerShell](https://github.com/Azure/azure-powershell) team.
+
+## Module Script Details
+There are multiple scripts created to perform different actions for developing `Az.MySql`.
+- `build-module.ps1`
+  - Builds the module DLL (`./bin/Az.MySql.private.dll`), creates the exported cmdlets and documentation, generates custom cmdlet test stubs and exported cmdlet example stubs, and updates `./Az.MySql.psd1` with Azure profile information.
+  - **Parameters**: [`Switch` parameters]
+    - `-Run`: After building, creates an isolated PowerShell session and loads `Az.MySql`.
+    - `-Test`: After building, runs the `Pester` tests defined in the `test` folder.
+    - `-Docs`: After building, generates the Markdown documents for the module into the `docs` folder.
+    - `-Pack`: After building, packages the module into a `.nupkg`.
+    - `-Code`: After building, opens a VSCode window with the module's directory and runs (see `-Run`) the module.
+    - `-Release`: Builds the module in `Release` configuration (as opposed to `Debug` configuration).
+    - `-NoDocs`: Suppresses writing the documentation markdown files as part of the cmdlet exporting process.
+    - `-Debugger`: Used when attaching the debugger in Visual Studio to the PowerShell session, and running the build process without recompiling the DLL. This suppresses running the script as an isolated process.
+- `run-module.ps1`
+  - Creates an isolated PowerShell session and loads `Az.MySql` into the session.
+  - Same as `-Run` in `build-module.ps1`.
+  - **Parameters**: [`Switch` parameters]
+    - `-Code`: Opens a VSCode window with the module's directory.
+    - Same as `-Code` in `build-module.ps1`.
+- `generate-help.ps1`
+  - Generates the Markdown documents for the module into the `docs` folder.
+  - Same as `-Docs` in `build-module.ps1`.
+  - This process is now integrated into `build-module.ps1` automatically. To disable, use `-NoDocs` when running `build-module.ps1`.
+- `test-module.ps1`
+  - Runs the `Pester` tests defined in the `test` folder.
+  - Same as `-Test` in `build-module.ps1`.
+- `pack-module.ps1`
+  - Packages the module into a `.nupkg` for distribution.
+  - Same as `-Pack` in `build-module.ps1`.
+- `export-surface.ps1`
+  - Generates Markdown documents for both the cmdlet surface and the model (class) surface of the module.
+  - These files are placed into the `resources` folder.
+  - Used for investigating the surface of your module. These are *not* documentation for distribution.
+- `check-dependencies.ps1`
+  - Used in `run-module.ps1` and `test-module.ps1` to verify dependent modules are available to run those tasks.
+  - It will download local (within the module's directory structure) versions of those modules as needed.
+  - This script *does not* need to be run by hand.
\ No newline at end of file
diff --git a/tests-upgrade/mysql/license.txt b/tests-upgrade/mysql/license.txt
new file mode 100644
index 00000000000..b9f3180fb9a
--- /dev/null
+++ b/tests-upgrade/mysql/license.txt
@@ -0,0 +1,227 @@
+MICROSOFT SOFTWARE LICENSE TERMS
+
+MICROSOFT AZURE POWERSHELL
+
+These license terms are an agreement between Microsoft Corporation (or based on where you live, one of its affiliates) and you. Please read them.
They apply to the software named above, which includes the media on which you received it, if any. + +BY USING THE SOFTWARE, YOU ACCEPT THESE TERMS. IF YOU DO NOT ACCEPT THEM, DO NOT USE THE SOFTWARE. + + +-----------------START OF LICENSE-------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +-------------------END OF LICENSE------------------------------------------ + + +----------------START OF THIRD PARTY NOTICE-------------------------------- + + +The software includes the AutoMapper library ("AutoMapper"). The MIT License set out below is provided for informational purposes only. It is not the license that governs any part of the software. 
+ +Provided for Informational Purposes Only + +AutoMapper + +The MIT License (MIT) +Copyright (c) 2010 Jimmy Bogard + + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + + + + +*************** + +The software includes Newtonsoft.Json. The MIT License set out below is provided for informational purposes only. It is not the license that governs any part of the software. + +Newtonsoft.Json + +The MIT License (MIT) +Copyright (c) 2007 James Newton-King +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +-------------END OF THIRD PARTY NOTICE---------------------------------------- + diff --git a/tests-upgrade/mysql/mysql.json b/tests-upgrade/mysql/mysql.json new file mode 100644 index 00000000000..78e0d429756 --- /dev/null +++ b/tests-upgrade/mysql/mysql.json @@ -0,0 +1,2576 @@ +{ + "swagger": "2.0", + "info": { + "title": "MySQLManagementClient", + "description": "The Microsoft Azure management API provides create, read, update, and delete functionality for Azure MySQL resources including servers, databases, firewall rules, VNET rules, log files and configurations with new business model.", + "version": "2017-12-01" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}": { + "put": { + "tags": [ + "Servers" + ], + "operationId": "Servers_Create", + "x-ms-examples": { + "Create a new server": { + "$ref": "./examples/ServerCreate.json" + }, + "Create a database as a point in time restore": { + "$ref": "./examples/ServerCreatePointInTimeRestore.json" + }, + "Create a server as a geo restore ": { + "$ref": "./examples/ServerCreateGeoRestoreMode.json" + }, + "Create a replica server": { + "$ref": "./examples/ServerCreateReplicaMode.json" + } + }, + "description": "Creates a new server or updates an existing server. The update action will overwrite the existing server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ServerForCreate" + }, + "description": "The required parameters for creating or updating a server." + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/Server" + } + }, + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/Server" + } + }, + "202": { + "description": "Accepted" + } + }, + "x-ms-long-running-operation": true + }, + "patch": { + "tags": [ + "Servers" + ], + "operationId": "Servers_Update", + "x-ms-examples": { + "ServerUpdate": { + "$ref": "./examples/ServerUpdate.json" + } + }, + "description": "Updates an existing server. 
The request body can contain one to many of the properties present in the normal server definition.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ServerUpdateParameters" + }, + "description": "The required parameters for updating a server." + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/Server" + } + }, + "202": { + "description": "Accepted" + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "tags": [ + "Servers" + ], + "operationId": "Servers_Delete", + "x-ms-examples": { + "ServerDelete": { + "$ref": "./examples/ServerDelete.json" + } + }, + "description": "Deletes a server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK" + }, + "202": { + "description": "Accepted" + }, + "204": { + "description": "NoContent" + } + }, + "x-ms-long-running-operation": true + }, + "get": { + "tags": [ + "Servers" + ], + "operationId": "Servers_Get", + "x-ms-examples": { + "ServerGet": { + "$ref": "./examples/ServerGet.json" + } + }, + "description": "Gets information about a server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/Server" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers": { + "get": { + "tags": [ + "Servers" + ], + "operationId": "Servers_ListByResourceGroup", + "x-ms-examples": { + "ServerListByResourceGroup": { + "$ref": "./examples/ServerListByResourceGroup.json" + } + }, + "description": "List all the servers in a given resource group.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ServerListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.DBforMySQL/servers": { + "get": { + "tags": [ + "Servers" + ], + "operationId": "Servers_List", + "x-ms-examples": { + "ServerList": { + "$ref": "./examples/ServerList.json" + } + }, + 
"description": "List all the servers in a given subscription.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ServerListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/restart": { + "post": { + "tags": [ + "ServerRestart" + ], + "operationId": "Servers_Restart", + "x-ms-examples": { + "ServerRestart": { + "$ref": "./examples/ServerRestart.json" + } + }, + "description": "Restarts a server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK" + }, + "202": { + "description": "Accepted" + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/replicas": { + "get": { + "tags": [ + "Replicas" + ], + "operationId": "Replicas_ListByServer", + "x-ms-examples": { + "ReplicasListByServer": { + "$ref": "./examples/ReplicasListByServer.json" + } + }, + "description": "List all the replicas for a given server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ServerListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/firewallRules/{firewallRuleName}": { + "put": { + "tags": [ + "FirewallRules" + ], + "operationId": "FirewallRules_CreateOrUpdate", + "x-ms-examples": { + "FirewallRuleCreate": { + "$ref": "./examples/FirewallRuleCreate.json" + } + }, + "description": "Creates a new firewall rule or updates an existing firewall rule.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/FirewallRuleNameParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/FirewallRule" + }, + "description": "The required parameters for creating or updating a 
firewall rule." + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/FirewallRule" + } + }, + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/FirewallRule" + } + }, + "202": { + "description": "Accepted" + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "tags": [ + "FirewallRules" + ], + "operationId": "FirewallRules_Delete", + "x-ms-examples": { + "FirewallRuleDelete": { + "$ref": "./examples/FirewallRuleDelete.json" + } + }, + "description": "Deletes a server firewall rule.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/FirewallRuleNameParameter" + } + ], + "responses": { + "200": { + "description": "OK" + }, + "202": { + "description": "Accepted" + }, + "204": { + "description": "NoContent" + } + }, + "x-ms-long-running-operation": true + }, + "get": { + "tags": [ + "FirewallRules" + ], + "operationId": "FirewallRules_Get", + "x-ms-examples": { + "FirewallRuleGet": { + "$ref": "./examples/FirewallRuleGet.json" + } + }, + "description": "Gets information about a server firewall rule.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/FirewallRuleNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/FirewallRule" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/firewallRules": { + "get": { + "tags": [ + "FirewallRules" + ], + "operationId": "FirewallRules_ListByServer", + "x-ms-examples": { + "FirewallRuleList": { + "$ref": "./examples/FirewallRuleListByServer.json" + } + }, + "description": "List all the firewall rules in a given server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/FirewallRuleListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/virtualNetworkRules/{virtualNetworkRuleName}": { + "get": { + "tags": [ + "VirtualNetworkRules" + ], + "description": "Gets a virtual network rule.", + "operationId": "VirtualNetworkRules_Get", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": 
"./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/virtualNetworkRuleNameParameter" + } + ], + "responses": { + "200": { + "description": "Successfully retrieved a specified virtual network rule.", + "schema": { + "$ref": "#/definitions/VirtualNetworkRule" + } + }, + "default": { + "description": "*** Error Responses: ***\n\n * 404 SubscriptionDoesNotHaveServer - The requested server was not found\n\n * 404 ResourceNotFound - The requested resource was not found." + } + }, + "x-ms-examples": { + "Gets a virtual network rule": { + "$ref": "./examples/VirtualNetworkRulesGet.json" + } + } + }, + "put": { + "tags": [ + "VirtualNetworkRules" + ], + "description": "Creates or updates an existing virtual network rule.", + "operationId": "VirtualNetworkRules_CreateOrUpdate", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/virtualNetworkRuleNameParameter" + }, + { + "name": "parameters", + "in": "body", + "description": "The requested virtual Network Rule Resource state.", + "required": true, + "schema": { + "$ref": "#/definitions/VirtualNetworkRule" + } + } + ], + "responses": { + "200": { + "description": "Successfully updated a virtual network rule.", + "schema": { + "$ref": "#/definitions/VirtualNetworkRule" + } + }, + "default": { + "description": "*** Error Responses: ***\n\n * 400 InvalidResourceId - Invalid resource identifier.\n\n * 400 MismatchingSubscriptionWithUrl - The provided subscription did not match the subscription in the Url.\n\n * 400 MismatchingResourceGroupNameWithUrl - The provided resource group name did not match the name in the Url.\n\n * 400 MismatchingServerNameWithUrl - The provided server name did not match the name in the Url.\n\n * 400 NullVirtualNetworkRequest - Virtual Network Request is Null\n\n * 400 NullVirtualNetworkRequestParameters - Virtual Network Request Parameters are Null\n\n * 400 NullVirtualNetworkSubnetId - The Virtual Network Subnet Id is null\n\n * 404 SubscriptionDoesNotHaveServer - The requested server was not found\n\n * 404 VirtualNetworkRuleNotEnabled - Azure SQL Server Virtual Network Rule feature is not enabled\n\n * 404 OperationIdNotFound - The operation with Id does not exist.\n\n * 409 OperationCancelled - The operation has been cancelled by user.\n\n * 409 OperationInterrupted - The operation on the resource could not be completed because it was interrupted by another operation on the same resource.\n\n * 500 OperationTimedOut - The operation timed out and automatically rolled back. Please retry the operation." 
+ }, + "202": { + "description": "Accepted" + }, + "201": { + "description": "Successfully created a virtual network rule.", + "schema": { + "$ref": "#/definitions/VirtualNetworkRule" + } + } + }, + "x-ms-long-running-operation": true, + "x-ms-examples": { + "Create or update a virtual network rule": { + "$ref": "./examples/VirtualNetworkRulesCreateOrUpdate.json" + } + } + }, + "delete": { + "tags": [ + "VirtualNetworkRules" + ], + "description": "Deletes the virtual network rule with the given name.", + "operationId": "VirtualNetworkRules_Delete", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/virtualNetworkRuleNameParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Successfully deleted the virtual network rule." + }, + "default": { + "description": "*** Error Responses: ***\n\n * 400 InvalidResourceId - Invalid resource identifier.\n\n * 400 MismatchingSubscriptionWithUrl - The provided subscription did not match the subscription in the Url.\n\n * 400 MismatchingResourceGroupNameWithUrl - The provided resource group name did not match the name in the Url.\n\n * 400 MismatchingServerNameWithUrl - The provided server name did not match the name in the Url.\n\n * 400 NullVirtualNetworkRequest - Virtual Network Request is Null\n\n * 400 NullVirtualNetworkRequestParameters - Virtual Network Request Parameters are Null\n\n * 404 SubscriptionDoesNotHaveServer - The requested server was not found\n\n * 404 OperationIdNotFound - The operation with Id does not exist.\n\n * 409 OperationCancelled - The operation has been cancelled by user.\n\n * 409 OperationInterrupted - The operation on the resource could not be completed because it was interrupted by another operation on the same resource.\n\n * 500 OperationTimedOut - The operation timed out and automatically rolled back. Please retry the operation." + }, + "202": { + "description": "Accepted" + }, + "204": { + "description": "The specified virtual network rule does not exist." 
+ } + }, + "x-ms-long-running-operation": true, + "x-ms-examples": { + "Delete a virtual network rule": { + "$ref": "./examples/VirtualNetworkRulesDelete.json" + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/virtualNetworkRules": { + "get": { + "tags": [ + "VirtualNetworkRules" + ], + "description": "Gets a list of virtual network rules in a server.", + "operationId": "VirtualNetworkRules_ListByServer", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Successfully retrieved the list of virtual network rules.", + "schema": { + "$ref": "#/definitions/VirtualNetworkRuleListResult" + } + }, + "default": { + "description": "*** Error Responses: ***\n\n * 404 SubscriptionDoesNotHaveServer - The requested server was not found\n\n * 404 ResourceNotFound - The requested resource was not found." + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + }, + "x-ms-examples": { + "List virtual network rules": { + "$ref": "./examples/VirtualNetworkRulesList.json" + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/databases/{databaseName}": { + "put": { + "tags": [ + "Databases" + ], + "operationId": "Databases_CreateOrUpdate", + "x-ms-examples": { + "DatabaseCreate": { + "$ref": "./examples/DatabaseCreate.json" + } + }, + "description": "Creates a new database or updates an existing database.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/DatabaseNameParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/Database" + }, + "description": "The required parameters for creating or updating a database." 
+ } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/Database" + } + }, + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/Database" + } + }, + "202": { + "description": "Accepted" + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "tags": [ + "Databases" + ], + "operationId": "Databases_Delete", + "x-ms-examples": { + "DatabaseDelete": { + "$ref": "./examples/DatabaseDelete.json" + } + }, + "description": "Deletes a database.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/DatabaseNameParameter" + } + ], + "responses": { + "200": { + "description": "OK" + }, + "202": { + "description": "Accepted" + }, + "204": { + "description": "NoContent" + } + }, + "x-ms-long-running-operation": true + }, + "get": { + "tags": [ + "Databases" + ], + "operationId": "Databases_Get", + "x-ms-examples": { + "DatabaseGet": { + "$ref": "./examples/DatabaseGet.json" + } + }, + "description": "Gets information about a database.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/DatabaseNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/Database" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/databases": { + "get": { + "tags": [ + "Databases" + ], + "operationId": "Databases_ListByServer", + "x-ms-examples": { + "DatabaseList": { + "$ref": "./examples/DatabaseListByServer.json" + } + }, + "description": "List all the databases in a given server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/DatabaseListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/configurations/{configurationName}": { + "put": { + "tags": [ + "Configurations" + ], + "operationId": "Configurations_CreateOrUpdate", + "x-ms-examples": { + "ConfigurationCreateOrUpdate": { + "$ref": "./examples/ConfigurationCreateOrUpdate.json" + } + }, + "description": "Updates a configuration of a server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": 
"./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/ConfigurationNameParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/Configuration" + }, + "description": "The required parameters for updating a server configuration." + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/Configuration" + } + }, + "202": { + "description": "Accepted" + } + }, + "x-ms-long-running-operation": true + }, + "get": { + "tags": [ + "Configurations" + ], + "operationId": "Configurations_Get", + "x-ms-examples": { + "ConfigurationGet": { + "$ref": "./examples/ConfigurationGet.json" + } + }, + "description": "Gets information about a configuration of server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "$ref": "#/parameters/ConfigurationNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/Configuration" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/configurations": { + "get": { + "tags": [ + "Configurations" + ], + "operationId": "Configurations_ListByServer", + "x-ms-examples": { + "ConfigurationList": { + "$ref": "./examples/ConfigurationListByServer.json" + } + }, + "description": "List all the configurations in a given server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ConfigurationListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/logFiles": { + "get": { + "tags": [ + "LogFiles" + ], + "operationId": "LogFiles_ListByServer", + "x-ms-examples": { + "LogFileList": { + "$ref": "./examples/LogFileListByServer.json" + } + }, + "description": "List all the log files in a given server.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/LogFileListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/Administrators/activeDirectory": { + "get": { + "tags": [ + "ServerAdministrators" + ], + "operationId": "ServerAdministrators_Get", + "x-ms-examples": { + "ServerAdministratorGet": { + "$ref": "./examples/ServerAdminGet.json" + } + }, + "description": "Gets information about a AAD server administrator.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ServerAdministratorResource" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "put": { + "tags": [ + "ServerAdministrators" + ], + "operationId": "ServerAdministrators_CreateOrUpdate", + "x-ms-examples": { + "ServerAdministratorCreate": { + "$ref": "./examples/ServerAdminCreateUpdate.json" + } + }, + "description": "Creates or update active directory administrator on an existing server. The update action will overwrite the existing administrator.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + }, + { + "name": "properties", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ServerAdministratorResource" + }, + "description": "The required parameters for creating or updating an AAD server administrator." 
+ } + ], + "responses": { + "200": { + "description": "Successfully updated the active directory administrator", + "schema": { + "$ref": "#/definitions/ServerAdministratorResource" + } + }, + "202": { + "description": "Operation in progress", + "schema": { + "$ref": "#/definitions/ServerAdministratorResource" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "tags": [ + "ServerAdministrators" + ], + "operationId": "ServerAdministrators_Delete", + "x-ms-examples": { + "ServerAdministratorsDelete": { + "$ref": "./examples/ServerAdminDelete.json" + } + }, + "description": "Deletes server active directory administrator.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "Successfully deleted the active directory administrator" + }, + "202": { + "description": "Operation in progress" + }, + "204": { + "description": "The specified Server active directory administrator does not exist" + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/administrators": { + "get": { + "tags": [ + "ServerAdministrators" + ], + "operationId": "ServerAdministrators_List", + "description": "Returns a list of server Administrators.", + "x-ms-examples": { + "get a list of server administrators": { + "$ref": "./examples/ServerAdminList.json" + } + }, + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ServerNameParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ServerAdministratorResourceListResult" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.DBforMySQL/locations/{locationName}/performanceTiers": { + "get": { + "tags": [ + "LocationBasedPerformanceTier" + ], + "operationId": "LocationBasedPerformanceTier_List", + "x-ms-examples": { + "PerformanceTiersList": { + "$ref": "./examples/PerformanceTiersListByLocation.json" + } + }, + "description": "List all the performance tiers at specified location in a given subscription.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/LocationNameParameter" + } + ], + "responses": { 
+ "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/PerformanceTierListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": null + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.DBforMySQL/checkNameAvailability": { + "post": { + "tags": [ + "CheckNameAvailability" + ], + "operationId": "CheckNameAvailability_Execute", + "x-ms-examples": { + "NameAvailability": { + "$ref": "./examples/CheckNameAvailability.json" + } + }, + "description": "Check the availability of name for resource", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/SubscriptionIdParameter" + }, + { + "name": "nameAvailabilityRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/NameAvailabilityRequest" + }, + "description": "The required parameters for checking if resource name is available." + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/NameAvailability" + } + } + } + } + }, + "/providers/Microsoft.DBforMySQL/operations": { + "get": { + "tags": [ + "Operations" + ], + "operationId": "Operations_List", + "x-ms-examples": { + "OperationList": { + "$ref": "./examples/OperationList.json" + } + }, + "description": "Lists all of the available REST API operations.", + "parameters": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/OperationListResult" + } + } + } + } + } + }, + "definitions": { + "ServerVersion": { + "type": "string", + "description": "The version of a server.", + "enum": [ + "5.6", + "5.7", + "8.0" + ], + "x-ms-enum": { + "name": "ServerVersion", + "modelAsString": false + } + }, + "SslEnforcement": { + "type": "string", + "description": "Enable ssl enforcement or not when connect to server.", + "enum": [ + "Enabled", + "Disabled" + ], + "x-ms-enum": { + "name": "SslEnforcementEnum", + "modelAsString": false + } + }, + "MinimalTlsVersion": { + "type": "string", + "description": "Enforce a minimal Tls version for the server.", + "enum": [ + "TLS1_0", + "TLS1_1", + "TLS1_2", + "TLSEnforcementDisabled" + ], + "x-ms-enum": { + "name": "MinimalTlsVersionEnum", + "modelAsString": false + } + }, + "InfrastructureEncryption": { + "type": "string", + "description": "Add a second layer of encryption for your data using new encryption algorithm which gives additional data protection. Value is optional but if passed in, must be 'Disabled' or 'Enabled'.", + "enum": [ + "Enabled", + "Disabled" + ], + "x-ms-enum": { + "name": "InfrastructureEncryption", + "modelAsString": false, + "values": [ + { + "value": "Enabled", + "description": "Default value for single layer of encryption for data at rest." + }, + { + "value": "Disabled", + "description": "Additional (2nd) layer of encryption for data at rest" + } + ] + } + }, + "PublicNetworkAccess": { + "type": "string", + "description": "Whether or not public network access is allowed for this server. 
Value is optional but if passed in, must be 'Enabled' or 'Disabled'", + "enum": [ + "Enabled", + "Disabled" + ], + "x-ms-enum": { + "name": "PublicNetworkAccessEnum", + "modelAsString": false + } + }, + "ServerPrivateEndpointConnection": { + "description": "A private endpoint connection under a server", + "type": "object", + "properties": { + "id": { + "description": "Resource Id of the private endpoint connection.", + "type": "string", + "readOnly": true + }, + "properties": { + "$ref": "#/definitions/ServerPrivateEndpointConnectionProperties", + "description": "Private endpoint connection properties", + "readOnly": true + } + } + }, + "ServerPrivateEndpointConnectionProperties": { + "description": "Properties of a private endpoint connection.", + "type": "object", + "properties": { + "privateEndpoint": { + "$ref": "#/definitions/PrivateEndpointProperty", + "description": "Private endpoint which the connection belongs to." + }, + "privateLinkServiceConnectionState": { + "$ref": "#/definitions/ServerPrivateLinkServiceConnectionStateProperty", + "description": "Connection state of the private endpoint connection." + }, + "provisioningState": { + "description": "State of the private endpoint connection.", + "enum": [ + "Approving", + "Ready", + "Dropping", + "Failed", + "Rejecting" + ], + "type": "string", + "readOnly": true, + "x-ms-enum": { + "name": "PrivateEndpointProvisioningState", + "modelAsString": false + } + } + } + }, + "PrivateEndpointProperty": { + "type": "object", + "properties": { + "id": { + "description": "Resource id of the private endpoint.", + "type": "string" + } + }, + "x-ms-azure-resource": true + }, + "ServerPrivateLinkServiceConnectionStateProperty": { + "required": [ + "status", + "description" + ], + "type": "object", + "properties": { + "status": { + "description": "The private link service connection status.", + "enum": [ + "Approved", + "Pending", + "Rejected", + "Disconnected" + ], + "type": "string", + "x-ms-enum": { + "name": "PrivateLinkServiceConnectionStateStatus", + "modelAsString": false + } + }, + "description": { + "description": "The private link service connection description.", + "type": "string" + }, + "actionsRequired": { + "description": "The actions required for private link service connection.", + "enum": [ + "None", + "app" + ], + "type": "string", + "readOnly": true, + "x-ms-enum": { + "name": "PrivateLinkServiceConnectionStateActionsRequire", + "modelAsString": false + } + } + } + }, + "TrackedResource": { + "description": "Resource properties including location and tags for track resources.", + "properties": { + "location": { + "type": "string", + "description": "The location the resource resides in." + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Application-specific metadata in the form of key-value pairs." + } + }, + "allOf": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ProxyResource" + } + ], + "required": [ + "location" + ] + }, + "ServerProperties": { + "properties": { + "administratorLogin": { + "type": "string", + "description": "The administrator's login name of a server. Can only be specified when the server is being created (and is required for creation)." + }, + "version": { + "$ref": "#/definitions/ServerVersion", + "description": "Server version." + }, + "sslEnforcement": { + "$ref": "#/definitions/SslEnforcement", + "description": "Enable ssl enforcement or not when connect to server." 
+ }, + "minimalTlsVersion": { + "$ref": "#/definitions/MinimalTlsVersion", + "description": "Enforce a minimal Tls version for the server." + }, + "byokEnforcement": { + "type": "string", + "description": "Status showing whether the server data encryption is enabled with customer-managed keys.", + "readOnly": true + }, + "infrastructureEncryption": { + "$ref": "#/definitions/InfrastructureEncryption", + "description": "Status showing whether the server enabled infrastructure encryption." + }, + "userVisibleState": { + "type": "string", + "description": "A state of a server that is visible to user.", + "enum": [ + "Ready", + "Dropping", + "Disabled", + "Inaccessible" + ], + "x-ms-enum": { + "name": "ServerState", + "modelAsString": false + } + }, + "fullyQualifiedDomainName": { + "type": "string", + "description": "The fully qualified domain name of a server." + }, + "earliestRestoreDate": { + "type": "string", + "format": "date-time", + "description": "Earliest restore point creation time (ISO8601 format)" + }, + "storageProfile": { + "$ref": "#/definitions/StorageProfile", + "description": "Storage profile of a server." + }, + "replicationRole": { + "type": "string", + "description": "The replication role of the server." + }, + "masterServerId": { + "type": "string", + "description": "The master server id of a replica server." + }, + "replicaCapacity": { + "type": "integer", + "format": "int32", + "minimum": 0, + "description": "The maximum number of replicas that a master server can have." + }, + "publicNetworkAccess": { + "$ref": "#/definitions/PublicNetworkAccess", + "description": "Whether or not public network access is allowed for this server. Value is optional but if passed in, must be 'Enabled' or 'Disabled'" + }, + "privateEndpointConnections": { + "description": "List of private endpoint connections on a server", + "type": "array", + "items": { + "$ref": "#/definitions/ServerPrivateEndpointConnection" + }, + "readOnly": true + } + }, + "description": "The properties of a server." + }, + "StorageProfile": { + "properties": { + "backupRetentionDays": { + "type": "integer", + "description": "Backup retention days for the server." + }, + "geoRedundantBackup": { + "type": "string", + "description": "Enable Geo-redundant or not for server backup.", + "enum": [ + "Enabled", + "Disabled" + ], + "x-ms-enum": { + "name": "GeoRedundantBackup", + "modelAsString": false + } + }, + "storageMB": { + "type": "integer", + "format": "int32", + "description": "Max storage allowed for a server." + }, + "storageAutogrow": { + "type": "string", + "description": "Enable Storage Auto Grow.", + "enum": [ + "Enabled", + "Disabled" + ], + "x-ms-enum": { + "name": "StorageAutogrow", + "modelAsString": false + } + } + }, + "description": "Storage Profile properties of a server" + }, + "ServerPropertiesForCreate": { + "discriminator": "createMode", + "required": [ + "createMode" + ], + "properties": { + "version": { + "$ref": "#/definitions/ServerVersion", + "description": "Server version." + }, + "sslEnforcement": { + "$ref": "#/definitions/SslEnforcement", + "description": "Enable ssl enforcement or not when connect to server." + }, + "minimalTlsVersion": { + "$ref": "#/definitions/MinimalTlsVersion", + "description": "Enforce a minimal Tls version for the server." + }, + "infrastructureEncryption": { + "$ref": "#/definitions/InfrastructureEncryption", + "description": "Status showing whether the server enabled infrastructure encryption." 
+ }, + "publicNetworkAccess": { + "$ref": "#/definitions/PublicNetworkAccess", + "description": "Whether or not public network access is allowed for this server. Value is optional but if passed in, must be 'Enabled' or 'Disabled'" + }, + "storageProfile": { + "$ref": "#/definitions/StorageProfile", + "description": "Storage profile of a server." + }, + "createMode": { + "type": "string", + "description": "The mode to create a new server.", + "enum": [ + "Default", + "PointInTimeRestore", + "GeoRestore", + "Replica" + ], + "x-ms-enum": { + "name": "CreateMode", + "modelAsString": false + } + } + }, + "description": "The properties used to create a new server." + }, + "ServerPropertiesForDefaultCreate": { + "x-ms-discriminator-value": "Default", + "allOf": [ + { + "$ref": "#/definitions/ServerPropertiesForCreate" + } + ], + "properties": { + "administratorLogin": { + "type": "string", + "description": "The administrator's login name of a server. Can only be specified when the server is being created (and is required for creation)." + }, + "administratorLoginPassword": { + "type": "string", + "format": "password", + "description": "The password of the administrator login." + } + }, + "required": [ + "administratorLogin", + "administratorLoginPassword" + ], + "description": "The properties used to create a new server." + }, + "ServerPropertiesForRestore": { + "x-ms-discriminator-value": "PointInTimeRestore", + "allOf": [ + { + "$ref": "#/definitions/ServerPropertiesForCreate" + } + ], + "properties": { + "sourceServerId": { + "type": "string", + "description": "The source server id to restore from." + }, + "restorePointInTime": { + "type": "string", + "format": "date-time", + "description": "Restore point creation time (ISO8601 format), specifying the time to restore from." + } + }, + "required": [ + "sourceServerId", + "restorePointInTime" + ], + "description": "The properties used to create a new server by restoring from a backup." + }, + "ServerPropertiesForGeoRestore": { + "x-ms-discriminator-value": "GeoRestore", + "allOf": [ + { + "$ref": "#/definitions/ServerPropertiesForCreate" + } + ], + "properties": { + "sourceServerId": { + "type": "string", + "description": "The source server id to restore from." + } + }, + "required": [ + "sourceServerId" + ], + "description": "The properties used to create a new server by restoring to a different region from a geo replicated backup." + }, + "ServerPropertiesForReplica": { + "x-ms-discriminator-value": "Replica", + "allOf": [ + { + "$ref": "#/definitions/ServerPropertiesForCreate" + } + ], + "properties": { + "sourceServerId": { + "type": "string", + "description": "The master server id to create replica from." + } + }, + "required": [ + "sourceServerId" + ], + "description": "The properties to create a new replica." + }, + "Sku": { + "properties": { + "name": { + "type": "string", + "description": "The name of the sku, typically, tier + family + cores, e.g. B_Gen4_1, GP_Gen5_8." + }, + "tier": { + "type": "string", + "description": "The tier of the particular SKU, e.g. Basic.", + "enum": [ + "Basic", + "GeneralPurpose", + "MemoryOptimized" + ], + "x-ms-enum": { + "name": "SkuTier", + "modelAsString": false + } + }, + "capacity": { + "type": "integer", + "format": "int32", + "minimum": 0, + "description": "The scale up/out capacity, representing server's compute units." + }, + "size": { + "type": "string", + "description": "The size code, to be interpreted by resource as appropriate." 
+ }, + "family": { + "type": "string", + "description": "The family of hardware." + } + }, + "description": "Billing information related properties of a server." + }, + "ResourceIdentity": { + "description": "Azure Active Directory identity configuration for a resource.", + "type": "object", + "properties": { + "principalId": { + "format": "uuid", + "description": "The Azure Active Directory principal id.", + "type": "string", + "readOnly": true + }, + "type": { + "description": "The identity type. Set this to 'SystemAssigned' in order to automatically create and assign an Azure Active Directory principal for the resource.", + "enum": [ + "SystemAssigned", + "app" + ], + "type": "string", + "x-ms-enum": { + "name": "IdentityType", + "modelAsString": false + } + }, + "tenantId": { + "format": "uuid", + "description": "The Azure Active Directory tenant id.", + "type": "string", + "readOnly": true + } + } + }, + "Server": { + "properties": { + "identity": { + "$ref": "#/definitions/ResourceIdentity", + "description": "The Azure Active Directory identity of the server." + }, + "sku": { + "$ref": "#/definitions/Sku", + "description": "The SKU (pricing tier) of the server." + }, + "properties": { + "$ref": "#/definitions/ServerProperties", + "x-ms-client-flatten": true, + "description": "Properties of the server." + } + }, + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "description": "Represents a server." + }, + "ServerForCreate": { + "properties": { + "identity": { + "$ref": "#/definitions/ResourceIdentity", + "description": "The Azure Active Directory identity of the server." + }, + "sku": { + "$ref": "#/definitions/Sku", + "description": "The SKU (pricing tier) of the server." + }, + "properties": { + "$ref": "#/definitions/ServerPropertiesForCreate", + "x-ms-client-flatten": false, + "description": "Properties of the server." + }, + "location": { + "type": "string", + "description": "The location the resource resides in." + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Application-specific metadata in the form of key-value pairs." + } + }, + "required": [ + "properties", + "location" + ], + "description": "Represents a server to be created." + }, + "ServerUpdateParameters": { + "properties": { + "identity": { + "$ref": "#/definitions/ResourceIdentity", + "description": "The Azure Active Directory identity of the server." + }, + "sku": { + "$ref": "#/definitions/Sku", + "description": "The SKU (pricing tier) of the server." + }, + "properties": { + "properties": { + "storageProfile": { + "$ref": "#/definitions/StorageProfile", + "description": "Storage profile of a server." + }, + "administratorLoginPassword": { + "type": "string", + "format": "password", + "description": "The password of the administrator login." + }, + "version": { + "$ref": "#/definitions/ServerVersion", + "description": "The version of a server." + }, + "sslEnforcement": { + "$ref": "#/definitions/SslEnforcement", + "description": "Enable ssl enforcement or not when connect to server." + }, + "minimalTlsVersion": { + "$ref": "#/definitions/MinimalTlsVersion", + "description": "Enforce a minimal Tls version for the server." + }, + "publicNetworkAccess": { + "$ref": "#/definitions/PublicNetworkAccess", + "description": "Whether or not public network access is allowed for this server. 
Value is optional but if passed in, must be 'Enabled' or 'Disabled'" + }, + "replicationRole": { + "type": "string", + "description": "The replication role of the server." + } + }, + "x-ms-client-flatten": true, + "description": "The properties that can be updated for a server." + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Application-specific metadata in the form of key-value pairs." + } + }, + "description": "Parameters allowed to update for a server." + }, + "ServerListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Server" + }, + "description": "The list of servers" + } + }, + "description": "A list of servers." + }, + "FirewallRuleProperties": { + "properties": { + "startIpAddress": { + "type": "string", + "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", + "description": "The start IP address of the server firewall rule. Must be IPv4 format." + }, + "endIpAddress": { + "type": "string", + "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", + "description": "The end IP address of the server firewall rule. Must be IPv4 format." + } + }, + "required": [ + "startIpAddress", + "endIpAddress" + ], + "description": "The properties of a server firewall rule." + }, + "FirewallRule": { + "properties": { + "properties": { + "$ref": "#/definitions/FirewallRuleProperties", + "x-ms-client-flatten": true, + "description": "The properties of a firewall rule." + } + }, + "allOf": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ProxyResource" + } + ], + "required": [ + "properties" + ], + "description": "Represents a server firewall rule." + }, + "FirewallRuleListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/FirewallRule" + }, + "description": "The list of firewall rules in a server." + } + }, + "description": "A list of firewall rules." 
+ }, + "VirtualNetworkRuleProperties": { + "description": "Properties of a virtual network rule.", + "required": [ + "virtualNetworkSubnetId" + ], + "type": "object", + "properties": { + "virtualNetworkSubnetId": { + "description": "The ARM resource id of the virtual network subnet.", + "type": "string" + }, + "ignoreMissingVnetServiceEndpoint": { + "description": "Create firewall rule before the virtual network has vnet service endpoint enabled.", + "type": "boolean" + }, + "state": { + "description": "Virtual Network Rule State", + "enum": [ + "Initializing", + "InProgress", + "Ready", + "Deleting", + "Unknown" + ], + "type": "string", + "readOnly": true, + "x-ms-enum": { + "name": "VirtualNetworkRuleState", + "modelAsString": false + } + } + } + }, + "VirtualNetworkRule": { + "description": "A virtual network rule.", + "type": "object", + "allOf": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ProxyResource" + } + ], + "properties": { + "properties": { + "$ref": "#/definitions/VirtualNetworkRuleProperties", + "description": "Resource properties.", + "x-ms-client-flatten": true + } + } + }, + "VirtualNetworkRuleListResult": { + "description": "A list of virtual network rules.", + "type": "object", + "properties": { + "value": { + "description": "Array of results.", + "type": "array", + "items": { + "$ref": "#/definitions/VirtualNetworkRule" + }, + "readOnly": true + }, + "nextLink": { + "description": "Link to retrieve next page of results.", + "type": "string", + "readOnly": true + } + } + }, + "DatabaseProperties": { + "properties": { + "charset": { + "type": "string", + "description": "The charset of the database." + }, + "collation": { + "type": "string", + "description": "The collation of the database." + } + }, + "description": "The properties of a database." + }, + "Database": { + "properties": { + "properties": { + "$ref": "#/definitions/DatabaseProperties", + "x-ms-client-flatten": true, + "description": "The properties of a database." + } + }, + "allOf": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ProxyResource" + } + ], + "description": "Represents a Database." + }, + "DatabaseListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Database" + }, + "description": "The list of databases housed in a server" + } + }, + "description": "A List of databases." + }, + "ConfigurationProperties": { + "properties": { + "value": { + "type": "string", + "description": "Value of the configuration." + }, + "description": { + "type": "string", + "readOnly": true, + "description": "Description of the configuration." + }, + "defaultValue": { + "type": "string", + "readOnly": true, + "description": "Default value of the configuration." + }, + "dataType": { + "type": "string", + "readOnly": true, + "description": "Data type of the configuration." + }, + "allowedValues": { + "type": "string", + "readOnly": true, + "description": "Allowed values of the configuration." + }, + "source": { + "type": "string", + "description": "Source of the configuration." + } + }, + "description": "The properties of a configuration." + }, + "Configuration": { + "properties": { + "properties": { + "$ref": "#/definitions/ConfigurationProperties", + "x-ms-client-flatten": true, + "description": "The properties of a configuration." + } + }, + "allOf": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ProxyResource" + } + ], + "description": "Represents a Configuration." 
+ }, + "ConfigurationListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Configuration" + }, + "description": "The list of server configurations." + } + }, + "description": "A list of server configurations." + }, + "OperationDisplay": { + "properties": { + "provider": { + "type": "string", + "readOnly": true, + "description": "Operation resource provider name." + }, + "resource": { + "type": "string", + "readOnly": true, + "description": "Resource on which the operation is performed." + }, + "operation": { + "type": "string", + "readOnly": true, + "description": "Localized friendly name for the operation." + }, + "description": { + "type": "string", + "readOnly": true, + "description": "Operation description." + } + }, + "description": "Display metadata associated with the operation." + }, + "Operation": { + "properties": { + "name": { + "type": "string", + "readOnly": true, + "description": "The name of the operation being performed on this particular object." + }, + "display": { + "$ref": "#/definitions/OperationDisplay", + "readOnly": true, + "description": "The localized display information for this particular operation or action." + }, + "origin": { + "type": "string", + "readOnly": true, + "description": "The intended executor of the operation.", + "enum": [ + "NotSpecified", + "user", + "system" + ], + "x-ms-enum": { + "name": "OperationOrigin", + "modelAsString": false + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "type": "object" + }, + "readOnly": true, + "x-ms-client-flatten": false, + "description": "Additional descriptions for the operation." + } + }, + "description": "REST API operation definition." + }, + "OperationListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Operation" + }, + "description": "The list of resource provider operations." + } + }, + "description": "A list of resource provider operations." + }, + "LogFileProperties": { + "properties": { + "sizeInKB": { + "type": "integer", + "format": "int64", + "description": "Size of the log file." + }, + "createdTime": { + "type": "string", + "readOnly": true, + "format": "date-time", + "description": "Creation timestamp of the log file." + }, + "lastModifiedTime": { + "type": "string", + "readOnly": true, + "format": "date-time", + "description": "Last modified timestamp of the log file." + }, + "type": { + "type": "string", + "description": "Type of the log file." + }, + "url": { + "type": "string", + "description": "The url to download the log file from." + } + }, + "description": "The properties of a log file." + }, + "LogFile": { + "properties": { + "name": { + "type": "string", + "description": "The name of the log file." + }, + "properties": { + "$ref": "#/definitions/LogFileProperties", + "x-ms-client-flatten": true, + "description": "The properties of the log file." + } + }, + "allOf": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ProxyResource" + } + ], + "description": "Represents a log file." + }, + "LogFileListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/LogFile" + }, + "description": "The list of log files." + } + }, + "description": "A list of log files." + }, + "PerformanceTierServiceLevelObjectives": { + "properties": { + "id": { + "type": "string", + "description": "ID for the service level objective." 
+ }, + "edition": { + "type": "string", + "description": "Edition of the performance tier." + }, + "vCore": { + "type": "integer", + "description": "vCore associated with the service level objective" + }, + "hardwareGeneration": { + "type": "string", + "description": "Hardware generation associated with the service level objective" + }, + "maxBackupRetentionDays": { + "type": "integer", + "description": "Maximum Backup retention in days for the performance tier edition" + }, + "minBackupRetentionDays": { + "type": "integer", + "description": "Minimum Backup retention in days for the performance tier edition" + }, + "maxStorageMB": { + "type": "integer", + "format": "int32", + "description": "Max storage allowed for a server." + }, + "minStorageMB": { + "type": "integer", + "format": "int32", + "description": "Max storage allowed for a server." + } + }, + "description": "Service level objectives for performance tier." + }, + "PerformanceTierProperties": { + "properties": { + "id": { + "type": "string", + "description": "ID of the performance tier." + }, + "serviceLevelObjectives": { + "type": "array", + "items": { + "$ref": "#/definitions/PerformanceTierServiceLevelObjectives" + }, + "description": "Service level objectives associated with the performance tier" + } + }, + "description": "Performance tier properties" + }, + "PerformanceTierListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/PerformanceTierProperties" + }, + "description": "The list of performance tiers" + } + }, + "description": "A list of performance tiers." + }, + "NameAvailabilityRequest": { + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string", + "description": "Resource name to verify." + }, + "type": { + "type": "string", + "description": "Resource type used for verification." + } + }, + "description": "Request from client to check resource name availability." + }, + "NameAvailability": { + "properties": { + "message": { + "type": "string", + "description": "Error Message." + }, + "nameAvailable": { + "type": "boolean", + "description": "Indicates whether the resource name is available." + }, + "reason": { + "type": "string", + "description": "Reason for name being unavailable." + } + }, + "description": "Represents a resource name availability." + }, + "CloudError": { + "x-ms-external": true, + "properties": { + "error": { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ErrorResponse" + } + }, + "description": "An error response from the Batch service." + }, + "ServerAdministratorProperties": { + "properties": { + "administratorType": { + "type": "string", + "description": "The type of administrator.", + "enum": [ + "ActiveDirectory", + "app" + ], + "x-ms-enum": { + "name": "AdministratorType" + } + }, + "login": { + "type": "string", + "description": "The server administrator login account name." + }, + "sid": { + "type": "string", + "description": "The server administrator Sid (Secure ID).", + "format": "uuid" + }, + "tenantId": { + "type": "string", + "description": "The server Active Directory Administrator tenant id.", + "format": "uuid" + } + }, + "required": [ + "tenantId", + "administratorType", + "login", + "sid" + ], + "description": "The properties of an server Administrator." + }, + "ServerAdministratorResource": { + "properties": { + "properties": { + "$ref": "#/definitions/ServerAdministratorProperties", + "x-ms-client-flatten": true, + "description": "Properties of the server AAD administrator." 
+ } + }, + "description": "Represents a and external administrator to be created.", + "allOf": [ + { + "$ref": "./common-types/resource-management/v1/types.json#/definitions/ProxyResource" + } + ] + }, + "ServerAdministratorResourceListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/ServerAdministratorResource" + }, + "description": "The list of server Active Directory Administrators for the server." + } + }, + "description": "The response to a list Active Directory Administrators request." + } + }, + "parameters": { + "ServerNameParameter": { + "name": "serverName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the server.", + "x-ms-parameter-location": "method" + }, + "FirewallRuleNameParameter": { + "name": "firewallRuleName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the server firewall rule.", + "x-ms-parameter-location": "method" + }, + "virtualNetworkRuleNameParameter": { + "name": "virtualNetworkRuleName", + "in": "path", + "description": "The name of the virtual network rule.", + "required": true, + "type": "string", + "x-ms-parameter-location": "method" + }, + "DatabaseNameParameter": { + "name": "databaseName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the database.", + "x-ms-parameter-location": "method" + }, + "ConfigurationNameParameter": { + "name": "configurationName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the server configuration.", + "x-ms-parameter-location": "method" + }, + "LocationNameParameter": { + "name": "locationName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the location.", + "x-ms-parameter-location": "method" + } + } +} \ No newline at end of file diff --git a/tests-upgrade/mysql/readme.md b/tests-upgrade/mysql/readme.md new file mode 100644 index 00000000000..59a155a136f --- /dev/null +++ b/tests-upgrade/mysql/readme.md @@ -0,0 +1,139 @@ + +# Az.MySql +This directory contains the PowerShell module for the MySql service. + +--- +## Status +[![Az.MySql](https://img.shields.io/powershellgallery/v/Az.MySql.svg?style=flat-square&label=Az.MySql "Az.MySql")](https://www.powershellgallery.com/packages/Az.MySql/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.7.4 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.MySql`, see [how-to.md](how-to.md). + + +--- +## Generation Requirements +Use of the beta version of `autorest.powershell` generator requires the following: +- [NodeJS LTS](https://nodejs.org) (10.15.x LTS preferred) + - **Note**: It *will not work* with Node < 10.x. Using 11.x builds may cause issues as they may introduce instability or breaking changes. +> If you want an easy way to install and update Node, [NVS - Node Version Switcher](../nodejs/installing-via-nvs.md) or [NVM - Node Version Manager](../nodejs/installing-via-nvm.md) is recommended. 
+- [AutoRest](https://aka.ms/autorest) v3 beta
+  - `npm install -g autorest@beta`
+- PowerShell 6.0 or greater
+  - If you don't have it installed, you can use the cross-platform npm package: `npm install -g pwsh`
+- .NET Core SDK 2.0 or greater
+  - If you don't have it installed, you can use the cross-platform npm package: `npm install -g dotnet-sdk-2.2`
  + +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - ./mysql.json +module-version: 0.1.0 +title: MySQL +subject-prefix: 'MySQL' + +directive: + - from: mysql.json + where: $.definitions.VirtualNetworkRule + transform: $['required'] = ['properties'] + - where: + verb: Set + subject: Configuration$|FirewallRule$|VirtualNetworkRule$ + set: + verb: Update + - where: + verb: ^New$|^Set$|^Remove$|^Get|^Update$|^Invoke$ + subject: Database$|SecurityAlertPolicy$|Administrator$|LocationBasedPerformanceTier$|LogFile$|ExecuteCheckNameAvailability$ + hide: true + - where: + verb: New$|Update$ + subject: Server$ + hide: true + - where: + verb: New$ + variant: ^Create$ + hide: true + - where: + verb: New$ + variant: ^CreateViaIdentity + hide: true + - where: + verb: New$|Update$ + variant: ^(?!.*?Expanded) + hide: true + - where: + verb: New + subject: Configuration + hide: true + - where: + parameter-name: VirtualNetworkSubnetId + subject: VirtualNetworkRule + set: + parameter-name: SubnetId + - where: + model-name: Server + set: + format-table: + properties: + - Name + - Location + - AdministratorLogin + - Version + - StorageProfileStorageMb + - SkuName + - SkuSize + - SkuTier + - SslEnforcement + - where: + model-name: Configuration + set: + format-table: + properties: + - Name + - Value + - where: + model-name: FirewallRule + set: + format-table: + properties: + - Name + - StartIPAddress + - EndIPAddress + - where: + parameter-name: StorageProfileBackupRetentionDay + subject: Server + set: + parameter-description: Backup retention days for the server. Day count is between 7 and 35. + - from: source-file-csharp + where: $ + transform: $ = $.replace(/OperationOrigin System/, 'OperationOrigin System1'); + - from: source-file-csharp + where: $ + transform: $ = $.replace('internal Microsoft.Azure.PowerShell.Cmdlets.MySql.Models.Api20171201.IServerPropertiesForCreate Property', 'public Microsoft.Azure.PowerShell.Cmdlets.MySql.Models.Api20171201.IServerPropertiesForCreate Property'); + - from: source-file-csharp + where: $ + transform: $ = $.replace('public int StorageProfileBackupRetentionDay', '[System.Management.Automation.ValidateRangeAttribute(7,35)]\n public int StorageProfileBackupRetentionDay'); +``` diff --git a/tests-upgrade/mysql/resources/readme.md b/tests-upgrade/mysql/resources/readme.md new file mode 100644 index 00000000000..937f07f8fec --- /dev/null +++ b/tests-upgrade/mysql/resources/readme.md @@ -0,0 +1,11 @@ +# Resources +This directory can contain any additional resources for module that are not required at runtime. This directory **does not** get packaged with the module. If you have assets for custom implementation, place them into the `..\custom` folder. + +## Info +- Modifiable: yes +- Generated: no +- Committed: yes +- Packaged: no + +## Purpose +Use this folder to put anything you want to keep around as part of the repository for the module, but is not something that is required for the module. For example, development files, packaged builds, or additional information. This is only intended to be used in repositories where the module's output directory is cleaned, but tangential resources for the module want to remain intact. 
\ No newline at end of file
diff --git a/tests-upgrade/mysql/test/loadEnv.ps1 b/tests-upgrade/mysql/test/loadEnv.ps1
new file mode 100644
index 00000000000..c4ebf2e8310
--- /dev/null
+++ b/tests-upgrade/mysql/test/loadEnv.ps1
@@ -0,0 +1,28 @@
+# ----------------------------------------------------------------------------------
+#
+# Copyright Microsoft Corporation
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ----------------------------------------------------------------------------------
+$envFile = 'env.json'
+if ($TestMode -eq 'live') {
+    $envFile = 'localEnv.json'
+}
+
+if (Test-Path -Path (Join-Path $PSScriptRoot $envFile)) {
+    $envFilePath = Join-Path $PSScriptRoot $envFile
+} else {
+    $envFilePath = Join-Path $PSScriptRoot "..\$envFile"
+}
+$env = @{}
+if (Test-Path -Path $envFilePath) {
+    $env = Get-Content $envFilePath | ConvertFrom-Json
+    $PSDefaultParameterValues=@{"*:SubscriptionId"=$env.SubscriptionId; "*:Tenant"=$env.Tenant}
+}
\ No newline at end of file
diff --git a/tests-upgrade/mysql/test/readme.md b/tests-upgrade/mysql/test/readme.md
new file mode 100644
index 00000000000..7c752b4c8c4
--- /dev/null
+++ b/tests-upgrade/mysql/test/readme.md
@@ -0,0 +1,17 @@
+# Test
+This directory contains the [Pester](https://www.powershellgallery.com/packages/Pester) tests to run for the module. We use Pester as it is the unofficial standard for PowerShell unit testing. Test stubs for custom cmdlets (created in `..\custom`) will be generated into this folder when `build-module.ps1` is run. These test stubs will fail automatically to indicate that tests should be written for custom cmdlets.
+
+## Info
+- Modifiable: yes
+- Generated: partial
+- Committed: yes
+- Packaged: no
+
+## Details
+We allow three testing modes: *live*, *record*, and *playback*. These can be selected using the `-Live`, `-Record`, and `-Playback` switches respectively on the `test-module.ps1` script. This script will run through any `.Tests.ps1` scripts in the `test` folder. If you choose the *record* mode, it will create a `.Recording.json` file of the REST calls between the client and server. Then, when you choose *playback* mode, it will use the `.Recording.json` file to mock the communication between server and client. The *live* mode runs the same as the *record* mode; however, it doesn't create the `.Recording.json` file.
+
+## Purpose
+Custom cmdlets generally encompass additional functionality not described in the REST specification, or combine functionality generated from the REST spec. To validate that this functionality continues to operate as intended, creating tests that can be run and re-run against custom cmdlets is part of the framework.
+
+## Usage
+To execute tests, run the `test-module.ps1` script. To write tests, [this example](https://github.com/pester/Pester/blob/8b9cf4248315e44f1ac6673be149f7e0d7f10466/Examples/Planets/Get-Planet.Tests.ps1#L1) from the Pester repository is very useful for getting started.
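+
+As a quick illustration (a sketch only: it assumes the module has already been built with `build-module.ps1`, that the commands are run from the module's root folder, and, for playback, that `.Recording.json` files already exist), the switches described above might be used like this:
+
+``` powershell
+# Record live REST traffic into .Recording.json files; requires an authenticated Azure session (Az.Accounts)
+./test-module.ps1 -Record
+
+# Re-run the same tests offline, mocking the client/server traffic from the recordings
+./test-module.ps1 -Playback
+```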
\ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/.gitattributes b/tests-upgrade/mysql/tools/Resources/.gitattributes new file mode 100644 index 00000000000..2125666142e --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/.gitattributes @@ -0,0 +1 @@ +* text=auto \ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/.gitignore b/tests-upgrade/mysql/tools/Resources/.gitignore new file mode 100644 index 00000000000..649721c69ce --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/.gitignore @@ -0,0 +1,14 @@ +bin +obj +.vs +generated +internal +exports +custom/*.psm1 +test/*-TestResults.xml +/*.ps1 +/*.ps1xml +/*.psm1 +/*.snk +/*.csproj +/*.nuspec \ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/custom/New-AzDeployment.ps1 b/tests-upgrade/mysql/tools/Resources/custom/New-AzDeployment.ps1 new file mode 100644 index 00000000000..4ece0d887e4 --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/custom/New-AzDeployment.ps1 @@ -0,0 +1,231 @@ +function New-AzDeployment { + [OutputType('Microsoft.Azure.PowerShell.Cmdlets.Resources.Models.Api20180501.IDeploymentExtended')] + [CmdletBinding(DefaultParameterSetName='CreateWithTemplateFileParameterFile', PositionalBinding=$false, SupportsShouldProcess, ConfirmImpact='Medium')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Description('You can provide the template and parameters directly in the request or link to JSON files.')] + param( + [Parameter(HelpMessage='The name of the deployment. If not provided, the name of the template file will be used. If a template file is not used, a random GUID will be used for the name.')] + [Alias('DeploymentName')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Path')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Runtime.Info(SerializedName='deploymentName', Required, PossibleTypes=([System.String]), Description='The name of the deployment.')] + [System.String] + # The name of the deployment. If not provided, the name of the template file will be used. If a template file is not used, a random GUID will be used for the name. + ${Name}, + + [Parameter(Mandatory, HelpMessage='The ID of the target subscription.')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Path')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Runtime.Info(SerializedName='subscriptionId', Required, PossibleTypes=([System.String]), Description='The ID of the target subscription.')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Runtime.DefaultInfo(Script='(Get-AzContext).Subscription.Id')] + [System.String] + # The ID of the target subscription. + ${SubscriptionId}, + + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterFile', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterJson', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterObject', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterFile', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. 
The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterJson', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterObject', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterFile', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterJson', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterObject', Mandatory, HelpMessage='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Path')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Runtime.Info(SerializedName='resourceGroupName', Required, PossibleTypes=([System.String]), Description='The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist.')] + [System.String] + # The name of the resource group to deploy the resources to. The name is case insensitive. The resource group must already exist. + ${ResourceGroupName}, + + [Parameter(ParameterSetName='CreateWithTemplateFileParameterFile', Mandatory, HelpMessage='Local path to the JSON template file.')] + [Parameter(ParameterSetName='CreateWithTemplateFileParameterJson', Mandatory, HelpMessage='Local path to the JSON template file.')] + [Parameter(ParameterSetName='CreateWithTemplateFileParameterObject', Mandatory, HelpMessage='Local path to the JSON template file.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterFile', Mandatory, HelpMessage='Local path to the JSON template file.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterJson', Mandatory, HelpMessage='Local path to the JSON template file.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterObject', Mandatory, HelpMessage='Local path to the JSON template file.')] + [System.String] + # Local path to the JSON template file. + ${TemplateFile}, + + [Parameter(ParameterSetName='CreateWithTemplateJsonParameterFile', Mandatory, HelpMessage='The string representation of the JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateJsonParameterJson', Mandatory, HelpMessage='The string representation of the JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateJsonParameterObject', Mandatory, HelpMessage='The string representation of the JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterFile', Mandatory, HelpMessage='The string representation of the JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterJson', Mandatory, HelpMessage='The string representation of the JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterObject', Mandatory, HelpMessage='The string representation of the JSON template.')] + [System.String] + # The string representation of the JSON template. 
+ ${TemplateJson}, + + [Parameter(ParameterSetName='CreateWithTemplateObjectParameterFile', Mandatory, HelpMessage='The hashtable representation of the JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterFile', Mandatory, HelpMessage='The hashtable representation of the JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateObjectParameterJson', Mandatory, HelpMessage='The hashtable representation of the JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterJson', Mandatory, HelpMessage='The hashtable representation of the JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateObjectParameterObject', Mandatory, HelpMessage='The hashtable representation of the JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterObject', Mandatory, HelpMessage='The hashtable representation of the JSON template.')] + [System.Collections.Hashtable] + # The hashtable representation of the JSON template. + ${TemplateObject}, + + [Parameter(ParameterSetName='CreateWithTemplateFileParameterFile', Mandatory, HelpMessage='Local path to the parameter JSON template file.')] + [Parameter(ParameterSetName='CreateWithTemplateJsonParameterFile', Mandatory, HelpMessage='Local path to the parameter JSON template file.')] + [Parameter(ParameterSetName='CreateWithTemplateObjectParameterFile', Mandatory, HelpMessage='Local path to the parameter JSON template file.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterFile', Mandatory, HelpMessage='Local path to the parameter JSON template file.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterFile', Mandatory, HelpMessage='Local path to the parameter JSON template file.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterFile', Mandatory, HelpMessage='Local path to the parameter JSON template file.')] + [System.String] + # Local path to the parameter JSON template file. + ${TemplateParameterFile}, + + [Parameter(ParameterSetName='CreateWithTemplateFileParameterJson', Mandatory, HelpMessage='The string representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateJsonParameterJson', Mandatory, HelpMessage='The string representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateObjectParameterJson', Mandatory, HelpMessage='The string representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterJson', Mandatory, HelpMessage='The string representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterJson', Mandatory, HelpMessage='The string representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterJson', Mandatory, HelpMessage='The string representation of the parameter JSON template.')] + [System.String] + # The string representation of the parameter JSON template. 
+ ${TemplateParameterJson}, + + [Parameter(ParameterSetName='CreateWithTemplateFileParameterObject', Mandatory, HelpMessage='The hashtable representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateFileParameterObject', Mandatory, HelpMessage='The hashtable representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateJsonParameterObject', Mandatory, HelpMessage='The hashtable representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateJsonParameterObject', Mandatory, HelpMessage='The hashtable representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateWithTemplateObjectParameterObject', Mandatory, HelpMessage='The hashtable representation of the parameter JSON template.')] + [Parameter(ParameterSetName='CreateRGWithTemplateObjectParameterObject', Mandatory, HelpMessage='The hashtable representation of the parameter JSON template.')] + [System.Collections.Hashtable] + # The hashtable representation of the parameter JSON template. + ${TemplateParameterObject}, + + [Parameter(Mandatory, HelpMessage='The mode that is used to deploy resources. This value can be either Incremental or Complete. In Incremental mode, resources are deployed without deleting existing resources that are not included in the template. In Complete mode, resources are deployed and existing resources in the resource group that are not included in the template are deleted. Be careful when using Complete mode as you may unintentionally delete resources.')] + [ArgumentCompleter([Microsoft.Azure.PowerShell.Cmdlets.Resources.Support.DeploymentMode])] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Body')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Runtime.Info(SerializedName='mode', Required, PossibleTypes=([Microsoft.Azure.PowerShell.Cmdlets.Resources.Support.DeploymentMode]), Description='The mode that is used to deploy resources. This value can be either Incremental or Complete. In Incremental mode, resources are deployed without deleting existing resources that are not included in the template. In Complete mode, resources are deployed and existing resources in the resource group that are not included in the template are deleted. Be careful when using Complete mode as you may unintentionally delete resources.')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Support.DeploymentMode] + # The mode that is used to deploy resources. This value can be either Incremental or Complete. In Incremental mode, resources are deployed without deleting existing resources that are not included in the template. In Complete mode, resources are deployed and existing resources in the resource group that are not included in the template are deleted. Be careful when using Complete mode as you may unintentionally delete resources. + ${Mode}, + + [Parameter(HelpMessage='Specifies the type of information to log for debugging. The permitted values are none, requestContent, responseContent, or both requestContent and responseContent separated by a comma. The default is none. When setting this value, carefully consider the type of information you are passing in during deployment. 
By logging information about the request or response, you could potentially expose sensitive data that is retrieved through the deployment operations.')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Body')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Runtime.Info(SerializedName='detailLevel', PossibleTypes=([System.String]), Description='Specifies the type of information to log for debugging. The permitted values are none, requestContent, responseContent, or both requestContent and responseContent separated by a comma. The default is none. When setting this value, carefully consider the type of information you are passing in during deployment. By logging information about the request or response, you could potentially expose sensitive data that is retrieved through the deployment operations.')] + [System.String] + # Specifies the type of information to log for debugging. The permitted values are none, requestContent, responseContent, or both requestContent and responseContent separated by a comma. The default is none. When setting this value, carefully consider the type of information you are passing in during deployment. By logging information about the request or response, you could potentially expose sensitive data that is retrieved through the deployment operations. + ${DeploymentDebugLogLevel}, + + [Parameter(HelpMessage='The location to store the deployment data.')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Body')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Runtime.Info(SerializedName='location', PossibleTypes=([System.String]), Description='The location to store the deployment data.')] + [System.String] + # The location to store the deployment data. + ${Location}, + + [Parameter(HelpMessage='The credentials, account, tenant, and subscription used for communication with Azure.')] + [Alias('AzureRMContext', 'AzureCredential')] + [ValidateNotNull()] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Azure')] + [System.Management.Automation.PSObject] + # The credentials, account, tenant, and subscription used for communication with Azure. + ${DefaultProfile}, + + [Parameter(HelpMessage='Run the command as a job')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Runtime')] + [System.Management.Automation.SwitchParameter] + # Run the command as a job + ${AsJob}, + + [Parameter(HelpMessage='Run the command asynchronously')] + [Microsoft.Azure.PowerShell.Cmdlets.Resources.Category('Runtime')] + [System.Management.Automation.SwitchParameter] + # Run the command asynchronously + ${NoWait} + + ) + + process { + if ($PSBoundParameters.ContainsKey("TemplateFile")) + { + if (!(Test-Path -Path $TemplateFile)) + { + throw "Unable to find template file '$TemplateFile'." 
+ } + + if (!$PSBoundParameters.ContainsKey("Name")) + { + $DeploymentName = (Get-Item -Path $TemplateFile).BaseName + $null = $PSBoundParameters.Add("Name", $DeploymentName) + } + + $TemplateJson = [System.IO.File]::ReadAllText($TemplateFile) + $null = $PSBoundParameters.Add("Template", $TemplateJson) + $null = $PSBoundParameters.Remove("TemplateFile") + } + elseif ($PSBoundParameters.ContainsKey("TemplateJson")) + { + $null = $PSBoundParameters.Add("Template", $TemplateJson) + $null = $PSBoundParameters.Remove("TemplateJson") + } + elseif ($PSBoundParameters.ContainsKey("TemplateObject")) + { + $TemplateJson = ConvertTo-Json -InputObject $TemplateObject + $null = $PSBoundParameters.Add("Template", $TemplateJson) + $null = $PSBoundParameters.Remove("TemplateObject") + } + + if ($PSBoundParameters.ContainsKey("TemplateParameterFile")) + { + if (!(Test-Path -Path $TemplateParameterFile)) + { + throw "Unable to find template parameter file '$TemplateParameterFile'." + } + + $ParameterJson = [System.IO.File]::ReadAllText($TemplateParameterFile) + $ParameterObject = ConvertFrom-Json -InputObject $ParameterJson + $ParameterHashtable = @{} + $ParameterObject.PSObject.Properties | ForEach-Object { $ParameterHashtable[$_.Name] = $_.Value } + $ParameterHashtable.Remove("`$schema") + $ParameterHashtable.Remove("contentVersion") + $NestedValues = $ParameterHashtable.parameters + if ($null -ne $NestedValues) + { + $ParameterHashtable.Remove("parameters") + $NestedValues.PSObject.Properties | ForEach-Object { $ParameterHashtable[$_.Name] = $_.Value } + } + + $ParameterJson = ConvertTo-Json -InputObject $ParameterHashtable + $null = $PSBoundParameters.Add("DeploymentPropertyParameter", $ParameterJson) + $null = $PSBoundParameters.Remove("TemplateParameterFile") + } + elseif ($PSBoundParameters.ContainsKey("TemplateParameterJson")) + { + $null = $PSBoundParameters.Add("DeploymentPropertyParameter", $TemplateParameterJson) + $null = $PSBoundParameters.Remove("TemplateParameterJson") + } + elseif ($PSBoundParameters.ContainsKey("TemplateParameterObject")) + { + $TemplateParameterObject.Remove("`$schema") + $TemplateParameterObject.Remove("contentVersion") + $NestedValues = $TemplateParameterObject.parameters + if ($null -ne $NestedValues) + { + $TemplateParameterObject.Remove("parameters") + $NestedValues.PSObject.Properties | ForEach-Object { $TemplateParameterObject[$_.Name] = $_.Value } + } + + $TemplateParameterJson = ConvertTo-Json -InputObject $TemplateParameterObject + $null = $PSBoundParameters.Add("DeploymentPropertyParameter", $TemplateParameterJson) + $null = $PSBoundParameters.Remove("TemplateParameterObject") + } + + if (!$PSBoundParameters.ContainsKey("Name")) + { + $DeploymentName = (New-Guid).Guid + $null = $PSBoundParameters.Add("Name", $DeploymentName) + } + + if ($PSBoundParameters.ContainsKey("ResourceGroupName")) + { + Az.Resources.TestSupport.private\New-AzDeployment_CreateExpanded @PSBoundParameters + } + else + { + Az.Resources.TestSupport.private\New-AzDeployment_CreateExpanded @PSBoundParameters + } + } +} \ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/docs/readme.md b/tests-upgrade/mysql/tools/Resources/docs/readme.md new file mode 100644 index 00000000000..95fb0e21daf --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/docs/readme.md @@ -0,0 +1,11 @@ +# Docs +This directory contains the documentation of the cmdlets for the `Az.Resources` module. To run documentation generation, use the `generate-help.ps1` script at the root module folder. 
Files in this folder will *always be overridden on regeneration*. To update documentation examples, please use the `..\examples` folder. + +## Info +- Modifiable: no +- Generated: all +- Committed: yes +- Packaged: yes + +## Details +The process of documentation generation loads `Az.Resources` and analyzes the exported cmdlets from the module. It recognizes the [help comments](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_comment_based_help) that are generated into the scripts in the `..\exports` folder. Additionally, when writing custom cmdlets in the `..\custom` folder, you can use the help comment syntax, which decorates the exported scripts at build time. The documentation examples are taken from the `..\examples` folder. \ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/examples/readme.md b/tests-upgrade/mysql/tools/Resources/examples/readme.md new file mode 100644 index 00000000000..ac871d71fc7 --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/examples/readme.md @@ -0,0 +1,11 @@ +# Examples +This directory contains examples for the exported cmdlets of the module. When `build-module.ps1` is run, example stub files will be generated here. If your module supports Azure Profiles, the example stubs will be in individual profile folders. These example stubs should be updated to show how the cmdlet is used. The examples are imported into the documentation when `generate-help.ps1` is run. + +## Info +- Modifiable: yes +- Generated: partial +- Committed: yes +- Packaged: no + +## Purpose +This separates the example documentation details from the generated documentation information provided directly by the generated cmdlets. Since the cmdlets don't have examples from the REST spec, this provides a means to add examples easily. The example stubs provide the markdown format that is required. The three core elements are: the name of the example, the code for the example, and the description of the example. If the markdown format is followed, that information will be available to documentation generation and become part of the documents in the `..\docs` folder. \ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/how-to.md b/tests-upgrade/mysql/tools/Resources/how-to.md new file mode 100644 index 00000000000..c4daf2b254c --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/how-to.md @@ -0,0 +1,58 @@ +# How-To +This document describes how to develop for `Az.Resources`. + +## Building `Az.Resources` +To build, run the `build-module.ps1` script at the root of the module directory. This generates the proxy script cmdlets that the module exports. After the build completes, the proxy script cmdlets will be output to the `exports` folder. To read more about the proxy script cmdlets, look at the [readme.md](exports/readme.md) in the `exports` folder. + +## Creating custom cmdlets +To add cmdlets that were not generated from the REST specification, use the `custom` folder. This folder allows you to add handwritten `.ps1` and `.cs` files. Currently, we support using `.ps1` scripts as new cmdlets or as additional low-level variants (via `ParameterSet`), and `.cs` files as low-level (variant) cmdlets that the exported script cmdlets call. We do not support exporting any `.cs` (dll) cmdlets directly. To read more about custom cmdlets, look at the [readme.md](custom/readme.md) in the `custom` folder.
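As a rough, hypothetical sketch (not generated code), a hand-written cmdlet placed in the `custom` folder could look like the following. The function name, parameters, and filtering logic are illustrative only; the comment-based help block is what documentation generation later picks up. It assumes the generated `Get-AzDeployment` cmdlet and its `-ResourceGroupName` parameter, as listed in the cmdlet surface document.

```powershell
<#
.SYNOPSIS
Gets deployments in a resource group whose names start with a given prefix.
.DESCRIPTION
Hypothetical custom cmdlet used only to illustrate the custom folder: it wraps
the generated Get-AzDeployment cmdlet and filters its output on the client side.
build-module.ps1 re-exports it as a proxy script cmdlet in the exports folder.
#>
function Get-AzDeploymentByPrefix {
    [CmdletBinding()]
    param(
        [Parameter(Mandatory, HelpMessage='The name of the resource group to query.')]
        [System.String]
        # The name of the resource group to query.
        ${ResourceGroupName},

        [Parameter(Mandatory, HelpMessage='Only deployments whose names start with this prefix are returned.')]
        [System.String]
        # Only deployments whose names start with this prefix are returned.
        ${Prefix}
    )

    process {
        # Delegate to the generated cmdlet, then filter locally by name.
        Get-AzDeployment -ResourceGroupName $ResourceGroupName |
            Where-Object { $_.Name -like "$Prefix*" }
    }
}
```

With comment-based help like the above in place, `generate-help.ps1` (or `build-module.ps1` without `-NoDocs`) can emit a matching markdown page into the `docs` folder.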
+ +## Generating documentation +To generate documentation, the process is now integrated into the `build-module.ps1` script. If you don't want to run this process as part of `build-module.ps1`, you can provide the `-NoDocs` switch. If you want to run documentation generation after the build process, you may still run the `generate-help.ps1` script. Overall, the process will look at the documentation comments in the generated and custom cmdlets and types, and create `.md` files in the `docs` folder. Additionally, this pulls in any examples from the `examples` folder and adds them to the generated help markdown documents. To read more about examples, look at the [readme.md](examples/readme.md) in the `examples` folder. To read more about documentation, look at the [readme.md](docs/readme.md) in the `docs` folder. + +## Testing `Az.Resources` +To test the cmdlets, we use [Pester](https://github.com/pester/Pester). Test scripts (`.ps1`) should be added to the `test` folder. To execute the Pester tests, run the `test-module.ps1` script. This will run all tests in `playback` mode within the `test` folder. To read more about testing cmdlets, look at the [readme.md](examples/readme.md) in the `examples` folder. + +## Packing `Az.Resources` +To pack `Az.Resources` for distribution, run the `pack-module.ps1` script. This will take the contents of multiple directories and certain root-folder files to create a `.nupkg`. The structure of the `.nupkg` is created so it can be loaded as part of a [PSRepository](https://docs.microsoft.com/en-us/powershell/module/powershellget/register-psrepository). Additionally, this package is in a format suitable for distribution to the [PSGallery](https://www.powershellgallery.com/). For signing an Azure module, please contact the [Azure PowerShell](https://github.com/Azure/azure-powershell) team. + +## Module Script Details +There are multiple scripts created for performing different actions when developing `Az.Resources`. +- `build-module.ps1` + - Builds the module DLL (`./bin/Az.Resources.private.dll`), creates the exported cmdlets and documentation, generates custom cmdlet test stubs and exported cmdlet example stubs, and updates `./Az.Resources.psd1` with Azure profile information. + - **Parameters**: [`Switch` parameters] + - `-Run`: After building, creates an isolated PowerShell session and loads `Az.Resources`. + - `-Test`: After building, runs the `Pester` tests defined in the `test` folder. + - `-Docs`: After building, generates the Markdown documents for the modules into the `docs` folder. + - `-Pack`: After building, packages the module into a `.nupkg`. + - `-Code`: After building, opens a VSCode window with the module's directory and runs (see `-Run`) the module. + - `-Release`: Builds the module in `Release` configuration (as opposed to `Debug` configuration). + - `-NoDocs`: Suppresses writing the documentation markdown files as part of the cmdlet exporting process. + - `-Debugger`: Used when attaching the debugger in Visual Studio to the PowerShell session, and running the build process without recompiling the DLL. This suppresses running the script as an isolated process. +- `run-module.ps1` + - Creates an isolated PowerShell session and loads `Az.Resources` into the session. + - Same as `-Run` in `build-module.ps1`. + - **Parameters**: [`Switch` parameters] + - `-Code`: Opens a VSCode window with the module's directory. + - Same as `-Code` in `build-module.ps1`. +- `generate-help.ps1` + - Generates the Markdown documents for the modules into the `docs` folder.
+ - Same as `-Docs` in `build-module.ps1`. + - This process is now integrated into `build-module.ps1` automatically. To disable it, use `-NoDocs` when running `build-module.ps1`. +- `test-module.ps1` + - Runs the `Pester` tests defined in the `test` folder. + - Same as `-Test` in `build-module.ps1`. +- `pack-module.ps1` + - Packages the module into a `.nupkg` for distribution. + - Same as `-Pack` in `build-module.ps1`. +- `export-surface.ps1` + - Generates Markdown documents for both the cmdlet surface and the model (class) surface of the module. + - These files are placed into the `resources` folder. + - Used for investigating the surface of your module. These are *not* documentation for distribution. +- `check-dependencies.ps1` + - Used in `run-module.ps1` and `test-module.ps1` to verify that dependent modules are available to run those tasks. + - It will download local (within the module's directory structure) versions of those modules as needed. + - This script *does not* need to be run by hand. \ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/license.txt b/tests-upgrade/mysql/tools/Resources/license.txt new file mode 100644 index 00000000000..3d3f8f90d5d --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/license.txt @@ -0,0 +1,203 @@ +MICROSOFT SOFTWARE LICENSE TERMS + +MICROSOFT AZURE POWERSHELL + +These license terms are an agreement between Microsoft Corporation (or based on where you live, one of its affiliates) and you. Please read them. They apply to the software named above, which includes the media on which you received it, if any. + +BY USING THE SOFTWARE, YOU ACCEPT THESE TERMS. IF YOU DO NOT ACCEPT THEM, DO NOT USE THE SOFTWARE. + + +-----------------START OF LICENSE-------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types.
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +-------------------END OF LICENSE------------------------------------------ + + +----------------START OF THIRD PARTY NOTICE-------------------------------- + +The software includes Newtonsoft.Json. The MIT License set out below is provided for informational purposes only. It is not the license that governs any part of the software. + +Newtonsoft.Json + +The MIT License (MIT) +Copyright (c) 2007 James Newton-King +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------END OF THIRD PARTY NOTICE---------------------------------------- + diff --git a/tests-upgrade/mysql/tools/Resources/readme.md b/tests-upgrade/mysql/tools/Resources/readme.md new file mode 100644 index 00000000000..aa61ef906c4 --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/readme.md @@ -0,0 +1,440 @@ + +# Az.Resources.TestSupport +This directory contains the PowerShell module for the Resources service. 
+ +--- +## Status +[![Az.Resources.TestSupport](https://img.shields.io/powershellgallery/v/Az.Resources.TestSupport.svg?style=flat-square&label=Az.Resources.TestSupport "Az.Resources.TestSupport")](https://www.powershellgallery.com/packages/Az.Resources.TestSupport/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.7.4 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.Resources.TestSupport`, see [how-to.md](how-to.md). + + +--- +## Generation Requirements +Use of the beta version of the `autorest.powershell` generator requires the following: +- [NodeJS LTS](https://nodejs.org) (10.15.x LTS preferred) + - **Note**: It *will not work* with Node < 10.x. Using 11.x builds is not recommended, as they may introduce instability or breaking changes. +> If you want an easy way to install and update Node, [NVS - Node Version Switcher](../nodejs/installing-via-nvs.md) or [NVM - Node Version Manager](../nodejs/installing-via-nvm.md) is recommended. +- [AutoRest](https://aka.ms/autorest) v3 beta
`npm install -g @autorest/autorest`
  +- PowerShell 6.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g pwsh`
  +- .NET Core SDK 2.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g dotnet-sdk-2.2`
  + +## Run Generation +In this directory, run AutoRest: +> `autorest` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +> Values +``` yaml +azure: true +powershell: true +branch: master +repo: https://github.com/Azure/azure-rest-api-specs/blob/master +metadata: + authors: Microsoft Corporation + owners: Microsoft Corporation + copyright: Microsoft Corporation. All rights reserved. + companyName: Microsoft Corporation + requireLicenseAcceptance: true + licenseUri: https://aka.ms/azps-license + projectUri: https://github.com/Azure/azure-powershell +``` + +> Names +``` yaml +prefix: Az +``` + +> Folders +``` yaml +clear-output-folder: true +``` + +``` yaml +input-file: + - https://github.com/Azure/azure-rest-api-specs/blob/master/specification/resources/resource-manager/Microsoft.Resources/stable/2018-05-01/resources.json +module-name: Az.Resources.TestSupport +namespace: Microsoft.Azure.PowerShell.Cmdlets.Resources + +subject-prefix: '' +module-version: 0.0.1 +title: Resources + +directive: + - where: + subject: Operation + hide: true + - where: + parameter-name: SubscriptionId + set: + default: + script: '(Get-AzContext).Subscription.Id' + - from: swagger-document + where: $..parameters[?(@.name=='$filter')] + transform: $['x-ms-skip-url-encoding'] = true + - from: swagger-document + where: $..[?( /Resources_(CreateOrUpdate|Update|Delete|Get|GetById|CheckExistence|CheckExistenceById)/g.exec(@.operationId))] + transform: "$.parameters = $.parameters.map( each => { each.name = each.name === 'api-version' ? 'explicit-api-version' : each.name; return each; } );" + - from: source-file-csharp + where: $ + transform: $ = $.replace(/explicit-api-version/g, 'api-version'); + - where: + parameter-name: ExplicitApiVersion + set: + parameter-name: ApiVersion + - from: source-file-csharp + where: $ + transform: > + $ = $.replace(/result.OdataNextLink/g,'nextLink' ); + return $.replace( /(^\s*)(if\s*\(\s*nextLink\s*!=\s*null\s*\))/gm, '$1var nextLink = Module.Instance.FixNextLink(responseMessage, result.OdataNextLink);\n$1$2' ); + - from: swagger-document + where: + - $..DeploymentProperties.properties.template + - $..DeploymentProperties.properties.parameters + - $..ResourceGroupExportResult.properties.template + - $..PolicyDefinitionProperties.properties.policyRule + transform: $.additionalProperties = true; + - where: + verb: Set + subject: Resource + remove: true + - where: + verb: Set + subject: Deployment + remove: true + - where: + subject: Resource + parameter-name: GroupName + set: + parameter-name: ResourceGroupName + clear-alias: true + - where: + subject: Resource + parameter-name: Id + set: + parameter-name: ResourceId + clear-alias: true + - where: + subject: Resource + parameter-name: Type + set: + parameter-name: ResourceType + clear-alias: true + - where: + subject: Appliance* + remove: true + - where: + verb: Test + subject: CheckNameAvailability + set: + subject: NameAvailability + - where: + verb: Export + subject: ResourceGroupTemplate + set: + subject: ResourceGroup + alias: Export-AzResourceGroupTemplate + - where: + parameter-name: Filter + set: + alias: ODataQuery + - where: + verb: Test + subject: ResourceGroupExistence + set: + subject: ResourceGroup + alias: Test-AzResourceGroupExistence + - where: + verb: Export + subject: DeploymentTemplate + set: + alias: [Save-AzDeploymentTemplate, Save-AzResourceGroupDeploymentTemplate] + - where: + subject: Deployment + set: + alias: ${verb}-AzResourceGroupDeployment + - where: + verb: Get + subject: 
DeploymentOperation + set: + alias: Get-AzResourceGroupDeploymentOperation + - where: + verb: New + subject: Deployment + variant: Create.*Expanded.* + parameter-name: Parameter + set: + parameter-name: DeploymentPropertyParameter + - where: + verb: New + subject: Deployment + hide: true + - where: + verb: Test + subject: Deployment + variant: Validate.*Expanded.* + parameter-name: Parameter + set: + parameter-name: DeploymentPropertyParameter + - where: + verb: New + subject: Deployment + parameter-name: DebugSettingDetailLevel + set: + parameter-name: DeploymentDebugLogLevel + - where: + subject: Provider + set: + subject: ResourceProvider + - where: + subject: ProviderFeature|ResourceProvider|ResourceLock + parameter-name: ResourceProviderNamespace + set: + alias: ProviderNamespace + - where: + verb: Update + subject: ResourceGroup + parameter-name: Name + clear-alias: true + - where: + parameter-name: UpnOrObjectId + set: + alias: ['UserPrincipalName', 'Upn', 'ObjectId'] + - where: + subject: Deployment + variant: (.*)Expanded(.*) + parameter-name: Parameter + set: + parameter-name: DeploymentParameter + # Format output + - where: + model-name: GenericResource + set: + format-table: + properties: + - Name + - ResourceGroupName + - Type + - Location + labels: + Type: ResourceType + - where: + model-name: ResourceGroup + set: + format-table: + properties: + - Name + - Location + - ProvisioningState + - where: + model-name: DeploymentExtended + set: + format-table: + properties: + - Name + - ProvisioningState + - Timestamp + - Mode + - where: + model-name: PolicyAssignment + set: + format-table: + properties: + - Name + - DisplayName + - Id + - where: + model-name: PolicyDefinition + set: + format-table: + properties: + - Name + - DisplayName + - Id + - where: + model-name: PolicySetDefinition + set: + format-table: + properties: + - Name + - DisplayName + - Id + - where: + model-name: Provider + set: + format-table: + properties: + - Namespace + - RegistrationState + - where: + model-name: ProviderResourceType + set: + format-table: + properties: + - ResourceType + - Location + - ApiVersion + - where: + model-name: FeatureResult + set: + format-table: + properties: + - Name + - State + - where: + model-name: TagDetails + set: + format-table: + properties: + - TagName + - CountValue + - where: + model-name: Application + set: + format-table: + properties: + - DisplayName + - ObjectId + - AppId + - Homepage + - AvailableToOtherTenant + - where: + model-name: KeyCredential + set: + format-table: + properties: + - StartDate + - EndDate + - KeyId + - Type + - where: + model-name: PasswordCredential + set: + format-table: + properties: + - StartDate + - EndDate + - KeyId + - where: + model-name: User + set: + format-table: + properties: + - PrincipalName + - DisplayName + - ObjectId + - Type + - where: + model-name: AdGroup + set: + format-table: + properties: + - DisplayName + - Mail + - ObjectId + - SecurityEnabled + - where: + model-name: ServicePrincipal + set: + format-table: + properties: + - DisplayName + - ObjectId + - AppDisplayName + - AppId + - where: + model-name: Location + set: + format-table: + properties: + - Name + - DisplayName + - where: + model-name: ManagementLockObject + set: + format-table: + properties: + - Name + - Level + - ResourceId + - where: + model-name: RoleAssignment + set: + format-table: + properties: + - DisplayName + - ObjectId + - ObjectType + - RoleDefinitionName + - Scope + - where: + model-name: RoleDefinition + set: + format-table: + properties: + - 
RoleName + - Name + - Action +# To remove cmdlets not used in the test frame + - where: + subject: Operation + remove: true + - where: + subject: Deployment + variant: (.*)1|Cancel(.*)|Validate(.*)|Export(.*)|List(.*)|Delete(.*)|Check(.*)|Calculate(.*) + remove: true + - where: + subject: ResourceProvider + variant: Register(.*)|Unregister(.*)|Get(.*) + remove: true + - where: + subject: ResourceGroup + variant: List(.*)|Update(.*)|Export(.*)|Move(.*) + remove: true + - where: + subject: Resource + remove: true + - where: + subject: Tag|TagValue + remove: true + - where: + subject: DeploymentOperation + remove: true + - where: + subject: DeploymentTemplate + remove: true + - where: + subject: Calculate(.*) + remove: true + - where: + subject: ResourceExistence + remove: true + - where: + subject: ResourceMoveResource + remove: true + - where: + subject: DeploymentExistence + remove: true +``` diff --git a/tests-upgrade/mysql/tools/Resources/resources/CmdletSurface-latest-2019-04-30.md b/tests-upgrade/mysql/tools/Resources/resources/CmdletSurface-latest-2019-04-30.md new file mode 100644 index 00000000000..278ea694e0f --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/resources/CmdletSurface-latest-2019-04-30.md @@ -0,0 +1,598 @@ +### AzADApplication [Get, New, Remove, Update] `IApplication, Boolean` + - TenantId `String` + - ObjectId `String` + - IncludeDeleted `SwitchParameter` + - InputObject `IResourcesIdentity` + - HardDelete `SwitchParameter` + - Filter `String` + - IdentifierUri `String` + - DisplayNameStartWith `String` + - DisplayName `String` + - ApplicationId `String` + - AllowGuestsSignIn `SwitchParameter` + - AllowPassthroughUser `SwitchParameter` + - AppLogoUrl `String` + - AppPermission `String[]` + - AppRole `IAppRole[]` + - AvailableToOtherTenants `SwitchParameter` + - ErrorUrl `String` + - GroupMembershipClaim `GroupMembershipClaimTypes` + - Homepage `String` + - InformationalUrlMarketing `String` + - InformationalUrlPrivacy `String` + - InformationalUrlSupport `String` + - InformationalUrlTermsOfService `String` + - IsDeviceOnlyAuthSupported `SwitchParameter` + - KeyCredentials `IKeyCredential[]` + - KnownClientApplication `String[]` + - LogoutUrl `String` + - Oauth2AllowImplicitFlow `SwitchParameter` + - Oauth2AllowUrlPathMatching `SwitchParameter` + - Oauth2Permission `IOAuth2Permission[]` + - Oauth2RequirePostResponse `SwitchParameter` + - OptionalClaimAccessToken `IOptionalClaim[]` + - OptionalClaimIdToken `IOptionalClaim[]` + - OptionalClaimSamlToken `IOptionalClaim[]` + - OrgRestriction `String[]` + - PasswordCredentials `IPasswordCredential[]` + - PreAuthorizedApplication `IPreAuthorizedApplication[]` + - PublicClient `SwitchParameter` + - PublisherDomain `String` + - ReplyUrl `String[]` + - RequiredResourceAccess `IRequiredResourceAccess[]` + - SamlMetadataUrl `String` + - SignInAudience `String` + - WwwHomepage `String` + - Parameter `IApplicationCreateParameters` + - PassThru `SwitchParameter` + - AvailableToOtherTenant `SwitchParameter` + +### AzADApplicationOwner [Add, Get, Remove] `Boolean, IDirectoryObject` + - ObjectId `String` + - TenantId `String` + - InputObject `IResourcesIdentity` + - OwnerObjectId `String` + - AdditionalProperties `Hashtable` + - Url `String` + - Parameter `IAddOwnerParameters` + +### AzADDeletedApplication [Restore] `IApplication` + - ObjectId `String` + - TenantId `String` + - InputObject `IResourcesIdentity` + +### AzADGroup [Get, New, Remove] `IAdGroup, Boolean` + - TenantId `String` + - ObjectId `String` + - InputObject 
`IResourcesIdentity` + - Filter `String` + - DisplayNameStartsWith `String` + - DisplayName `String` + - AdditionalProperties `Hashtable` + - MailNickname `String` + - Parameter `IGroupCreateParameters` + - PassThru `SwitchParameter` + +### AzADGroupMember [Add, Get, Remove, Test] `Boolean, IDirectoryObject, SwitchParameter` + - GroupObjectId `String` + - TenantId `String` + - MemberObjectId `String[]` + - MemberUserPrincipalName `String[]` + - GroupObject `IAdGroup` + - GroupDisplayName `String` + - InputObject `IResourcesIdentity` + - ObjectId `String` + - ShowOwner `SwitchParameter` + - PassThru `SwitchParameter` + - AdditionalProperties `Hashtable` + - Url `String` + - Parameter `IGroupAddMemberParameters` + - DisplayName `String` + - GroupId `String` + - MemberId `String` + +### AzADGroupMemberGroup [Get] `String` + - ObjectId `String` + - TenantId `String` + - InputObject `IResourcesIdentity` + - AdditionalProperties `Hashtable` + - SecurityEnabledOnly `SwitchParameter` + - Parameter `IGroupGetMemberGroupsParameters` + +### AzADGroupOwner [Add, Remove] `Boolean` + - ObjectId `String` + - TenantId `String` + - GroupObjectId `String` + - MemberObjectId `String[]` + - InputObject `IResourcesIdentity` + - OwnerObjectId `String` + - PassThru `SwitchParameter` + - AdditionalProperties `Hashtable` + - Url `String` + - Parameter `IAddOwnerParameters` + +### AzADObject [Get] `IDirectoryObject` + - TenantId `String` + - InputObject `IResourcesIdentity` + - AdditionalProperties `Hashtable` + - IncludeDirectoryObjectReference `SwitchParameter` + - ObjectId `String[]` + - Type `String[]` + - Parameter `IGetObjectsParameters` + +### AzADServicePrincipal [Get, New, Remove, Update] `IServicePrincipal, Boolean` + - TenantId `String` + - ObjectId `String` + - InputObject `IResourcesIdentity` + - Filter `String` + - ApplicationObject `IApplication` + - ServicePrincipalName `String` + - DisplayNameBeginsWith `String` + - DisplayName `String` + - ApplicationId `String` + - AccountEnabled `SwitchParameter` + - AppId `String` + - AppRoleAssignmentRequired `SwitchParameter` + - KeyCredentials `IKeyCredential[]` + - PasswordCredentials `IPasswordCredential[]` + - ServicePrincipalType `String` + - Tag `String[]` + - Parameter `IServicePrincipalCreateParameters` + - PassThru `SwitchParameter` + +### AzADServicePrincipalOwner [Get] `IDirectoryObject` + - ObjectId `String` + - TenantId `String` + +### AzADUser [Get, New, Remove, Update] `IUser, Boolean` + - TenantId `String` + - UpnOrObjectId `String` + - InputObject `IResourcesIdentity` + - Filter `String` + - DisplayName `String` + - StartsWith `String` + - Mail `String` + - MailNickname `String` + - Parameter `IUserCreateParameters` + - AccountEnabled `SwitchParameter` + - GivenName `String` + - ImmutableId `String` + - PasswordProfile `IPasswordProfile` + - Surname `String` + - UsageLocation `String` + - UserPrincipalName `String` + - UserType `UserType` + - PassThru `SwitchParameter` + - EnableAccount `SwitchParameter` + +### AzADUserMemberGroup [Get] `String` + - ObjectId `String` + - TenantId `String` + - InputObject `IResourcesIdentity` + - AdditionalProperties `Hashtable` + - SecurityEnabledOnly `SwitchParameter` + - Parameter `IUserGetMemberGroupsParameters` + +### AzApplicationKeyCredentials [Get, Update] `IKeyCredential, Boolean` + - ObjectId `String` + - TenantId `String` + - InputObject `IResourcesIdentity` + - Parameter `IKeyCredentialsUpdateParameters` + - Value `IKeyCredential[]` + +### AzApplicationPasswordCredentials [Get, Update] 
`IPasswordCredential, Boolean` + - ObjectId `String` + - TenantId `String` + - InputObject `IResourcesIdentity` + - Parameter `IPasswordCredentialsUpdateParameters` + - Value `IPasswordCredential[]` + +### AzAuthorizationOperation [Get] `IOperation` + +### AzClassicAdministrator [Get] `IClassicAdministrator` + - SubscriptionId `String[]` + +### AzDenyAssignment [Get] `IDenyAssignment` + - Id `String` + - Scope `String` + - InputObject `IResourcesIdentity` + - ParentResourcePath `String` + - ResourceGroupName `String` + - ResourceName `String` + - ResourceProviderNamespace `String` + - ResourceType `String` + - SubscriptionId `String[]` + - Filter `String` + +### AzDeployment [Get, New, Remove, Set, Stop, Test] `IDeploymentExtended, Boolean, IDeploymentValidateResult` + - SubscriptionId `String[]` + - Name `String` + - ResourceGroupName `String` + - Id `String` + - InputObject `IResourcesIdentity` + - Filter `String` + - Top `Int32` + - Parameter `IDeployment` + - DebugSettingDetailLevel `String` + - Location `String` + - Mode `DeploymentMode` + - OnErrorDeploymentName `String` + - OnErrorDeploymentType `OnErrorDeploymentType` + - ParameterLinkContentVersion `String` + - ParameterLinkUri `String` + - Template `IDeploymentPropertiesTemplate` + - TemplateLinkContentVersion `String` + - TemplateLinkUri `String` + - PassThru `SwitchParameter` + +### AzDeploymentExistence [Test] `Boolean` + - DeploymentName `String` + - SubscriptionId `String` + - ResourceGroupName `String` + - InputObject `IResourcesIdentity` + +### AzDeploymentOperation [Get] `IDeploymentOperation` + - DeploymentName `String` + - SubscriptionId `String[]` + - ResourceGroupName `String` + - OperationId `String` + - DeploymentObject `IDeploymentExtended` + - InputObject `IResourcesIdentity` + - Top `Int32` + +### AzDeploymentTemplate [Export] `IDeploymentExportResultTemplate` + - DeploymentName `String` + - SubscriptionId `String` + - ResourceGroupName `String` + - InputObject `IResourcesIdentity` + +### AzDomain [Get] `IDomain` + - TenantId `String` + - Name `String` + - InputObject `IResourcesIdentity` + - Filter `String` + +### AzElevateGlobalAdministratorAccess [Invoke] `Boolean` + +### AzEntity [Get] `IEntityInfo` + - Filter `String` + - GroupName `String` + - Search `String` + - Select `String` + - Skip `Int32` + - Skiptoken `String` + - Top `Int32` + - View `String` + - CacheControl `String` + +### AzManagedApplication [Get, New, Remove, Set, Update] `IApplication, Boolean` + - Id `String` + - Name `String` + - ResourceGroupName `String` + - SubscriptionId `String[]` + - InputObject `IResourcesIdentity` + - Parameter `IApplication` + - ApplicationDefinitionId `String` + - IdentityType `ResourceIdentityType` + - Kind `String` + - Location `String` + - ManagedBy `String` + - ManagedResourceGroupId `String` + - PlanName `String` + - PlanProduct `String` + - PlanPromotionCode `String` + - PlanPublisher `String` + - PlanVersion `String` + - SkuCapacity `Int32` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `Hashtable` + +### AzManagedApplicationDefinition [Get, New, Remove, Set] `IApplicationDefinition, Boolean` + - Id `String` + - Name `String` + - ResourceGroupName `String` + - SubscriptionId `String[]` + - InputObject `IResourcesIdentity` + - Parameter `IApplicationDefinition` + - Artifact `IApplicationArtifact[]` + - Authorization `IApplicationProviderAuthorization[]` + - CreateUiDefinition `IApplicationDefinitionPropertiesCreateUiDefinition` + - 
Description `String` + - DisplayName `String` + - IdentityType `ResourceIdentityType` + - IsEnabled `String` + - Location `String` + - LockLevel `ApplicationLockLevel` + - MainTemplate `IApplicationDefinitionPropertiesMainTemplate` + - ManagedBy `String` + - PackageFileUri `String` + - SkuCapacity `Int32` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `Hashtable` + +### AzManagementGroup [Get, New, Remove, Set, Update] `IManagementGroup, IManagementGroupInfo, Boolean` + - GroupId `String` + - InputObject `IResourcesIdentity` + - Skiptoken `String` + - Expand `String` + - Filter `String` + - Recurse `SwitchParameter` + - CacheControl `String` + - DisplayName `String` + - Name `String` + - ParentId `String` + - CreateManagementGroupRequest `ICreateManagementGroupRequest` + - PatchGroupRequest `IPatchManagementGroupRequest` + +### AzManagementGroupDescendant [Get] `IDescendantInfo` + - GroupId `String` + - InputObject `IResourcesIdentity` + - Skiptoken `String` + - Top `Int32` + +### AzManagementGroupSubscription [New, Remove] `Boolean` + - GroupId `String` + - SubscriptionId `String` + - InputObject `IResourcesIdentity` + - CacheControl `String` + +### AzManagementLock [Get, New, Remove, Set] `IManagementLockObject, Boolean` + - SubscriptionId `String[]` + - LockName `String` + - ResourceGroupName `String` + - ParentResourcePath `String` + - ResourceName `String` + - ResourceProviderNamespace `String` + - ResourceType `String` + - Scope `String` + - InputObject `IResourcesIdentity` + - Filter `String` + - Level `LockLevel` + - Note `String` + - Owner `IManagementLockOwner[]` + - Parameter `IManagementLockObject` + +### AzNameAvailability [Test] `ICheckNameAvailabilityResult` + - Name `String` + - Type `Type` + - CheckNameAvailabilityRequest `ICheckNameAvailabilityRequest` + +### AzOAuth2PermissionGrant [Get, New, Remove] `IOAuth2PermissionGrant, Boolean` + - TenantId `String` + - InputObject `IResourcesIdentity` + - Filter `String` + - ClientId `String` + - ConsentType `ConsentType` + - ExpiryTime `String` + - ObjectId `String` + - OdataType `String` + - PrincipalId `String` + - ResourceId `String` + - Scope `String` + - StartTime `String` + - Body `IOAuth2PermissionGrant` + +### AzPermission [Get] `IPermission` + - ResourceGroupName `String` + - SubscriptionId `String[]` + - ParentResourcePath `String` + - ResourceName `String` + - ResourceProviderNamespace `String` + - ResourceType `String` + +### AzPolicyAssignment [Get, New, Remove] `IPolicyAssignment` + - Id `String` + - Name `String` + - Scope `String` + - InputObject `IResourcesIdentity` + - ParentResourcePath `String` + - ResourceGroupName `String` + - ResourceName `String` + - ResourceProviderNamespace `String` + - ResourceType `String` + - SubscriptionId `String[]` + - PolicyDefinitionId `String` + - IncludeDescendent `SwitchParameter` + - Filter `String` + - Parameter `IPolicyAssignment` + - Description `String` + - DisplayName `String` + - IdentityType `ResourceIdentityType` + - Location `String` + - Metadata `IPolicyAssignmentPropertiesMetadata` + - NotScope `String[]` + - SkuName `String` + - SkuTier `String` + - PropertiesScope `String` + +### AzPolicyDefinition [Get, New, Remove, Set] `IPolicyDefinition, Boolean` + - SubscriptionId `String[]` + - Name `String` + - ManagementGroupName `String` + - Id `String` + - InputObject `IResourcesIdentity` + - BuiltIn `SwitchParameter` + - Parameter `IPolicyDefinition` + - Description `String` + - DisplayName `String` + 
- Metadata `IPolicyDefinitionPropertiesMetadata` + - Mode `PolicyMode` + - PolicyRule `IPolicyDefinitionPropertiesPolicyRule` + - PolicyType `PolicyType` + - PassThru `SwitchParameter` + +### AzPolicySetDefinition [Get, New, Remove, Set] `IPolicySetDefinition, Boolean` + - SubscriptionId `String[]` + - Name `String` + - ManagementGroupName `String` + - Id `String` + - InputObject `IResourcesIdentity` + - BuiltIn `SwitchParameter` + - Parameter `IPolicySetDefinition` + - Description `String` + - DisplayName `String` + - Metadata `IPolicySetDefinitionPropertiesMetadata` + - PolicyDefinition `IPolicyDefinitionReference[]` + - PolicyType `PolicyType` + - PassThru `SwitchParameter` + +### AzProviderFeature [Get, Register] `IFeatureResult` + - SubscriptionId `String[]` + - Name `String` + - ResourceProviderNamespace `String` + - InputObject `IResourcesIdentity` + +### AzProviderOperationsMetadata [Get] `IProviderOperationsMetadata` + - ResourceProviderNamespace `String` + - InputObject `IResourcesIdentity` + - Expand `String` + +### AzResource [Get, Move, New, Remove, Set, Test, Update] `IGenericResource, Boolean` + - ResourceId `String` + - Name `String` + - ParentResourcePath `String` + - ProviderNamespace `String` + - ResourceGroupName `String` + - ResourceType `String` + - SubscriptionId `String[]` + - InputObject `IResourcesIdentity` + - SourceResourceGroupName `String` + - ResourceName `String` + - ResourceProviderNamespace `String` + - Expand `String` + - Top `Int32` + - TagName `String` + - TagValue `String` + - Tag `Hashtable` + - Filter `String` + - PassThru `SwitchParameter` + - Resource `String[]` + - TargetResourceGroup `String` + - TargetSubscriptionId `String` + - TargetResourceGroupName `String` + - Parameter `IResourcesMoveInfo` + - IdentityType `ResourceIdentityType` + - IdentityUserAssignedIdentity `Hashtable` + - Kind `String` + - Location `String` + - ManagedBy `String` + - PlanName `String` + - PlanProduct `String` + - PlanPromotionCode `String` + - PlanPublisher `String` + - PlanVersion `String` + - Property `IGenericResourceProperties` + - SkuCapacity `Int32` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + +### AzResourceGroup [Export, Get, New, Remove, Set, Test, Update] `IResourceGroupExportResult, IResourceGroup, Boolean` + - ResourceGroupName `String` + - SubscriptionId `String` + - InputObject `IResourcesIdentity` + - Name `String` + - Id `String` + - Filter `String` + - Top `Int32` + - TagName `String` + - TagValue `String` + - Tag `Hashtable` + - Option `String` + - Resource `String[]` + - Parameter `IExportTemplateRequest` + - Location `String` + - ManagedBy `String` + +### AzResourceLink [Get, New, Remove, Set] `IResourceLink, Boolean` + - ResourceId `String` + - InputObject `IResourcesIdentity` + - SubscriptionId `String[]` + - Scope `String` + - FilterById `String` + - FilterByScope `Filter` + - Note `String` + - TargetId `String` + - Parameter `IResourceLink` + +### AzResourceMove [Test] `Boolean` + - SourceResourceGroupName `String` + - SubscriptionId `String` + - InputObject `IResourcesIdentity` + - PassThru `SwitchParameter` + - Resource `String[]` + - TargetResourceGroup `String` + - TargetSubscriptionId `String` + - TargetResourceGroupName `String` + - Parameter `IResourcesMoveInfo` + +### AzResourceProvider [Get, Register, Unregister] `IProvider` + - SubscriptionId `String[]` + - ResourceProviderNamespace `String` + - InputObject `IResourcesIdentity` + - Expand `String` + - Top `Int32` + +### 
AzResourceProviderOperationDetail [Get] `IResourceProviderOperationDefinition` + - ResourceProviderNamespace `String` + +### AzRoleAssignment [Get, New, Remove] `IRoleAssignment` + - Id `String` + - Name `String` + - Scope `String` + - RoleId `String` + - InputObject `IResourcesIdentity` + - ParentResourceId `String` + - ResourceGroupName `String` + - ResourceName `String` + - ResourceProviderNamespace `String` + - ResourceType `String` + - SubscriptionId `String[]` + - ExpandPrincipalGroups `String` + - ServicePrincipalName `String` + - SignInName `String` + - Filter `String` + - CanDelegate `SwitchParameter` + - PrincipalId `String` + - RoleDefinitionId `String` + - Parameter `IRoleAssignmentCreateParameters` + - PrincipalType `PrincipalType` + +### AzRoleDefinition [Get, New, Remove, Set] `IRoleDefinition` + - Id `String` + - Scope `String` + - InputObject `IResourcesIdentity` + - Name `String` + - Custom `SwitchParameter` + - Filter `String` + - AssignableScope `String[]` + - Description `String` + - Permission `IPermission[]` + - RoleName `String` + - RoleType `String` + - RoleDefinition `IRoleDefinition` + +### AzSubscriptionLocation [Get] `ILocation` + - SubscriptionId `String[]` + +### AzTag [Get, New, Remove] `ITagDetails, Boolean` + - SubscriptionId `String[]` + - Name `String` + - Value `String` + - InputObject `IResourcesIdentity` + - PassThru `SwitchParameter` + +### AzTenantBackfill [Start] `ITenantBackfillStatusResult` + +### AzTenantBackfillStatus [Invoke] `ITenantBackfillStatusResult` + diff --git a/tests-upgrade/mysql/tools/Resources/resources/ModelSurface.md b/tests-upgrade/mysql/tools/Resources/resources/ModelSurface.md new file mode 100644 index 00000000000..378e3ec418a --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/resources/ModelSurface.md @@ -0,0 +1,1645 @@ +### AddOwnerParameters \ [Api16] + - Url `String` + +### AdGroup \ [Api16] + - DeletionTimestamp `DateTime?` **{MinValue, MaxValue}** + - DisplayName `String` + - Mail `String` + - MailEnabled `Boolean?` + - MailNickname `String` + - ObjectId `String` + - ObjectType `String` + - SecurityEnabled `Boolean?` + +### AliasPathType [Api20180501] + - ApiVersion `String[]` + - Path `String` + +### AliasType [Api20180501] + - Name `String` + - Path `IAliasPathType[]` + +### Appliance [Api20160901Preview] + - DefinitionId `String` + - Id `String` + - Identity `IIdentity` + - IdentityPrincipalId `String` + - IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - Kind `String` + - Location `String` + - ManagedBy `String` + - ManagedResourceGroupId `String` + - Name `String` + - Output `IAppliancePropertiesOutputs` + - Parameter `IAppliancePropertiesParameters` + - PlanName `String` + - PlanProduct `String` + - PlanPromotionCode `String` + - PlanPublisher `String` + - PlanVersion `String` + - ProvisioningState `String` + - Sku `ISku` + - SkuCapacity `Int32?` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `IResourceTags ` + - Type `String` + - UiDefinitionUri `String` + +### ApplianceArtifact [Api20160901Preview] + - Name `String` + - Type `ApplianceArtifactType?` **{Custom, Template}** + - Uri `String` + +### ApplianceDefinition [Api20160901Preview] + - Artifact `IApplianceArtifact[]` + - Authorization `IApplianceProviderAuthorization[]` + - Description `String` + - DisplayName `String` + - Id `String` + - Identity `IIdentity` + - IdentityPrincipalId `String` + - 
IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - Location `String` + - LockLevel `ApplianceLockLevel` **{CanNotDelete, None, ReadOnly}** + - ManagedBy `String` + - Name `String` + - PackageFileUri `String` + - Sku `ISku` + - SkuCapacity `Int32?` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `IResourceTags ` + - Type `String` + +### ApplianceDefinitionListResult [Api20160901Preview] + - NextLink `String` + - Value `IApplianceDefinition[]` + +### ApplianceDefinitionProperties [Api20160901Preview] + - Artifact `IApplianceArtifact[]` + - Authorization `IApplianceProviderAuthorization[]` + - Description `String` + - DisplayName `String` + - LockLevel `ApplianceLockLevel` **{CanNotDelete, None, ReadOnly}** + - PackageFileUri `String` + +### ApplianceListResult [Api20160901Preview] + - NextLink `String` + - Value `IAppliance[]` + +### AppliancePatchable [Api20160901Preview] + - ApplianceDefinitionId `String` + - Id `String` + - Identity `IIdentity` + - IdentityPrincipalId `String` + - IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - Kind `String` + - Location `String` + - ManagedBy `String` + - ManagedResourceGroupId `String` + - Name `String` + - Output `IAppliancePropertiesPatchableOutputs` + - Parameter `IAppliancePropertiesPatchableParameters` + - PlanName `String` + - PlanProduct `String` + - PlanPromotionCode `String` + - PlanPublisher `String` + - PlanVersion `String` + - ProvisioningState `String` + - Sku `ISku` + - SkuCapacity `Int32?` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `IResourceTags ` + - Type `String` + - UiDefinitionUri `String` + +### ApplianceProperties [Api20160901Preview] + - ApplianceDefinitionId `String` + - ManagedResourceGroupId `String` + - Output `IAppliancePropertiesOutputs` + - Parameter `IAppliancePropertiesParameters` + - ProvisioningState `String` + - UiDefinitionUri `String` + +### AppliancePropertiesPatchable [Api20160901Preview] + - ApplianceDefinitionId `String` + - ManagedResourceGroupId `String` + - Output `IAppliancePropertiesPatchableOutputs` + - Parameter `IAppliancePropertiesPatchableParameters` + - ProvisioningState `String` + - UiDefinitionUri `String` + +### ApplianceProviderAuthorization [Api20160901Preview] + - PrincipalId `String` + - RoleDefinitionId `String` + +### Application \ [Api16, Api20170901, Api20180601] + - AllowGuestsSignIn `Boolean?` + - AllowPassthroughUser `Boolean?` + - AppId `String` + - AppLogoUrl `String` + - AppPermission `String[]` + - AppRole `IAppRole[]` + - AvailableToOtherTenant `Boolean?` + - DefinitionId `String` + - DeletionTimestamp `DateTime?` **{MinValue, MaxValue}** + - DisplayName `String` + - ErrorUrl `String` + - GroupMembershipClaim `GroupMembershipClaimTypes?` **{All, None, SecurityGroup}** + - Homepage `String` + - Id `String` + - IdentifierUri `String[]` + - Identity `IIdentity` + - IdentityPrincipalId `String` + - IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - InformationalUrlMarketing `String` + - InformationalUrlPrivacy `String` + - InformationalUrlSupport `String` + - InformationalUrlTermsOfService `String` + - IsDeviceOnlyAuthSupported `Boolean?` + - KeyCredentials `IKeyCredential[]` + - Kind 
`String` + - KnownClientApplication `String[]` + - Location `String` + - LogoutUrl `String` + - ManagedBy `String` + - ManagedResourceGroupId `String` + - Name `String` + - Oauth2AllowImplicitFlow `Boolean?` + - Oauth2AllowUrlPathMatching `Boolean?` + - Oauth2Permission `IOAuth2Permission[]` + - Oauth2RequirePostResponse `Boolean?` + - ObjectId `String` + - ObjectType `String` + - OptionalClaimAccessToken `IOptionalClaim[]` + - OptionalClaimIdToken `IOptionalClaim[]` + - OptionalClaimSamlToken `IOptionalClaim[]` + - OrgRestriction `String[]` + - Output `IApplicationPropertiesOutputs` + - Parameter `IApplicationPropertiesParameters` + - PasswordCredentials `IPasswordCredential[]` + - PlanName `String` + - PlanProduct `String` + - PlanPromotionCode `String` + - PlanPublisher `String` + - PlanVersion `String` + - PreAuthorizedApplication `IPreAuthorizedApplication[]` + - ProvisioningState `String` + - PublicClient `Boolean?` + - PublisherDomain `String` + - ReplyUrl `String[]` + - RequiredResourceAccess `IRequiredResourceAccess[]` + - SamlMetadataUrl `String` + - SignInAudience `String` + - Sku `ISku` + - SkuCapacity `Int32?` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `IResourceTags ` + - Type `String` + - UiDefinitionUri `String` + - WwwHomepage `String` + +### ApplicationArtifact [Api20170901] + - Name `String` + - Type `ApplicationArtifactType?` **{Custom, Template}** + - Uri `String` + +### ApplicationBase [Api16] + - AllowGuestsSignIn `Boolean?` + - AllowPassthroughUser `Boolean?` + - AppLogoUrl `String` + - AppPermission `String[]` + - AppRole `IAppRole[]` + - AvailableToOtherTenant `Boolean?` + - ErrorUrl `String` + - GroupMembershipClaim `GroupMembershipClaimTypes?` **{All, None, SecurityGroup}** + - Homepage `String` + - InformationalUrlMarketing `String` + - InformationalUrlPrivacy `String` + - InformationalUrlSupport `String` + - InformationalUrlTermsOfService `String` + - IsDeviceOnlyAuthSupported `Boolean?` + - KeyCredentials `IKeyCredential[]` + - KnownClientApplication `String[]` + - LogoutUrl `String` + - Oauth2AllowImplicitFlow `Boolean?` + - Oauth2AllowUrlPathMatching `Boolean?` + - Oauth2Permission `IOAuth2Permission[]` + - Oauth2RequirePostResponse `Boolean?` + - OptionalClaimAccessToken `IOptionalClaim[]` + - OptionalClaimIdToken `IOptionalClaim[]` + - OptionalClaimSamlToken `IOptionalClaim[]` + - OrgRestriction `String[]` + - PasswordCredentials `IPasswordCredential[]` + - PreAuthorizedApplication `IPreAuthorizedApplication[]` + - PublicClient `Boolean?` + - PublisherDomain `String` + - ReplyUrl `String[]` + - RequiredResourceAccess `IRequiredResourceAccess[]` + - SamlMetadataUrl `String` + - SignInAudience `String` + - WwwHomepage `String` + +### ApplicationCreateParameters [Api16] + - AllowGuestsSignIn `Boolean?` + - AllowPassthroughUser `Boolean?` + - AppLogoUrl `String` + - AppPermission `String[]` + - AppRole `IAppRole[]` + - AvailableToOtherTenant `Boolean?` + - DisplayName `String` + - ErrorUrl `String` + - GroupMembershipClaim `GroupMembershipClaimTypes?` **{All, None, SecurityGroup}** + - Homepage `String` + - IdentifierUri `String[]` + - InformationalUrl `IInformationalUrl` + - InformationalUrlMarketing `String` + - InformationalUrlPrivacy `String` + - InformationalUrlSupport `String` + - InformationalUrlTermsOfService `String` + - IsDeviceOnlyAuthSupported `Boolean?` + - KeyCredentials `IKeyCredential[]` + - KnownClientApplication `String[]` + - LogoutUrl `String` + - 
Oauth2AllowImplicitFlow `Boolean?` + - Oauth2AllowUrlPathMatching `Boolean?` + - Oauth2Permission `IOAuth2Permission[]` + - Oauth2RequirePostResponse `Boolean?` + - OptionalClaim `IOptionalClaims` + - OptionalClaimAccessToken `IOptionalClaim[]` + - OptionalClaimIdToken `IOptionalClaim[]` + - OptionalClaimSamlToken `IOptionalClaim[]` + - OrgRestriction `String[]` + - PasswordCredentials `IPasswordCredential[]` + - PreAuthorizedApplication `IPreAuthorizedApplication[]` + - PublicClient `Boolean?` + - PublisherDomain `String` + - ReplyUrl `String[]` + - RequiredResourceAccess `IRequiredResourceAccess[]` + - SamlMetadataUrl `String` + - SignInAudience `String` + - WwwHomepage `String` + +### ApplicationDefinition [Api20170901] + - Artifact `IApplicationArtifact[]` + - Authorization `IApplicationProviderAuthorization[]` + - CreateUiDefinition `IApplicationDefinitionPropertiesCreateUiDefinition` + - Description `String` + - DisplayName `String` + - Id `String` + - Identity `IIdentity` + - IdentityPrincipalId `String` + - IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - IsEnabled `String` + - Location `String` + - LockLevel `ApplicationLockLevel` **{CanNotDelete, None, ReadOnly}** + - MainTemplate `IApplicationDefinitionPropertiesMainTemplate` + - ManagedBy `String` + - Name `String` + - PackageFileUri `String` + - Sku `ISku` + - SkuCapacity `Int32?` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `IResourceTags ` + - Type `String` + +### ApplicationDefinitionListResult [Api20180601] + - NextLink `String` + - Value `IApplicationDefinition[]` + +### ApplicationDefinitionProperties [Api20170901] + - Artifact `IApplicationArtifact[]` + - Authorization `IApplicationProviderAuthorization[]` + - CreateUiDefinition `IApplicationDefinitionPropertiesCreateUiDefinition` + - Description `String` + - DisplayName `String` + - IsEnabled `String` + - LockLevel `ApplicationLockLevel` **{CanNotDelete, None, ReadOnly}** + - MainTemplate `IApplicationDefinitionPropertiesMainTemplate` + - PackageFileUri `String` + +### ApplicationListResult [Api16, Api20180601] + - NextLink `String` + - OdataNextLink `String` + - Value `IApplication[]` + +### ApplicationPatchable [Api20170901, Api20180601] + - ApplicationDefinitionId `String` + - Id `String` + - Identity `IIdentity` + - IdentityPrincipalId `String` + - IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - Kind `String` + - Location `String` + - ManagedBy `String` + - ManagedResourceGroupId `String` + - Name `String` + - Output `IApplicationPropertiesPatchableOutputs` + - Parameter `IApplicationPropertiesPatchableParameters` + - PlanName `String` + - PlanProduct `String` + - PlanPromotionCode `String` + - PlanPublisher `String` + - PlanVersion `String` + - ProvisioningState `String` + - Sku `ISku` + - SkuCapacity `Int32?` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `IResourceTags ` + - Type `String` + - UiDefinitionUri `String` + +### ApplicationProperties [Api20170901, Api20180601] + - ApplicationDefinitionId `String` + - ManagedResourceGroupId `String` + - Output `IApplicationPropertiesOutputs` + - Parameter `IApplicationPropertiesParameters` + - ProvisioningState `String` + - UiDefinitionUri `String` + +### ApplicationPropertiesPatchable [Api20170901, 
Api20180601] + - ApplicationDefinitionId `String` + - ManagedResourceGroupId `String` + - Output `IApplicationPropertiesPatchableOutputs` + - Parameter `IApplicationPropertiesPatchableParameters` + - ProvisioningState `String` + - UiDefinitionUri `String` + +### ApplicationProviderAuthorization [Api20170901] + - PrincipalId `String` + - RoleDefinitionId `String` + +### ApplicationUpdateParameters [Api16] + - AllowGuestsSignIn `Boolean?` + - AllowPassthroughUser `Boolean?` + - AppLogoUrl `String` + - AppPermission `String[]` + - AppRole `IAppRole[]` + - AvailableToOtherTenant `Boolean?` + - DisplayName `String` + - ErrorUrl `String` + - GroupMembershipClaim `GroupMembershipClaimTypes?` **{All, None, SecurityGroup}** + - Homepage `String` + - IdentifierUri `String[]` + - InformationalUrl `IInformationalUrl` + - InformationalUrlMarketing `String` + - InformationalUrlPrivacy `String` + - InformationalUrlSupport `String` + - InformationalUrlTermsOfService `String` + - IsDeviceOnlyAuthSupported `Boolean?` + - KeyCredentials `IKeyCredential[]` + - KnownClientApplication `String[]` + - LogoutUrl `String` + - Oauth2AllowImplicitFlow `Boolean?` + - Oauth2AllowUrlPathMatching `Boolean?` + - Oauth2Permission `IOAuth2Permission[]` + - Oauth2RequirePostResponse `Boolean?` + - OptionalClaim `IOptionalClaims` + - OptionalClaimAccessToken `IOptionalClaim[]` + - OptionalClaimIdToken `IOptionalClaim[]` + - OptionalClaimSamlToken `IOptionalClaim[]` + - OrgRestriction `String[]` + - PasswordCredentials `IPasswordCredential[]` + - PreAuthorizedApplication `IPreAuthorizedApplication[]` + - PublicClient `Boolean?` + - PublisherDomain `String` + - ReplyUrl `String[]` + - RequiredResourceAccess `IRequiredResourceAccess[]` + - SamlMetadataUrl `String` + - SignInAudience `String` + - WwwHomepage `String` + +### AppRole [Api16] + - AllowedMemberType `String[]` + - Description `String` + - DisplayName `String` + - Id `String` + - IsEnabled `Boolean?` + - Value `String` + +### BasicDependency [Api20180501] + - Id `String` + - ResourceName `String` + - ResourceType `String` + +### CheckGroupMembershipParameters \ [Api16] + - GroupId `String` + - MemberId `String` + +### CheckGroupMembershipResult \ [Api16] + - Value `Boolean?` + +### CheckNameAvailabilityRequest [Api20180301Preview] + - Name `String` + - Type `Type?` **{ProvidersMicrosoftManagementGroups}** + +### CheckNameAvailabilityResult [Api20180301Preview] + - Message `String` + - NameAvailable `Boolean?` + - Reason `Reason?` **{AlreadyExists, Invalid}** + +### ClassicAdministrator [Api20150701] + - EmailAddress `String` + - Id `String` + - Name `String` + - Role `String` + - Type `String` + +### ClassicAdministratorListResult [Api20150701] + - NextLink `String` + - Value `IClassicAdministrator[]` + +### ClassicAdministratorProperties [Api20150701] + - EmailAddress `String` + - Role `String` + +### ComponentsSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties [Api20180501] + - ClientId `String` + - PrincipalId `String` + +### CreateManagementGroupChildInfo [Api20180301Preview] + - Child `ICreateManagementGroupChildInfo[]` + - DisplayName `String` + - Id `String` + - Name `String` + - Role `String[]` + - Type `String` + +### CreateManagementGroupDetails [Api20180301Preview] + - ParentDisplayName `String` + - ParentId `String` + - ParentName `String` + - UpdatedBy `String` + - UpdatedTime `DateTime?` **{MinValue, MaxValue}** + - Version `Single?` + +### CreateManagementGroupProperties [Api20180301Preview] + - Child `ICreateManagementGroupChildInfo[]` 
+ - DetailUpdatedBy `String` + - DetailUpdatedTime `DateTime?` **{MinValue, MaxValue}** + - DetailVersion `Single?` + - DisplayName `String` + - ParentDisplayName `String` + - ParentId `String` + - ParentName `String` + - Role `String[]` + - TenantId `String` + +### CreateManagementGroupRequest [Api20180301Preview] + - Child `ICreateManagementGroupChildInfo[]` + - DetailUpdatedBy `String` + - DetailUpdatedTime `DateTime?` **{MinValue, MaxValue}** + - DetailVersion `Single?` + - DisplayName `String` + - Id `String` + - Name `String` + - ParentDisplayName `String` + - ParentId `String` + - ParentName `String` + - Role `String[]` + - TenantId `String` + - Type `String` + +### CreateParentGroupInfo [Api20180301Preview] + - DisplayName `String` + - Id `String` + - Name `String` + +### DebugSetting [Api20180501] + - DetailLevel `String` + +### DenyAssignment [Api20180701Preview] + - DenyAssignmentName `String` + - Description `String` + - DoNotApplyToChildScope `Boolean?` + - ExcludePrincipal `IPrincipal[]` + - Id `String` + - IsSystemProtected `Boolean?` + - Name `String` + - Permission `IDenyAssignmentPermission[]` + - Principal `IPrincipal[]` + - Scope `String` + - Type `String` + +### DenyAssignmentListResult [Api20180701Preview] + - NextLink `String` + - Value `IDenyAssignment[]` + +### DenyAssignmentPermission [Api20180701Preview] + - Action `String[]` + - DataAction `String[]` + - NotAction `String[]` + - NotDataAction `String[]` + +### DenyAssignmentProperties [Api20180701Preview] + - DenyAssignmentName `String` + - Description `String` + - DoNotApplyToChildScope `Boolean?` + - ExcludePrincipal `IPrincipal[]` + - IsSystemProtected `Boolean?` + - Permission `IDenyAssignmentPermission[]` + - Principal `IPrincipal[]` + - Scope `String` + +### Dependency [Api20180501] + - DependsOn `IBasicDependency[]` + - Id `String` + - ResourceName `String` + - ResourceType `String` + +### Deployment [Api20180501] + - DebugSettingDetailLevel `String` + - Location `String` + - Mode `DeploymentMode` **{Complete, Incremental}** + - OnErrorDeploymentName `String` + - OnErrorDeploymentType `OnErrorDeploymentType?` **{LastSuccessful, SpecificDeployment}** + - Parameter `IDeploymentPropertiesParameters` + - ParameterLinkContentVersion `String` + - ParameterLinkUri `String` + - Template `IDeploymentPropertiesTemplate` + - TemplateLinkContentVersion `String` + - TemplateLinkUri `String` + +### DeploymentExportResult [Api20180501] + - Template `IDeploymentExportResultTemplate` + +### DeploymentExtended [Api20180501] + - CorrelationId `String` + - DebugSettingDetailLevel `String` + - Dependency `IDependency[]` + - Id `String` + - Location `String` + - Mode `DeploymentMode?` **{Complete, Incremental}** + - Name `String` + - OnErrorDeploymentName `String` + - OnErrorDeploymentProvisioningState `String` + - OnErrorDeploymentType `OnErrorDeploymentType?` **{LastSuccessful, SpecificDeployment}** + - Output `IDeploymentPropertiesExtendedOutputs` + - Parameter `IDeploymentPropertiesExtendedParameters` + - ParameterLinkContentVersion `String` + - ParameterLinkUri `String` + - Provider `IProvider[]` + - ProvisioningState `String` + - Template `IDeploymentPropertiesExtendedTemplate` + - TemplateLinkContentVersion `String` + - TemplateLinkUri `String` + - Timestamp `DateTime?` **{MinValue, MaxValue}** + - Type `String` + +### DeploymentListResult [Api20180501] + - NextLink `String` + - Value `IDeploymentExtended[]` + +### DeploymentOperation [Api20180501] + - Id `String` + - OperationId `String` + - ProvisioningState `String` 
+ - RequestContent `IHttpMessageContent` + - ResponseContent `IHttpMessageContent` + - ServiceRequestId `String` + - StatusCode `String` + - StatusMessage `IDeploymentOperationPropertiesStatusMessage` + - TargetResourceId `String` + - TargetResourceName `String` + - TargetResourceType `String` + - Timestamp `DateTime?` **{MinValue, MaxValue}** + +### DeploymentOperationProperties [Api20180501] + - ProvisioningState `String` + - RequestContent `IHttpMessageContent` + - ResponseContent `IHttpMessageContent` + - ServiceRequestId `String` + - StatusCode `String` + - StatusMessage `IDeploymentOperationPropertiesStatusMessage` + - TargetResourceId `String` + - TargetResourceName `String` + - TargetResourceType `String` + - Timestamp `DateTime?` **{MinValue, MaxValue}** + +### DeploymentOperationsListResult [Api20180501] + - NextLink `String` + - Value `IDeploymentOperation[]` + +### DeploymentProperties [Api20180501] + - DebugSettingDetailLevel `String` + - Mode `DeploymentMode` **{Complete, Incremental}** + - OnErrorDeploymentName `String` + - OnErrorDeploymentType `OnErrorDeploymentType?` **{LastSuccessful, SpecificDeployment}** + - Parameter `IDeploymentPropertiesParameters` + - ParameterLinkContentVersion `String` + - ParameterLinkUri `String` + - Template `IDeploymentPropertiesTemplate` + - TemplateLinkContentVersion `String` + - TemplateLinkUri `String` + +### DeploymentPropertiesExtended [Api20180501] + - CorrelationId `String` + - DebugSettingDetailLevel `String` + - Dependency `IDependency[]` + - Mode `DeploymentMode?` **{Complete, Incremental}** + - OnErrorDeploymentName `String` + - OnErrorDeploymentProvisioningState `String` + - OnErrorDeploymentType `OnErrorDeploymentType?` **{LastSuccessful, SpecificDeployment}** + - Output `IDeploymentPropertiesExtendedOutputs` + - Parameter `IDeploymentPropertiesExtendedParameters` + - ParameterLinkContentVersion `String` + - ParameterLinkUri `String` + - Provider `IProvider[]` + - ProvisioningState `String` + - Template `IDeploymentPropertiesExtendedTemplate` + - TemplateLinkContentVersion `String` + - TemplateLinkUri `String` + - Timestamp `DateTime?` **{MinValue, MaxValue}** + +### DeploymentValidateResult [Api20180501] + - CorrelationId `String` + - DebugSettingDetailLevel `String` + - Dependency `IDependency[]` + - ErrorCode `String` + - ErrorDetail `IResourceManagementErrorWithDetails[]` + - ErrorMessage `String` + - ErrorTarget `String` + - Mode `DeploymentMode?` **{Complete, Incremental}** + - OnErrorDeploymentName `String` + - OnErrorDeploymentProvisioningState `String` + - OnErrorDeploymentType `OnErrorDeploymentType?` **{LastSuccessful, SpecificDeployment}** + - Output `IDeploymentPropertiesExtendedOutputs` + - Parameter `IDeploymentPropertiesExtendedParameters` + - ParameterLinkContentVersion `String` + - ParameterLinkUri `String` + - Provider `IProvider[]` + - ProvisioningState `String` + - Template `IDeploymentPropertiesExtendedTemplate` + - TemplateLinkContentVersion `String` + - TemplateLinkUri `String` + - Timestamp `DateTime?` **{MinValue, MaxValue}** + +### DescendantInfo [Api20180301Preview] + - DisplayName `String` + - Id `String` + - Name `String` + - ParentId `String` + - Type `String` + +### DescendantInfoProperties [Api20180301Preview] + - DisplayName `String` + - ParentId `String` + +### DescendantListResult [Api20180301Preview] + - NextLink `String` + - Value `IDescendantInfo[]` + +### DescendantParentGroupInfo [Api20180301Preview] + - Id `String` + +### DirectoryObject \ [Api16] + - DeletionTimestamp `DateTime?` 
**{MinValue, MaxValue}** + - ObjectId `String` + - ObjectType `String` + +### DirectoryObjectListResult [Api16] + - OdataNextLink `String` + - Value `IDirectoryObject[]` + +### Domain \ [Api16] + - AuthenticationType `String` + - IsDefault `Boolean?` + - IsVerified `Boolean?` + - Name `String` + +### DomainListResult [Api16] + - Value `IDomain[]` + +### EntityInfo [Api20180301Preview] + - DisplayName `String` + - Id `String` + - InheritedPermission `String` + - Name `String` + - NumberOfChild `Int32?` + - NumberOfChildGroup `Int32?` + - NumberOfDescendant `Int32?` + - ParentDisplayNameChain `String[]` + - ParentId `String` + - ParentNameChain `String[]` + - Permission `String` + - TenantId `String` + - Type `String` + +### EntityInfoProperties [Api20180301Preview] + - DisplayName `String` + - InheritedPermission `String` + - NumberOfChild `Int32?` + - NumberOfChildGroup `Int32?` + - NumberOfDescendant `Int32?` + - ParentDisplayNameChain `String[]` + - ParentId `String` + - ParentNameChain `String[]` + - Permission `String` + - TenantId `String` + +### EntityListResult [Api20180301Preview] + - Count `Int32?` + - NextLink `String` + - Value `IEntityInfo[]` + +### EntityParentGroupInfo [Api20180301Preview] + - Id `String` + +### ErrorDetails [Api20180301Preview] + - Code `String` + - Detail `String` + - Message `String` + +### ErrorMessage [Api16] + - Message `String` + +### ErrorResponse [Api20160901Preview, Api20180301Preview] + - ErrorCode `String` + - ErrorDetail `String` + - ErrorMessage `String` + - HttpStatus `String` + +### ExportTemplateRequest [Api20180501] + - Option `String` + - Resource `String[]` + +### FeatureOperationsListResult [Api20151201] + - NextLink `String` + - Value `IFeatureResult[]` + +### FeatureProperties [Api20151201] + - State `String` + +### FeatureResult [Api20151201] + - Id `String` + - Name `String` + - State `String` + - Type `String` + +### GenericResource [Api20160901Preview, Api20180501] + - Id `String` + - IdentityPrincipalId `String` + - IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - IdentityUserAssignedIdentity `IIdentityUserAssignedIdentities ` + - Kind `String` + - Location `String` + - ManagedBy `String` + - Name `String` + - PlanName `String` + - PlanProduct `String` + - PlanPromotionCode `String` + - PlanPublisher `String` + - PlanVersion `String` + - Property `IGenericResourceProperties` + - SkuCapacity `Int32?` + - SkuFamily `String` + - SkuModel `String` + - SkuName `String` + - SkuSize `String` + - SkuTier `String` + - Tag `IResourceTags ` + - Type `String` + +### GetObjectsParameters \ [Api16] + - IncludeDirectoryObjectReference `Boolean?` + - ObjectId `String[]` + - Type `String[]` + +### GraphError [Api16] + - ErrorMessageValueMessage `String` + - OdataErrorCode `String` + +### GroupAddMemberParameters \ [Api16] + - Url `String` + +### GroupCreateParameters \ [Api16] + - DisplayName `String` + - MailEnabled `Boolean` + - MailNickname `String` + - SecurityEnabled `Boolean` + +### GroupGetMemberGroupsParameters \ [Api16] + - SecurityEnabledOnly `Boolean` + +### GroupGetMemberGroupsResult [Api16] + - Value `String[]` + +### GroupListResult [Api16] + - OdataNextLink `String` + - Value `IAdGroup[]` + +### HttpMessage [Api20180501] + - Content `IHttpMessageContent` + +### Identity [Api20160901Preview, Api20180501] + - PrincipalId `String` + - TenantId `String` + - Type `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, 
UserAssigned}** + - UserAssignedIdentity `IIdentityUserAssignedIdentities ` + +### Identity1 [Api20180501] + - PrincipalId `String` + - TenantId `String` + - Type `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + +### InformationalUrl [Api16] + - Marketing `String` + - Privacy `String` + - Support `String` + - TermsOfService `String` + +### KeyCredential \ [Api16] + - CustomKeyIdentifier `String` + - EndDate `DateTime?` **{MinValue, MaxValue}** + - KeyId `String` + - StartDate `DateTime?` **{MinValue, MaxValue}** + - Type `String` + - Usage `String` + - Value `String` + +### KeyCredentialListResult [Api16] + - Value `IKeyCredential[]` + +### KeyCredentialsUpdateParameters [Api16] + - Value `IKeyCredential[]` + +### Location [Api20160601] + - DisplayName `String` + - Id `String` + - Latitude `String` + - Longitude `String` + - Name `String` + - SubscriptionId `String` + +### LocationListResult [Api20160601] + - Value `ILocation[]` + +### ManagementGroup [Api20180301Preview] + - Child `IManagementGroupChildInfo[]` + - DetailUpdatedBy `String` + - DetailUpdatedTime `DateTime?` **{MinValue, MaxValue}** + - DetailVersion `Single?` + - DisplayName `String` + - Id `String` + - Name `String` + - ParentDisplayName `String` + - ParentId `String` + - ParentName `String` + - Role `String[]` + - TenantId `String` + - Type `String` + +### ManagementGroupChildInfo [Api20180301Preview] + - Child `IManagementGroupChildInfo[]` + - DisplayName `String` + - Id `String` + - Name `String` + - Role `String[]` + - Type `String` + +### ManagementGroupDetails [Api20180301Preview] + - ParentDisplayName `String` + - ParentId `String` + - ParentName `String` + - UpdatedBy `String` + - UpdatedTime `DateTime?` **{MinValue, MaxValue}** + - Version `Single?` + +### ManagementGroupInfo [Api20180301Preview] + - DisplayName `String` + - Id `String` + - Name `String` + - TenantId `String` + - Type `String` + +### ManagementGroupInfoProperties [Api20180301Preview] + - DisplayName `String` + - TenantId `String` + +### ManagementGroupListResult [Api20180301Preview] + - NextLink `String` + - Value `IManagementGroupInfo[]` + +### ManagementGroupProperties [Api20180301Preview] + - Child `IManagementGroupChildInfo[]` + - DetailUpdatedBy `String` + - DetailUpdatedTime `DateTime?` **{MinValue, MaxValue}** + - DetailVersion `Single?` + - DisplayName `String` + - ParentDisplayName `String` + - ParentId `String` + - ParentName `String` + - Role `String[]` + - TenantId `String` + +### ManagementLockListResult [Api20160901] + - NextLink `String` + - Value `IManagementLockObject[]` + +### ManagementLockObject [Api20160901] + - Id `String` + - Level `LockLevel` **{CanNotDelete, NotSpecified, ReadOnly}** + - Name `String` + - Note `String` + - Owner `IManagementLockOwner[]` + - Type `String` + +### ManagementLockOwner [Api20160901] + - ApplicationId `String` + +### ManagementLockProperties [Api20160901] + - Level `LockLevel` **{CanNotDelete, NotSpecified, ReadOnly}** + - Note `String` + - Owner `IManagementLockOwner[]` + +### OAuth2Permission [Api16] + - AdminConsentDescription `String` + - AdminConsentDisplayName `String` + - Id `String` + - IsEnabled `Boolean?` + - Type `String` + - UserConsentDescription `String` + - UserConsentDisplayName `String` + - Value `String` + +### OAuth2PermissionGrant [Api16] + - ClientId `String` + - ConsentType `ConsentType?` **{AllPrincipals, Principal}** + - ExpiryTime `String` + - ObjectId `String` + - OdataType `String` + - PrincipalId `String` + - ResourceId 
`String` + - Scope `String` + - StartTime `String` + +### OAuth2PermissionGrantListResult [Api16] + - OdataNextLink `String` + - Value `IOAuth2PermissionGrant[]` + +### OdataError [Api16] + - Code `String` + - ErrorMessageValueMessage `String` + +### OnErrorDeployment [Api20180501] + - DeploymentName `String` + - Type `OnErrorDeploymentType?` **{LastSuccessful, SpecificDeployment}** + +### OnErrorDeploymentExtended [Api20180501] + - DeploymentName `String` + - ProvisioningState `String` + - Type `OnErrorDeploymentType?` **{LastSuccessful, SpecificDeployment}** + +### Operation [Api20151201, Api20180301Preview] + - DisplayDescription `String` + - DisplayOperation `String` + - DisplayProvider `String` + - DisplayResource `String` + - Name `String` + +### OperationDisplay [Api20151201] + - Operation `String` + - Provider `String` + - Resource `String` + +### OperationDisplayProperties [Api20180301Preview] + - Description `String` + - Operation `String` + - Provider `String` + - Resource `String` + +### OperationListResult [Api20151201, Api20180301Preview] + - NextLink `String` + - Value `IOperation[]` + +### OperationResults [Api20180301Preview] + - Id `String` + - Name `String` + - ProvisioningState `String` + - Type `String` + +### OperationResultsProperties [Api20180301Preview] + - ProvisioningState `String` + +### OptionalClaim [Api16] + - AdditionalProperty `IOptionalClaimAdditionalProperties` + - Essential `Boolean?` + - Name `String` + - Source `String` + +### OptionalClaims [Api16] + - AccessToken `IOptionalClaim[]` + - IdToken `IOptionalClaim[]` + - SamlToken `IOptionalClaim[]` + +### ParametersLink [Api20180501] + - ContentVersion `String` + - Uri `String` + +### ParentGroupInfo [Api20180301Preview] + - DisplayName `String` + - Id `String` + - Name `String` + +### PasswordCredential \ [Api16] + - CustomKeyIdentifier `Byte[]` + - EndDate `DateTime?` **{MinValue, MaxValue}** + - KeyId `String` + - StartDate `DateTime?` **{MinValue, MaxValue}** + - Value `String` + +### PasswordCredentialListResult [Api16] + - Value `IPasswordCredential[]` + +### PasswordCredentialsUpdateParameters [Api16] + - Value `IPasswordCredential[]` + +### PasswordProfile \ [Api16] + - ForceChangePasswordNextLogin `Boolean?` + - Password `String` + +### PatchManagementGroupRequest [Api20180301Preview] + - DisplayName `String` + - ParentId `String` + +### Permission [Api20150701, Api201801Preview] + - Action `String[]` + - DataAction `String[]` + - NotAction `String[]` + - NotDataAction `String[]` + +### PermissionGetResult [Api20150701, Api201801Preview] + - NextLink `String` + - Value `IPermission[]` + +### Plan [Api20160901Preview, Api20180501] + - Name `String` + - Product `String` + - PromotionCode `String` + - Publisher `String` + - Version `String` + +### PlanPatchable [Api20160901Preview] + - Name `String` + - Product `String` + - PromotionCode `String` + - Publisher `String` + - Version `String` + +### PolicyAssignment [Api20151101, Api20161201, Api20180501] + - Description `String` + - DisplayName `String` + - Id `String` + - IdentityPrincipalId `String` + - IdentityTenantId `String` + - IdentityType `ResourceIdentityType?` **{None, SystemAssigned, SystemAssignedUserAssigned, UserAssigned}** + - Location `String` + - Metadata `IPolicyAssignmentPropertiesMetadata` + - Name `String` + - NotScope `String[]` + - Parameter `IPolicyAssignmentPropertiesParameters` + - PolicyDefinitionId `String` + - Scope `String` + - SkuName `String` + - SkuTier `String` + - Type `String` + +### PolicyAssignmentListResult 
[Api20151101, Api20161201, Api20180501] + - NextLink `String` + - Value `IPolicyAssignment[]` + +### PolicyAssignmentProperties [Api20151101, Api20161201, Api20180501] + - Description `String` + - DisplayName `String` + - Metadata `IPolicyAssignmentPropertiesMetadata` + - NotScope `String[]` + - Parameter `IPolicyAssignmentPropertiesParameters` + - PolicyDefinitionId `String` + - Scope `String` + +### PolicyDefinition [Api20161201, Api20180501] + - Description `String` + - DisplayName `String` + - Id `String` + - Metadata `IPolicyDefinitionPropertiesMetadata` + - Mode `PolicyMode?` **{All, Indexed, NotSpecified}** + - Name `String` + - Parameter `IPolicyDefinitionPropertiesParameters` + - PolicyRule `IPolicyDefinitionPropertiesPolicyRule` + - PolicyType `PolicyType?` **{BuiltIn, Custom, NotSpecified}** + - Property `IPolicyDefinitionProperties` + - Type `String` + +### PolicyDefinitionListResult [Api20161201, Api20180501] + - NextLink `String` + - Value `IPolicyDefinition[]` + +### PolicyDefinitionProperties [Api20161201] + - Description `String` + - DisplayName `String` + - Metadata `IPolicyDefinitionPropertiesMetadata` + - Mode `PolicyMode?` **{All, Indexed, NotSpecified}** + - Parameter `IPolicyDefinitionPropertiesParameters` + - PolicyRule `IPolicyDefinitionPropertiesPolicyRule` + - PolicyType `PolicyType?` **{BuiltIn, Custom, NotSpecified}** + +### PolicyDefinitionReference [Api20180501] + - Parameter `IPolicyDefinitionReferenceParameters` + - PolicyDefinitionId `String` + +### PolicySetDefinition [Api20180501] + - Description `String` + - DisplayName `String` + - Id `String` + - Metadata `IPolicySetDefinitionPropertiesMetadata` + - Name `String` + - Parameter `IPolicySetDefinitionPropertiesParameters` + - PolicyDefinition `IPolicyDefinitionReference[]` + - PolicyType `PolicyType?` **{BuiltIn, Custom, NotSpecified}** + - Type `String` + +### PolicySetDefinitionListResult [Api20180501] + - NextLink `String` + - Value `IPolicySetDefinition[]` + +### PolicySetDefinitionProperties [Api20180501] + - Description `String` + - DisplayName `String` + - Metadata `IPolicySetDefinitionPropertiesMetadata` + - Parameter `IPolicySetDefinitionPropertiesParameters` + - PolicyDefinition `IPolicyDefinitionReference[]` + - PolicyType `PolicyType?` **{BuiltIn, Custom, NotSpecified}** + +### PolicySku [Api20180501] + - Name `String` + - Tier `String` + +### PreAuthorizedApplication [Api16] + - AppId `String` + - Extension `IPreAuthorizedApplicationExtension[]` + - Permission `IPreAuthorizedApplicationPermission[]` + +### PreAuthorizedApplicationExtension [Api16] + - Condition `String[]` + +### PreAuthorizedApplicationPermission [Api16] + - AccessGrant `String[]` + - DirectAccessGrant `Boolean?` + +### Principal [Api20180701Preview] + - Id `String` + - Type `String` + +### Provider [Api20180501] + - Id `String` + - Namespace `String` + - RegistrationState `String` + - ResourceType `IProviderResourceType[]` + +### ProviderListResult [Api20180501] + - NextLink `String` + - Value `IProvider[]` + +### ProviderOperation [Api20150701, Api201801Preview] + - Description `String` + - DisplayName `String` + - IsDataAction `Boolean?` + - Name `String` + - Origin `String` + - Property `IProviderOperationProperties` + +### ProviderOperationsMetadata [Api20150701, Api201801Preview] + - DisplayName `String` + - Id `String` + - Name `String` + - Operation `IProviderOperation[]` + - ResourceType `IResourceType[]` + - Type `String` + +### ProviderOperationsMetadataListResult [Api20150701, Api201801Preview] + - NextLink 
`String` + - Value `IProviderOperationsMetadata[]` + +### ProviderResourceType [Api20180501] + - Alias `IAliasType[]` + - ApiVersion `String[]` + - Location `String[]` + - Property `IProviderResourceTypeProperties ` + - ResourceType `String` + +### RequiredResourceAccess \ [Api16] + - ResourceAccess `IResourceAccess[]` + - ResourceAppId `String` + +### Resource [Api20160901Preview] + - Id `String` + - Location `String` + - Name `String` + - Tag `IResourceTags ` + - Type `String` + +### ResourceAccess \ [Api16] + - Id `String` + - Type `String` + +### ResourceGroup [Api20180501] + - Id `String` + - Location `String` + - ManagedBy `String` + - Name `String` + - ProvisioningState `String` + - Tag `IResourceGroupTags ` + - Type `String` + +### ResourceGroupExportResult [Api20180501] + - ErrorCode `String` + - ErrorDetail `IResourceManagementErrorWithDetails[]` + - ErrorMessage `String` + - ErrorTarget `String` + - Template `IResourceGroupExportResultTemplate` + +### ResourceGroupListResult [Api20180501] + - NextLink `String` + - Value `IResourceGroup[]` + +### ResourceGroupPatchable [Api20180501] + - ManagedBy `String` + - Name `String` + - ProvisioningState `String` + - Tag `IResourceGroupPatchableTags ` + +### ResourceGroupProperties [Api20180501] + - ProvisioningState `String` + +### ResourceLink [Api20160901] + - Id `String` + - Name `String` + - Note `String` + - SourceId `String` + - TargetId `String` + - Type `IResourceLinkType` + +### ResourceLinkProperties [Api20160901] + - Note `String` + - SourceId `String` + - TargetId `String` + +### ResourceLinkResult [Api20160901] + - NextLink `String` + - Value `IResourceLink[]` + +### ResourceListResult [Api20180501] + - NextLink `String` + - Value `IGenericResource[]` + +### ResourceManagementErrorWithDetails [Api20180501] + - Code `String` + - Detail `IResourceManagementErrorWithDetails[]` + - Message `String` + - Target `String` + +### ResourceProviderOperationDefinition [Api20151101] + - DisplayDescription `String` + - DisplayOperation `String` + - DisplayProvider `String` + - DisplayPublisher `String` + - DisplayResource `String` + - Name `String` + +### ResourceProviderOperationDetailListResult [Api20151101] + - NextLink `String` + - Value `IResourceProviderOperationDefinition[]` + +### ResourceProviderOperationDisplayProperties [Api20151101] + - Description `String` + - Operation `String` + - Provider `String` + - Publisher `String` + - Resource `String` + +### ResourcesIdentity [Models] + - ApplianceDefinitionId `String` + - ApplianceDefinitionName `String` + - ApplianceId `String` + - ApplianceName `String` + - ApplicationDefinitionId `String` + - ApplicationDefinitionName `String` + - ApplicationId `String` + - ApplicationId1 `String` + - ApplicationName `String` + - ApplicationObjectId `String` + - DenyAssignmentId `String` + - DeploymentName `String` + - DomainName `String` + - FeatureName `String` + - GroupId `String` + - GroupObjectId `String` + - Id `String` + - LinkId `String` + - LockName `String` + - ManagementGroupId `String` + - MemberObjectId `String` + - ObjectId `String` + - OperationId `String` + - OwnerObjectId `String` + - ParentResourcePath `String` + - PolicyAssignmentId `String` + - PolicyAssignmentName `String` + - PolicyDefinitionName `String` + - PolicySetDefinitionName `String` + - ResourceGroupName `String` + - ResourceId `String` + - ResourceName `String` + - ResourceProviderNamespace `String` + - ResourceType `String` + - RoleAssignmentId `String` + - RoleAssignmentName `String` + - RoleDefinitionId `String` 
+ - RoleId `String` + - Scope `String` + - SourceResourceGroupName `String` + - SubscriptionId `String` + - TagName `String` + - TagValue `String` + - TenantId `String` + - UpnOrObjectId `String` + +### ResourcesMoveInfo [Api20180501] + - Resource `String[]` + - TargetResourceGroup `String` + +### ResourceType [Api20150701, Api201801Preview] + - DisplayName `String` + - Name `String` + - Operation `IProviderOperation[]` + +### RoleAssignment [Api20150701, Api20171001Preview, Api20180901Preview] + - CanDelegate `Boolean?` + - Id `String` + - Name `String` + - PrincipalId `String` + - PrincipalType `PrincipalType?` **{Application, DirectoryObjectOrGroup, DirectoryRoleTemplate, Everyone, ForeignGroup, Group, Msi, ServicePrincipal, Unknown, User}** + - RoleDefinitionId `String` + - Scope `String` + - Type `String` + +### RoleAssignmentCreateParameters [Api20150701, Api20171001Preview, Api20180901Preview] + - CanDelegate `Boolean?` + - PrincipalId `String` + - PrincipalType `PrincipalType?` **{Application, DirectoryObjectOrGroup, DirectoryRoleTemplate, Everyone, ForeignGroup, Group, Msi, ServicePrincipal, Unknown, User}** + - RoleDefinitionId `String` + +### RoleAssignmentListResult [Api20150701, Api20180901Preview] + - NextLink `String` + - Value `IRoleAssignment[]` + +### RoleAssignmentProperties [Api20150701, Api20171001Preview, Api20180901Preview] + - CanDelegate `Boolean?` + - PrincipalId `String` + - PrincipalType `PrincipalType?` **{Application, DirectoryObjectOrGroup, DirectoryRoleTemplate, Everyone, ForeignGroup, Group, Msi, ServicePrincipal, Unknown, User}** + - RoleDefinitionId `String` + +### RoleAssignmentPropertiesWithScope [Api20150701, Api20171001Preview, Api20180901Preview] + - CanDelegate `Boolean?` + - PrincipalId `String` + - PrincipalType `PrincipalType?` **{Application, DirectoryObjectOrGroup, DirectoryRoleTemplate, Everyone, ForeignGroup, Group, Msi, ServicePrincipal, Unknown, User}** + - RoleDefinitionId `String` + - Scope `String` + +### RoleDefinition [Api20150701, Api201801Preview] + - AssignableScope `String[]` + - Description `String` + - Id `String` + - Name `String` + - Permission `IPermission[]` + - RoleName `String` + - RoleType `String` + - Type `String` + +### RoleDefinitionListResult [Api20150701, Api201801Preview] + - NextLink `String` + - Value `IRoleDefinition[]` + +### RoleDefinitionProperties [Api20150701, Api201801Preview] + - AssignableScope `String[]` + - Description `String` + - Permission `IPermission[]` + - RoleName `String` + - RoleType `String` + +### ServicePrincipal \ [Api16] + - AccountEnabled `Boolean?` + - AlternativeName `String[]` + - AppDisplayName `String` + - AppId `String` + - AppOwnerTenantId `String` + - AppRole `IAppRole[]` + - AppRoleAssignmentRequired `Boolean?` + - DeletionTimestamp `DateTime?` **{MinValue, MaxValue}** + - DisplayName `String` + - ErrorUrl `String` + - Homepage `String` + - KeyCredentials `IKeyCredential[]` + - LogoutUrl `String` + - Name `String[]` + - Oauth2Permission `IOAuth2Permission[]` + - ObjectId `String` + - ObjectType `String` + - PasswordCredentials `IPasswordCredential[]` + - PreferredTokenSigningKeyThumbprint `String` + - PublisherName `String` + - ReplyUrl `String[]` + - SamlMetadataUrl `String` + - Tag `String[]` + - Type `String` + +### ServicePrincipalBase [Api16] + - AccountEnabled `Boolean?` + - AppRoleAssignmentRequired `Boolean?` + - KeyCredentials `IKeyCredential[]` + - PasswordCredentials `IPasswordCredential[]` + - ServicePrincipalType `String` + - Tag `String[]` + +### 
ServicePrincipalCreateParameters [Api16] + - AccountEnabled `Boolean?` + - AppId `String` + - AppRoleAssignmentRequired `Boolean?` + - KeyCredentials `IKeyCredential[]` + - PasswordCredentials `IPasswordCredential[]` + - ServicePrincipalType `String` + - Tag `String[]` + +### ServicePrincipalListResult [Api16] + - OdataNextLink `String` + - Value `IServicePrincipal[]` + +### ServicePrincipalObjectResult [Api16] + - OdataMetadata `String` + - Value `String` + +### ServicePrincipalUpdateParameters [Api16] + - AccountEnabled `Boolean?` + - AppRoleAssignmentRequired `Boolean?` + - KeyCredentials `IKeyCredential[]` + - PasswordCredentials `IPasswordCredential[]` + - ServicePrincipalType `String` + - Tag `String[]` + +### SignInName \ [Api16] + - Type `String` + - Value `String` + +### Sku [Api20160901Preview, Api20180501] + - Capacity `Int32?` + - Family `String` + - Model `String` + - Name `String` + - Size `String` + - Tier `String` + +### Subscription [Api20160601] + - AuthorizationSource `String` + - DisplayName `String` + - Id `String` + - PolicyLocationPlacementId `String` + - PolicyQuotaId `String` + - PolicySpendingLimit `SpendingLimit?` **{CurrentPeriodOff, Off, On}** + - State `SubscriptionState?` **{Deleted, Disabled, Enabled, PastDue, Warned}** + - SubscriptionId `String` + +### SubscriptionPolicies [Api20160601] + - LocationPlacementId `String` + - QuotaId `String` + - SpendingLimit `SpendingLimit?` **{CurrentPeriodOff, Off, On}** + +### TagCount [Api20180501] + - Type `String` + - Value `Int32?` + +### TagDetails [Api20180501] + - CountType `String` + - CountValue `Int32?` + - Id `String` + - TagName `String` + - Value `ITagValue[]` + +### TagsListResult [Api20180501] + - NextLink `String` + - Value `ITagDetails[]` + +### TagValue [Api20180501] + - CountType `String` + - CountValue `Int32?` + - Id `String` + - TagValue1 `String` + +### TargetResource [Api20180501] + - Id `String` + - ResourceName `String` + - ResourceType `String` + +### TemplateLink [Api20180501] + - ContentVersion `String` + - Uri `String` + +### TenantBackfillStatusResult [Api20180301Preview] + - Status `Status?` **{Cancelled, Completed, Failed, NotStarted, NotStartedButGroupsExist, Started}** + - TenantId `String` + +### TenantIdDescription [Api20160601] + - Id `String` + - TenantId `String` + +### TenantListResult [Api20160601] + - NextLink `String` + - Value `ITenantIdDescription[]` + +### User \ [Api16] + - AccountEnabled `Boolean?` + - DeletionTimestamp `DateTime?` **{MinValue, MaxValue}** + - DisplayName `String` + - GivenName `String` + - ImmutableId `String` + - Mail `String` + - MailNickname `String` + - ObjectId `String` + - ObjectType `String` + - PrincipalName `String` + - SignInName `ISignInName[]` + - Surname `String` + - Type `UserType?` **{Guest, Member}** + - UsageLocation `String` + +### UserBase \ [Api16] + - GivenName `String` + - ImmutableId `String` + - Surname `String` + - UsageLocation `String` + - UserType `UserType?` **{Guest, Member}** + +### UserCreateParameters \ [Api16] + - AccountEnabled `Boolean` + - DisplayName `String` + - GivenName `String` + - ImmutableId `String` + - Mail `String` + - MailNickname `String` + - PasswordProfile `IPasswordProfile ` + - Surname `String` + - UsageLocation `String` + - UserPrincipalName `String` + - UserType `UserType?` **{Guest, Member}** + +### UserGetMemberGroupsParameters \ [Api16] + - SecurityEnabledOnly `Boolean` + +### UserGetMemberGroupsResult [Api16] + - Value `String[]` + +### UserListResult [Api16] + - OdataNextLink `String` + - Value 
`IUser[]` + +### UserUpdateParameters \ [Api16] + - AccountEnabled `Boolean?` + - DisplayName `String` + - GivenName `String` + - ImmutableId `String` + - MailNickname `String` + - PasswordProfile `IPasswordProfile ` + - Surname `String` + - UsageLocation `String` + - UserPrincipalName `String` + - UserType `UserType?` **{Guest, Member}** + diff --git a/tests-upgrade/mysql/tools/Resources/resources/readme.md b/tests-upgrade/mysql/tools/Resources/resources/readme.md new file mode 100644 index 00000000000..937f07f8fec --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/resources/readme.md @@ -0,0 +1,11 @@ +# Resources +This directory can contain any additional resources for the module that are not required at runtime. This directory **does not** get packaged with the module. If you have assets for custom implementation, place them into the `..\custom` folder. + +## Info +- Modifiable: yes +- Generated: no +- Committed: yes +- Packaged: no + +## Purpose +Use this folder to put anything you want to keep around as part of the repository for the module but that is not required by the module itself. For example, development files, packaged builds, or additional information. This is only intended to be used in repositories where the module's output directory is cleaned, but tangential resources for the module should remain intact. \ No newline at end of file diff --git a/tests-upgrade/mysql/tools/Resources/test/readme.md b/tests-upgrade/mysql/tools/Resources/test/readme.md new file mode 100644 index 00000000000..7c752b4c8c4 --- /dev/null +++ b/tests-upgrade/mysql/tools/Resources/test/readme.md @@ -0,0 +1,17 @@ +# Test +This directory contains the [Pester](https://www.powershellgallery.com/packages/Pester) tests to run for the module. We use Pester as it is the unofficial standard for PowerShell unit testing. Test stubs for custom cmdlets (created in `..\custom`) will be generated into this folder when `build-module.ps1` is run. These test stubs will fail automatically to indicate that tests should be written for custom cmdlets. + +## Info +- Modifiable: yes +- Generated: partial +- Committed: yes +- Packaged: no + +## Details +We allow three testing modes: *live*, *record*, and *playback*. These can be selected using the `-Live`, `-Record`, and `-Playback` switches, respectively, on the `test-module.ps1` script. This script will run through any `.Tests.ps1` scripts in the `test` folder. If you choose the *record* mode, it will create a `.Recording.json` file of the REST calls between the client and server. Then, when you choose *playback* mode, it will use the `.Recording.json` file to mock the communication between server and client. The *live* mode runs the same as the *record* mode; however, it doesn't create the `.Recording.json` file. + +## Purpose +Custom cmdlets generally encompass additional functionality not described in the REST specification, or combine functionality generated from the REST spec. To validate that this functionality continues to operate as intended, creating tests that can be run and re-run against custom cmdlets is part of the framework. + +## Usage +To execute tests, run `test-module.ps1`. To write tests, [this example](https://github.com/pester/Pester/blob/8b9cf4248315e44f1ac6673be149f7e0d7f10466/Examples/Planets/Get-Planet.Tests.ps1#L1) from the Pester repository is very useful for getting started.
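As a rough illustration of the record/playback workflow described in the test readme, a minimal Pester test for a custom cmdlet might look like the sketch below. The cmdlet name `Get-AzDatabricksWorkspace`, the resource group name, and the workspace name are assumptions chosen only for this example; a generated module's test stubs will use its own cmdlet names and any scaffolding scripts the generator emits.

```powershell
# Hypothetical Pester test sketch; cmdlet and resource names are illustrative only.
# Record REST traffic once:          ./test-module.ps1 -Record
# Replay from .Recording.json later: ./test-module.ps1 -Playback
Describe 'Get-AzDatabricksWorkspace' {
    It 'lists workspaces in a resource group' {
        $workspaces = Get-AzDatabricksWorkspace -ResourceGroupName 'test-rg'
        $workspaces | Should -Not -BeNullOrEmpty
    }

    It 'gets a single workspace by name' {
        $workspace = Get-AzDatabricksWorkspace -ResourceGroupName 'test-rg' -Name 'myWorkspace'
        $workspace.Name | Should -Be 'myWorkspace'
    }
}
```

In record mode this test would hit the live service and capture the REST exchange; in playback mode the same assertions run against the captured `.Recording.json`, so no Azure subscription is needed.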
\ No newline at end of file diff --git a/tests-upgrade/readme.azure.noprofile.md b/tests-upgrade/readme.azure.noprofile.md new file mode 100644 index 00000000000..82284693a17 --- /dev/null +++ b/tests-upgrade/readme.azure.noprofile.md @@ -0,0 +1,47 @@ +# Azure PowerShell AutoRest Configuration + +> Values +``` yaml +azure: true +powershell: true +license-header: MICROSOFT_MIT_NO_VERSION +branch: master +repo: https://github.com/Azure/azure-rest-api-specs/blob/$(branch) +metadata: + authors: Microsoft Corporation + owners: Microsoft Corporation + description: 'Microsoft Azure PowerShell: $(service-name) cmdlets' + copyright: Microsoft Corporation. All rights reserved. + tags: Azure ResourceManager ARM PSModule $(service-name) + companyName: Microsoft Corporation + requireLicenseAcceptance: true + licenseUri: https://aka.ms/azps-license + projectUri: https://github.com/Azure/azure-powershell +``` + +> Names +``` yaml +prefix: Az +subject-prefix: $(service-name) +module-name: $(prefix).$(service-name) +namespace: Microsoft.Azure.PowerShell.Cmdlets.$(service-name) +``` + +> Folders +``` yaml +clear-output-folder: true +output-folder: . +``` + +> Directives +``` yaml +directive: + - where: + subject: Operation + hide: true + - where: + parameter-name: SubscriptionId + set: + default: + script: '(Get-AzContext).Subscription.Id' +``` \ No newline at end of file diff --git a/tests-upgrade/sample-swagger.json b/tests-upgrade/sample-swagger.json new file mode 100644 index 00000000000..8db4896d3dd --- /dev/null +++ b/tests-upgrade/sample-swagger.json @@ -0,0 +1,1266 @@ +{ + "swagger": "2.0", + "info": { + "title": "DatabricksClient", + "version": "2018-04-01", + "description": "ARM Databricks" + }, + "host": "management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Get", + "description": "Gets the workspace.", + "x-ms-examples": { + "Get a workspace": { + "$ref": "./examples/WorkspaceGet.json" + }, + "Get a workspace with custom parameters": { + "$ref": "./examples/WorkspaceGetParameters.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns the workspace.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + } + }, + "delete": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Delete", + "description": "Deletes the workspace.", + "x-ms-examples": { + "Delete a workspace": { + "$ref": "./examples/WorkspaceDelete.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": 
"#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "204": { + "description": "NoContent" + }, + "202": { + "description": "Accepted - Returns this status until the asynchronous operation has completed." + }, + "200": { + "description": "OK. The request has succeeded." + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + }, + "put": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_CreateOrUpdate", + "description": "Creates a new workspace.", + "x-ms-examples": { + "Create or update workspace": { + "$ref": "./examples/WorkspaceCreate.json" + }, + "Create or update workspace with custom parameters": { + "$ref": "./examples/WorkspaceCreateWithParameters.json" + }, + "Create a workspace which is ready for Customer-Managed Key (CMK) encryption": { + "$ref": "./examples/PrepareEncryption.json" + }, + "Enable Customer-Managed Key (CMK) encryption on a workspace which is prepared for encryption": { + "$ref": "./examples/EnableEncryption.json" + }, + "Revert Customer-Managed Key (CMK) encryption to Microsoft Managed Keys encryption on a workspace": { + "$ref": "./examples/DisableEncryption.json" + } + }, + "parameters": [ + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/Workspace" + }, + "description": "Parameters supplied to the create or update a workspace." + }, + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Created - Returns information about the workspace, including provisioning status.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "201": { + "description": "Created - Returns information about the workspace, including provisioning status.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + }, + "patch": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_Update", + "description": "Updates a workspace.", + "parameters": [ + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/WorkspaceUpdate" + }, + "description": "The update to the workspace." 
+ }, + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "Successfully updated the workspace.", + "schema": { + "$ref": "#/definitions/Workspace" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + }, + "202": { + "description": "Accepted" + } + }, + "x-ms-long-running-operation": true, + "x-ms-examples": { + "Update a workspace's tags.": { + "$ref": "./examples/WorkspaceUpdate.json" + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListByResourceGroup", + "description": "Gets all the workspaces within a resource group.", + "x-ms-examples": { + "Lists workspaces": { + "$ref": "./examples/WorkspacesListByResourceGroup.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns an array of workspaces.", + "schema": { + "$ref": "#/definitions/WorkspaceListResult" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces": { + "get": { + "tags": [ + "Workspaces" + ], + "operationId": "Workspaces_ListBySubscription", + "description": "Gets all the workspaces within a subscription.", + "x-ms-examples": { + "Lists workspaces": { + "$ref": "./examples/WorkspacesListBySubscription.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "OK - Returns an array of workspaces.", + "schema": { + "$ref": "#/definitions/WorkspaceListResult" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}": { + "get": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_Get", + "description": "Gets the workspace vNet Peering.", + "x-ms-examples": { + "Get a workspace with vNet Peering Configured": { + "$ref": "./examples/WorkspaceVirtualNetPeeringGet.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "200": { + "description": "OK. 
The request has succeeded.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + } + }, + "204": { + "description": "OK - NoContent" + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + } + }, + "delete": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_Delete", + "description": "Deletes the workspace vNetPeering.", + "x-ms-examples": { + "Delete a workspace vNet Peering": { + "$ref": "./examples/WorkspaceVirtualNetworkPeeringDelete.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "202": { + "description": "Accepted - Returns this status until the asynchronous operation has completed." + }, + "204": { + "description": "NoContent" + }, + "200": { + "description": "OK. The request has succeeded." + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + }, + "put": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_CreateOrUpdate", + "description": "Creates vNet Peering for workspace.", + "x-ms-examples": { + "Create vNet Peering for Workspace": { + "$ref": "./examples/WorkspaceVirtualNetworkPeeringCreateOrUpdate.json" + } + }, + "parameters": [ + { + "name": "VirtualNetworkPeeringParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + }, + "description": "Parameters supplied to the create workspace vNet Peering." + }, + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/PeeringNameParameter" + } + ], + "responses": { + "201": { + "description": "Created - Returns information about the workspace vNet peering, including provisioning status.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + } + }, + "200": { + "description": "Update succeeded - Returns information about the workspace vNet peering.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeering" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-long-running-operation": true + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings": { + "get": { + "tags": [ + "vNetPeering" + ], + "operationId": "vNetPeering_ListByWorkspace", + "description": "Lists the workspace vNet Peerings.", + "x-ms-examples": { + "List all vNet Peerings for the workspace": { + "$ref": "./examples/WorkspaceVirtualNetPeeringList.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ResourceGroupName" + }, + { + "$ref": "#/parameters/WorkspaceName" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/SubscriptionIdParameter" + } + ], + "responses": { + "200": { + "description": "List succeeded. 
Returns the resulting resource objects in response body.", + "schema": { + "$ref": "#/definitions/VirtualNetworkPeeringList" + } + }, + "default": { + "description": "Error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/providers/Microsoft.Databricks/operations": { + "get": { + "tags": [ + "Operations" + ], + "description": "Lists all of the available RP operations.", + "x-ms-examples": { + "Operations": { + "$ref": "./examples/OperationsList.json" + } + }, + "operationId": "Operations_List", + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "OK. The request has succeeded.", + "schema": { + "$ref": "#/definitions/OperationListResult" + } + }, + "default": { + "description": "Resource Provider error response describing why the operation failed.", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + } + }, + "definitions": { + "Workspace": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/WorkspaceProperties", + "description": "The workspace properties." + }, + "sku": { + "$ref": "#/definitions/Sku", + "description": "The SKU of the resource." + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "description": "Information about workspace." + }, + "WorkspaceProperties": { + "properties": { + "managedResourceGroupId": { + "type": "string", + "description": "The managed resource group Id." + }, + "parameters": { + "$ref": "#/definitions/WorkspaceCustomParameters", + "description": "The workspace's custom parameters." + }, + "provisioningState": { + "readOnly": true, + "$ref": "#/definitions/ProvisioningState", + "description": "The workspace provisioning state." + }, + "uiDefinitionUri": { + "type": "string", + "description": "The blob URI where the UI definition file is located." + }, + "authorizations": { + "description": "The workspace provider authorizations.", + "type": "array", + "items": { + "$ref": "#/definitions/WorkspaceProviderAuthorization" + } + }, + "createdBy": { + "description": "Indicates the Object ID, PUID and Application ID of entity that created the workspace.", + "$ref": "#/definitions/CreatedBy" + }, + "updatedBy": { + "description": "Indicates the Object ID, PUID and Application ID of entity that last updated the workspace.", + "$ref": "#/definitions/CreatedBy" + }, + "createdDateTime": { + "description": "Specifies the date and time when the workspace is created.", + "$ref": "#/definitions/CreatedDateTime" + }, + "workspaceId": { + "readOnly": true, + "description": "The unique identifier of the databricks workspace in databricks control plane.", + "type": "string" + }, + "workspaceUrl": { + "readOnly": true, + "description": "The workspace URL which is of the format 'adb-{workspaceId}.{random}.azuredatabricks.net'", + "type": "string" + }, + "storageAccountIdentity": { + "description": "The details of Managed Identity of Storage Account", + "$ref": "#/definitions/ManagedIdentityConfiguration" + } + }, + "required": [ + "managedResourceGroupId" + ], + "description": "The workspace properties." 
+ }, + "TrackedResource": { + "description": "The resource model definition for a ARM tracked top level resource", + "properties": { + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-ms-mutability": [ + "read", + "create", + "update" + ], + "description": "Resource tags." + }, + "location": { + "type": "string", + "x-ms-mutability": [ + "read", + "create" + ], + "description": "The geo-location where the resource lives" + } + }, + "required": [ + "location" + ], + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ] + }, + "Resource": { + "description": "The core properties of ARM resources", + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "The name of the resource" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts." + } + }, + "x-ms-azure-resource": true + }, + "Sku": { + "properties": { + "name": { + "type": "string", + "description": "The SKU name." + }, + "tier": { + "type": "string", + "description": "The SKU tier." + } + }, + "required": [ + "name" + ], + "description": "SKU for the resource." + }, + "ProvisioningState": { + "description": "Provisioning status of the workspace.", + "readOnly": true, + "enum": [ + "Accepted", + "Running", + "Ready", + "Creating", + "Created", + "Deleting", + "Deleted", + "Canceled", + "Failed", + "Succeeded", + "Updating" + ], + "type": "string", + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": true + } + }, + "PeeringProvisioningState": { + "type": "string", + "readOnly": true, + "description": "The current provisioning state.", + "enum": [ + "Succeeded", + "Updating", + "Deleting", + "Failed" + ], + "x-ms-enum": { + "name": "PeeringProvisioningState", + "modelAsString": true + } + }, + "ManagedIdentityConfiguration": { + "description": "The Managed Identity details for storage account.", + "properties": { + "principalId": { + "type": "string", + "format": "uuid", + "description": "The objectId of the Managed Identity that is linked to the Managed Storage account.", + "readOnly": true + }, + "tenantId": { + "type": "string", + "format": "uuid", + "description": "The tenant Id where the Managed Identity is created.", + "readOnly": true + }, + "type": { + "type": "string", + "description": "The type of Identity created. It can be either SystemAssigned or UserAssigned.", + "readOnly": true + } + } + }, + "WorkspaceCustomParameterType": { + "description": "Provisioning status of the workspace.", + "readOnly": true, + "enum": [ + "Bool", + "Object", + "String" + ], + "type": "string", + "x-ms-enum": { + "name": "CustomParameterType", + "modelAsString": true + } + }, + "WorkspaceCustomBooleanParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "type": "boolean", + "description": "The value which should be used for this field." + } + }, + "required": [ + "value" + ], + "description": "The value which should be used for this field." 
+ }, + "WorkspaceCustomObjectParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "type": "object", + "description": "The value which should be used for this field." + } + }, + "required": [ + "value" + ], + "description": "The value which should be used for this field." + }, + "WorkspaceCustomStringParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "type": "string", + "description": "The value which should be used for this field." + } + }, + "required": [ + "value" + ], + "description": "The Value." + }, + "WorkspaceCustomParameters": { + "properties": { + "customVirtualNetworkId": { + "$ref": "#/definitions/WorkspaceCustomStringParameter", + "description": "The ID of a Virtual Network where this Databricks Cluster should be created" + }, + "customPublicSubnetName": { + "$ref": "#/definitions/WorkspaceCustomStringParameter", + "description": "The name of a Public Subnet within the Virtual Network" + }, + "customPrivateSubnetName": { + "$ref": "#/definitions/WorkspaceCustomStringParameter", + "description": "The name of the Private Subnet within the Virtual Network" + }, + "enableNoPublicIp": { + "$ref": "#/definitions/WorkspaceCustomBooleanParameter", + "description": "Should the Public IP be Disabled?" + }, + "prepareEncryption": { + "$ref": "#/definitions/WorkspaceCustomBooleanParameter", + "description": "Prepare the workspace for encryption. Enables the Managed Identity for managed storage account." + }, + "encryption": { + "$ref": "#/definitions/WorkspaceEncryptionParameter", + "description": "Contains the encryption details for Customer-Managed Key (CMK) enabled workspace." + } + }, + "description": "Custom Parameters used for Cluster Creation." + }, + "CreatedDateTime": { + "type": "string", + "format": "date-time", + "description": "The date and time stamp when the workspace was created.", + "readOnly": true + }, + "CreatedBy": { + "properties": { + "oid": { + "type": "string", + "format": "uuid", + "description": "The Object ID that created the workspace.", + "readOnly": true + }, + "puid": { + "type": "string", + "description": "The Personal Object ID corresponding to the object ID above", + "readOnly": true + }, + "applicationId": { + "type": "string", + "format": "uuid", + "description": "The application ID of the application that initiated the creation of the workspace. For example, Azure Portal.", + "readOnly": true + } + }, + "description": "Provides details of the entity that created/updated the workspace." + }, + "WorkspaceEncryptionParameter": { + "properties": { + "type": { + "$ref": "#/definitions/WorkspaceCustomParameterType", + "description": "The type of variable that this is" + }, + "value": { + "$ref": "#/definitions/Encryption", + "description": "The value which should be used for this field." + } + }, + "description": "The object that contains details of encryption used on the workspace." + }, + "Encryption": { + "properties": { + "keySource": { + "type": "string", + "description": "The encryption keySource (provider). Possible values (case-insensitive): Default, Microsoft.Keyvault", + "enum": [ + "Default", + "Microsoft.Keyvault" + ], + "x-ms-enum": { + "name": "KeySource", + "modelAsString": true + }, + "default": "Default" + }, + "KeyName": { + "type": "string", + "description": "The name of KeyVault key." 
+ }, + "keyversion": { + "type": "string", + "description": "The version of KeyVault key.", + "x-ms-client-name": "KeyVersion" + }, + "keyvaulturi": { + "type": "string", + "description": "The Uri of KeyVault.", + "x-ms-client-name": "KeyVaultUri" + } + }, + "description": "The object that contains details of encryption used on the workspace." + }, + "WorkspaceProviderAuthorization": { + "properties": { + "principalId": { + "type": "string", + "format": "uuid", + "description": "The provider's principal identifier. This is the identity that the provider will use to call ARM to manage the workspace resources." + }, + "roleDefinitionId": { + "type": "string", + "format": "uuid", + "description": "The provider's role definition identifier. This role will define all the permissions that the provider must have on the workspace's container resource group. This role definition cannot have permission to delete the resource group." + } + }, + "required": [ + "principalId", + "roleDefinitionId" + ], + "description": "The workspace provider authorization." + }, + "WorkspaceUpdate": { + "description": "An update to a workspace.", + "type": "object", + "properties": { + "tags": { + "description": "Resource tags.", + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "ErrorDetail": { + "title": "Error details.", + "type": "object", + "properties": { + "code": { + "description": "The error's code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "target": { + "description": "Indicates which property in the request is responsible for the error.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorInfo": { + "title": "The code and message for an error.", + "type": "object", + "properties": { + "code": { + "description": "A machine readable error code.", + "type": "string" + }, + "message": { + "description": "A human readable error message.", + "type": "string" + }, + "details": { + "description": "error details.", + "type": "array", + "items": { + "$ref": "#/definitions/ErrorDetail" + } + }, + "innererror": { + "description": "Inner error details if they exist.", + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + "ErrorResponse": { + "title": "Error response.", + "description": "Contains details when the response code indicates an error.", + "type": "object", + "properties": { + "error": { + "description": "The error details.", + "$ref": "#/definitions/ErrorInfo" + } + }, + "required": [ + "error" + ] + }, + "WorkspaceListResult": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Workspace" + }, + "description": "The array of workspaces." + }, + "nextLink": { + "type": "string", + "description": "The URL to use for getting the next set of results." + } + }, + "description": "List of workspaces." 
+ }, + "Operation": { + "description": "REST API operation", + "type": "object", + "properties": { + "name": { + "description": "Operation name: {provider}/{resource}/{operation}", + "type": "string" + }, + "display": { + "description": "The object that represents the operation.", + "properties": { + "provider": { + "description": "Service provider: Microsoft.ResourceProvider", + "type": "string" + }, + "resource": { + "description": "Resource on which the operation is performed.", + "type": "string" + }, + "operation": { + "description": "Operation type: Read, write, delete, etc.", + "type": "string" + } + } + } + } + }, + "OperationListResult": { + "description": "Result of the request to list Resource Provider operations. It contains a list of operations and a URL link to get the next set of results.", + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/Operation" + }, + "description": "List of Resource Provider operations supported by the Resource Provider resource provider." + }, + "nextLink": { + "type": "string", + "description": "URL to get the next set of operation list results if there are any." + } + } + }, + "VirtualNetworkPeeringList": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/VirtualNetworkPeering" + }, + "description": "List of virtual network peerings on workspace." + }, + "nextLink": { + "type": "string", + "description": "URL to get the next set of virtual network peering list results if there are any." + } + }, + "description": "Gets all virtual network peerings under a workspace." + }, + "VirtualNetworkPeering": { + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/VirtualNetworkPeeringPropertiesFormat", + "description": "List of properties for vNet Peering" + }, + "name": { + "type": "string", + "description": "Name of the virtual network peering resource", + "readOnly": true + }, + "id": { + "type": "string", + "description": "Resource ID.", + "readOnly": true + }, + "type": { + "type": "string", + "description": "type of the virtual network peering resource", + "readOnly": true + } + }, + "required": [ + "properties" + ], + "description": "Peerings in a VirtualNetwork resource", + "x-ms-azure-resource": true + }, + "VirtualNetworkPeeringPropertiesFormat": { + "properties": { + "allowVirtualNetworkAccess": { + "type": "boolean", + "description": "Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space." + }, + "allowForwardedTraffic": { + "type": "boolean", + "description": "Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network." + }, + "allowGatewayTransit": { + "type": "boolean", + "description": "If gateway links can be used in remote virtual networking to link to this virtual network." + }, + "useRemoteGateways": { + "type": "boolean", + "description": "If remote gateways can be used on this virtual network. If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network already has a gateway." + }, + "databricksVirtualNetwork": { + "properties": { + "id": { + "type": "string", + "description": "The Id of the databricks virtual network." + } + }, + "description": " The remote virtual network should be in the same region. 
See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)." + }, + "databricksAddressSpace": { + "$ref": "#/definitions/AddressSpace", + "description": "The reference to the databricks virtual network address space." + }, + "remoteVirtualNetwork": { + "properties": { + "id": { + "type": "string", + "description": "The Id of the remote virtual network." + } + }, + "description": " The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering)." + }, + "remoteAddressSpace": { + "$ref": "#/definitions/AddressSpace", + "description": "The reference to the remote virtual network address space." + }, + "peeringState": { + "type": "string", + "description": "The status of the virtual network peering.", + "enum": [ + "Initiated", + "Connected", + "Disconnected" + ], + "x-ms-enum": { + "name": "peeringState", + "modelAsString": true + }, + "readOnly": true + }, + "provisioningState": { + "readOnly": true, + "$ref": "#/definitions/PeeringProvisioningState", + "description": "The provisioning state of the virtual network peering resource." + } + }, + "required": [ + "remoteVirtualNetwork" + ], + "description": "Properties of the virtual network peering." + }, + "AddressSpace": { + "properties": { + "addressPrefixes": { + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of address blocks reserved for this virtual network in CIDR notation." + } + }, + "description": "AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network." + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "required": true, + "type": "string", + "description": "The ID of the target subscription." + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "The API version to use for this operation." + }, + "ResourceGroupName": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the resource group. The name is case insensitive.", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "WorkspaceName": { + "name": "workspaceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace.", + "minLength": 3, + "maxLength": 64, + "x-ms-parameter-location": "method" + }, + "PeeringNameParameter": { + "name": "peeringName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the workspace vNet peering.", + "x-ms-parameter-location": "method" + } + } + } \ No newline at end of file diff --git a/tests-upgrade/tsi/readme.md b/tests-upgrade/tsi/readme.md new file mode 100644 index 00000000000..254dcdea73a --- /dev/null +++ b/tests-upgrade/tsi/readme.md @@ -0,0 +1,130 @@ + +# Az.TimeSeriesInsights +This directory contains the PowerShell module for the TimeSeriesInsights service. 
+ +--- +## Status +[![Az.TimeSeriesInsights](https://img.shields.io/powershellgallery/v/Az.TimeSeriesInsights.svg?style=flat-square&label=Az.TimeSeriesInsights "Az.TimeSeriesInsights")](https://www.powershellgallery.com/packages/Az.TimeSeriesInsights/) + +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 1.7.4 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.TimeSeriesInsights`, see [how-to.md](how-to.md). + + +--- +## Generation Requirements +Use of the beta version of `autorest.powershell` generator requires the following: +- [NodeJS LTS](https://nodejs.org) (10.15.x LTS preferred) + - **Note**: It *will not work* with Node < 10.x. Using 11.x builds may cause issues as they may introduce instability or breaking changes. +> If you want an easy way to install and update Node, [NVS - Node Version Switcher](../nodejs/installing-via-nvs.md) or [NVM - Node Version Manager](../nodejs/installing-via-nvm.md) is recommended. +- [AutoRest](https://aka.ms/autorest) v3 beta
`npm install -g autorest@beta`
  +- PowerShell 6.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g pwsh`
  +- .NET Core SDK 2.0 or greater + - If you don't have it installed, you can use the cross-platform npm package
`npm install -g dotnet-sdk-2.2`
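+
+Below is a minimal, illustrative PowerShell sketch (not part of the generated module) for sanity-checking the requirements above before running the generator. It only mirrors the version thresholds listed here and assumes `node`, `autorest`, and `dotnet` are expected on PATH.
+
+``` powershell
+# Check Node.js (the generator requires the 10.x LTS line).
+if (Get-Command node -ErrorAction SilentlyContinue) {
+  $node = (node --version).TrimStart('v')
+  if ([version]$node -lt [version]'10.0.0') {
+    Write-Warning "Node $node found; Node 10.x LTS is required."
+  }
+} else {
+  Write-Warning 'Node.js was not found on PATH.'
+}
+
+# Check that the AutoRest CLI is installed globally.
+if (-not (Get-Command autorest -ErrorAction SilentlyContinue)) {
+  Write-Warning "AutoRest not found; install it with 'npm install -g autorest@beta'."
+}
+
+# Check the PowerShell version (6.0 or greater).
+if ($PSVersionTable.PSVersion.Major -lt 6) {
+  Write-Warning "PowerShell $($PSVersionTable.PSVersion) found; PowerShell 6.0 or greater is required."
+}
+
+# Check that the .NET Core SDK is available.
+if (-not (Get-Command dotnet -ErrorAction SilentlyContinue)) {
+  Write-Warning '.NET Core SDK not found; version 2.0 or greater is required.'
+}
+```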
  + +## Run Generation +In this directory, run AutoRest: +> `autorest-beta` + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +``` yaml +branch: powershell +require: + - $(this-folder)/../readme.azure.noprofile.md +input-file: + - ./timeseriesinsights.json + +module-version: 0.0.1 +title: TimeSeriesInsights +subject-prefix: $(service-name) + +directive: + # Fix errors in swagger + - from: swagger-document + where: $ + transform: return $.replace(/Microsoft.IotHub/g, "Microsoft.IoTHub") + - from: swagger-document + where: $ + transform: return $.replace(/\/eventSources\//g, "/eventsources/") + - from: swagger-document + where: $ + transform: return $.replace(/\/accessPolicies\//g, "/accesspolicies/") + # Remove the unneeded parameter set + - where: + variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$|^Update$|^UpdateViaIdentity$ + remove: true + - where: + subject: ReferenceDataSet|AccessPolicy + variant: ^Create$ + remove: true + - where: + subject: EventSource|Environment + variant: ^Create$|^CreateExpanded$ + hide: true + # Remove the set-* cmdlet + - where: + verb: Set + remove: true + # Hide the operation cmdlet + - where: + subject: Operation + hide: true + # correct some names + - where: + parameter-name: SkuCapacity + set: + parameter-name: Capacity + - where: + parameter-name: SkuName + set: + parameter-name: Sku + # Suppress the table format + - where: + model-name: StandardEnvironmentResource + set: + suppress-format: true + - where: + model-name: LongTermEnvironmentResource + set: + suppress-format: true + - where: + model-name: EventHubEventSourceResource + set: + suppress-format: true + - where: + model-name: IoTHubEventSourceResource + set: + suppress-format: true + # Correct some generated code + - from: source-file-csharp + where: $ + transform: $ = $.replace('internal Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.IStandardEnvironmentCreationProperties Property', 'public Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.IStandardEnvironmentCreationProperties Property'); + - from: source-file-csharp + where: $ + transform: $ = $.replace('internal Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.ILongTermEnvironmentCreationProperties Property', 'public Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.ILongTermEnvironmentCreationProperties Property'); + - from: source-file-csharp + where: $ + transform: $ = $.replace('internal Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.IEventHubEventSourceCreationProperties Property', 'public Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.IEventHubEventSourceCreationProperties Property'); + - from: source-file-csharp + where: $ + transform: $ = $.replace('internal Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.IIoTHubEventSourceCreationProperties Property', 'public Microsoft.Azure.PowerShell.Cmdlets.TimeSeriesInsights.Models.Api20180815Preview.IIoTHubEventSourceCreationProperties Property'); +``` diff --git a/tests-upgrade/tsi/timeseriesinsights.json b/tests-upgrade/tsi/timeseriesinsights.json new file mode 100644 index 00000000000..240dd0cb741 --- /dev/null +++ b/tests-upgrade/tsi/timeseriesinsights.json @@ -0,0 +1,2061 @@ +{ + "swagger": "2.0", + "info": { + "title": "TimeSeriesInsightsClient", + "description": "Time Series Insights client", + "version": "2017-02-28-preview" + }, + "host": 
"management.azure.com", + "schemes": [ + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/providers/Microsoft.TimeSeriesInsights/operations": { + "get": { + "tags": [ + "Operations" + ], + "operationId": "Operations_List", + "description": "Lists all of the available Time Series Insights related operations.", + "x-ms-examples": { + "List available operations for the Time Series Insights resource provider": { + "$ref": "./examples/Operation_List.json" + } + }, + "parameters": [ + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Successfully listed the available operations.", + "schema": { + "$ref": "#/definitions/OperationListResult" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}": { + "put": { + "tags": [ + "Environments" + ], + "operationId": "Environments_CreateOrUpdate", + "x-ms-examples": { + "EnvironmentsCreate": { + "$ref": "./examples/EnvironmentsCreate.json" + } + }, + "x-ms-long-running-operation": true, + "description": "Create or update an environment in the specified subscription and resource group.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "name": "environmentName", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "description": "Name of the environment" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/EnvironmentCreateOrUpdateParameters" + }, + "description": "Parameters for creating an environment resource." + } + ], + "responses": { + "200": { + "description": "The existing environment definition was successfully updated.", + "schema": { + "$ref": "#/definitions/EnvironmentResource" + } + }, + "201": { + "description": "The environment create request was accepted. Environment provisioning is an asynchronous operation. You can periodically get your environment definition and monitor progress via the provisioningState property.", + "schema": { + "$ref": "#/definitions/EnvironmentResource" + } + }, + "404": { + "description": "The subscription or resource group could not be found." 
+ }, + "default": { + "description": "HTTP 400 (Bad Request): The given environment request body is invalid; See the error code and message in the response for details.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "get": { + "tags": [ + "Environments" + ], + "operationId": "Environments_Get", + "x-ms-examples": { + "EnvironmentsGet": { + "$ref": "./examples/EnvironmentsGet.json" + } + }, + "description": "Gets the environment with the specified name in the specified subscription and resource group.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The environment definition was successfully retrieved and is in the response. If you are polling for the completion of a provisioning or scale operation, you can check its status via the provisioningState property.", + "schema": { + "$ref": "#/definitions/EnvironmentResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, or environment could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "patch": { + "tags": [ + "Environments" + ], + "operationId": "Environments_Update", + "x-ms-examples": { + "EnvironmentsUpdate": { + "$ref": "./examples/EnvironmentsPatchSkuCapacity.json" + } + }, + "x-ms-long-running-operation": true, + "description": "Updates the environment with the specified name in the specified subscription and resource group.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "name": "environmentUpdateParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/EnvironmentUpdateParameters" + }, + "description": "Request object that contains the updated information for the environment." + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The environment definition was successfully updated and is in the response. If the environment was updated synchronously, the response will include a provisioningState value of \"Succeeded\". If the environment was updated asynchronously, the response will include a provisioningState value of \"Updating\". 
You can periodically get your environment definition and monitor progress of the update via the provisioningState property.", + "schema": { + "$ref": "#/definitions/EnvironmentResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, or environment could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "delete": { + "tags": [ + "Environments" + ], + "operationId": "Environments_Delete", + "x-ms-examples": { + "EnvironmentsDelete": { + "$ref": "./examples/EnvironmentsDelete.json" + } + }, + "description": "Deletes the environment with the specified name in the specified subscription and resource group.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The environment was successfully deleted." + }, + "204": { + "description": "The environment was successfully deleted." + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, or environment could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments": { + "get": { + "tags": [ + "Environments" + ], + "operationId": "Environments_ListByResourceGroup", + "x-ms-examples": { + "EnvironmentsByResourceGroup": { + "$ref": "./examples/EnvironmentsListByResourceGroup.json" + } + }, + "description": "Lists all the available environments associated with the subscription and within the specified resource group.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Environments returned successfully.", + "schema": { + "$ref": "#/definitions/EnvironmentListResponse" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, or resource group could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/providers/Microsoft.TimeSeriesInsights/environments": { + "get": { + "tags": [ + "Environments" + ], + "operationId": "Environments_ListBySubscription", + "x-ms-examples": { + "EnvironmentsBySubscription": { + "$ref": "./examples/EnvironmentsListBySubscription.json" + } + }, + "description": "Lists all the available environments within a subscription, irrespective of the resource groups.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Environments returned successfully.", + "schema": { + "$ref": "#/definitions/EnvironmentListResponse" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/eventSources/{eventSourceName}": { + "put": { + "tags": [ + "EventSources" + ], + "operationId": "EventSources_CreateOrUpdate", + "description": "Create or update an event source under the 
specified environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "name": "eventSourceName", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "description": "Name of the event source." + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/EventSourceCreateOrUpdateParameters" + }, + "description": "Parameters for creating an event source resource." + } + ], + "responses": { + "200": { + "description": "The existing event source definition was successfully updated.", + "schema": { + "$ref": "#/definitions/EventSourceResource" + } + }, + "201": { + "description": "The event source was successfully created.", + "schema": { + "$ref": "#/definitions/EventSourceResource" + } + }, + "default": { + "description": "HTTP 400 (Bad Request): The given event source request body is invalid; See the error code and message in the response for details.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "get": { + "tags": [ + "EventSources" + ], + "operationId": "EventSources_Get", + "description": "Gets the event source with the specified name in the specified environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/EventSourceNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The event source definition was successfully retrieved and is in the response.", + "schema": { + "$ref": "#/definitions/EventSourceResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or event source could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "patch": { + "tags": [ + "EventSources" + ], + "operationId": "EventSources_Update", + "description": "Updates the event source with the specified name in the specified subscription, resource group, and environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/EventSourceNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "eventSourceUpdateParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/EventSourceUpdateParameters" + }, + "description": "Request object that contains the updated information for the event source." 
+ } + ], + "responses": { + "200": { + "description": "The event source definition was successfully updated and is in the response.", + "schema": { + "$ref": "#/definitions/EventSourceResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or event source could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "delete": { + "tags": [ + "EventSources" + ], + "operationId": "EventSources_Delete", + "description": "Deletes the event source with the specified name in the specified subscription, resource group, and environment", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/EventSourceNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The event source was successfully deleted." + }, + "204": { + "description": "The event source was successfully deleted." + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or event source could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/eventSources": { + "get": { + "tags": [ + "EventSources" + ], + "operationId": "EventSources_ListByEnvironment", + "description": "Lists all the available event sources associated with the subscription and within the specified resource group and environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Environments returned successfully.", + "schema": { + "$ref": "#/definitions/EventSourceListResponse" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, or environment could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/referenceDataSets/{referenceDataSetName}": { + "put": { + "tags": [ + "ReferenceDataSets" + ], + "operationId": "ReferenceDataSets_CreateOrUpdate", + "x-ms-examples": { + "ReferenceDataSetsCreate": { + "$ref": "./examples/ReferenceDataSetsCreate.json" + } + }, + "description": "Create or update a reference data set in the specified environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "name": "referenceDataSetName", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[A-Za-z0-9]", + "minLength": 3, + "maxLength": 63, + "description": "Name of the reference data set." + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ReferenceDataSetCreateOrUpdateParameters" + }, + "description": "Parameters for creating a reference data set." 
+ } + ], + "responses": { + "200": { + "description": "The existing reference data set definition was successfully updated.", + "schema": { + "$ref": "#/definitions/ReferenceDataSetResource" + } + }, + "201": { + "description": "The reference data set was successfully created.", + "schema": { + "$ref": "#/definitions/ReferenceDataSetResource" + } + }, + "default": { + "description": "HTTP 400 (Bad Request): The given reference data set request body is invalid; See the error code and message in the response for details.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "get": { + "tags": [ + "ReferenceDataSets" + ], + "operationId": "ReferenceDataSets_Get", + "x-ms-examples": { + "ReferenceDataSetsGet": { + "$ref": "./examples/ReferenceDataSetsGet.json" + } + }, + "description": "Gets the reference data set with the specified name in the specified environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ReferenceDataSetNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The reference data set definition was successfully retrieved and is in the response.", + "schema": { + "$ref": "#/definitions/ReferenceDataSetResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or reference data set could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "patch": { + "tags": [ + "ReferenceDataSets" + ], + "operationId": "ReferenceDataSets_Update", + "x-ms-examples": { + "ReferenceDataSetsUpdate": { + "$ref": "./examples/ReferenceDataSetsPatchTags.json" + } + }, + "description": "Updates the reference data set with the specified name in the specified subscription, resource group, and environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ReferenceDataSetNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "referenceDataSetUpdateParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ReferenceDataSetUpdateParameters" + }, + "description": "Request object that contains the updated information for the reference data set." 
+ } + ], + "responses": { + "200": { + "description": "The reference data set definition was successfully updated and is in the response.", + "schema": { + "$ref": "#/definitions/ReferenceDataSetResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or reference data set could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "delete": { + "tags": [ + "ReferenceDataSets" + ], + "operationId": "ReferenceDataSets_Delete", + "x-ms-examples": { + "ReferenceDataSetsDelete": { + "$ref": "./examples/ReferenceDataSetsDelete.json" + } + }, + "description": "Deletes the reference data set with the specified name in the specified subscription, resource group, and environment", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ReferenceDataSetNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The reference data set was successfully deleted." + }, + "204": { + "description": "The reference data set was successfully deleted." + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or reference data set could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/referenceDataSets": { + "get": { + "tags": [ + "ReferenceDataSets" + ], + "operationId": "ReferenceDataSets_ListByEnvironment", + "x-ms-examples": { + "ReferenceDataSetsListByEnvironment": { + "$ref": "./examples/ReferenceDataSetsListByEnvironment.json" + } + }, + "description": "Lists all the available reference data sets associated with the subscription and within the specified resource group and environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "Reference data sets returned successfully.", + "schema": { + "$ref": "#/definitions/ReferenceDataSetListResponse" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, or environment could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/accessPolicies/{accessPolicyName}": { + "put": { + "tags": [ + "AccessPolicies" + ], + "operationId": "AccessPolicies_CreateOrUpdate", + "x-ms-examples": { + "AccessPoliciesCreate": { + "$ref": "./examples/AccessPoliciesCreate.json" + } + }, + "description": "Create or update an access policy in the specified environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "name": "accessPolicyName", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "description": "Name of the access policy." 
+ }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "parameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AccessPolicyCreateOrUpdateParameters" + }, + "description": "Parameters for creating an access policy." + } + ], + "responses": { + "200": { + "description": "The existing access policy definition was successfully updated.", + "schema": { + "$ref": "#/definitions/AccessPolicyResource" + } + }, + "201": { + "description": "The access policy was successfully created.", + "schema": { + "$ref": "#/definitions/AccessPolicyResource" + } + }, + "default": { + "description": "HTTP 400 (Bad Request): The given access policy request body is invalid; See the error code and message in the response for details.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "get": { + "tags": [ + "AccessPolicies" + ], + "operationId": "AccessPolicies_Get", + "x-ms-examples": { + "AccessPoliciesGet": { + "$ref": "./examples/AccessPoliciesGet.json" + } + }, + "description": "Gets the access policy with the specified name in the specified environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/AccessPolicyNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The access policy definition was successfully retrieved and is in the response.", + "schema": { + "$ref": "#/definitions/AccessPolicyResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or access policy could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "patch": { + "tags": [ + "AccessPolicies" + ], + "operationId": "AccessPolicies_Update", + "x-ms-examples": { + "AccessPoliciesUpdate": { + "$ref": "./examples/AccessPoliciesPatchRoles.json" + } + }, + "description": "Updates the access policy with the specified name in the specified subscription, resource group, and environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/AccessPolicyNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + }, + { + "name": "accessPolicyUpdateParameters", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/AccessPolicyUpdateParameters" + }, + "description": "Request object that contains the updated information for the access policy." 
+ } + ], + "responses": { + "200": { + "description": "The access policy definition was successfully updated and is in the response.", + "schema": { + "$ref": "#/definitions/AccessPolicyResource" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or access policy could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "delete": { + "tags": [ + "AccessPolicies" + ], + "operationId": "AccessPolicies_Delete", + "x-ms-examples": { + "AccessPoliciesDelete": { + "$ref": "./examples/AccessPoliciesDelete.json" + } + }, + "description": "Deletes the access policy with the specified name in the specified subscription, resource group, and environment", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/AccessPolicyNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "The access policy was successfully deleted." + }, + "204": { + "description": "The access policy was successfully deleted." + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, environment, or access policy could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/accessPolicies": { + "get": { + "tags": [ + "AccessPolicies" + ], + "operationId": "AccessPolicies_ListByEnvironment", + "x-ms-examples": { + "AccessPoliciesByEnvironment": { + "$ref": "./examples/AccessPoliciesListByEnvironment.json" + } + }, + "description": "Lists all the available access policies associated with the environment.", + "parameters": [ + { + "$ref": "#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/EnvironmentNameParameter" + }, + { + "$ref": "#/parameters/ApiVersionParameter" + } + ], + "responses": { + "200": { + "description": "access policies returned successfully.", + "schema": { + "$ref": "#/definitions/AccessPolicyListResponse" + } + }, + "default": { + "description": "HTTP 404 (Not Found): The subscription, resource group, or environment could not be found.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + } + }, + "definitions": { + "OperationListResult": { + "description": "Result of the request to list Time Series Insights operations. 
It contains a list of operations and a URL link to get the next set of results.", + "properties": { + "value": { + "description": "List of Time Series Insights operations supported by the Microsoft.TimeSeriesInsights resource provider.", + "type": "array", + "readOnly": true, + "items": { + "$ref": "#/definitions/Operation" + } + }, + "nextLink": { + "description": "URL to get the next set of operation list results if there are any.", + "type": "string", + "readOnly": true + } + } + }, + "Operation": { + "description": "A Time Series Insights REST API operation", + "type": "object", + "properties": { + "name": { + "description": "The name of the operation being performed on this particular object.", + "type": "string", + "readOnly": true + }, + "display": { + "description": "Contains the localized display information for this particular operation / action.", + "readOnly": true, + "properties": { + "provider": { + "description": "The localized friendly form of the resource provider name.", + "type": "string", + "readOnly": true + }, + "resource": { + "description": "The localized friendly form of the resource type related to this action/operation.", + "type": "string", + "readOnly": true + }, + "operation": { + "description": "The localized friendly name for the operation.", + "type": "string", + "readOnly": true + }, + "description": { + "description": "The localized friendly description for the operation.", + "type": "string", + "readOnly": true + } + } + } + } + }, + "Resource": { + "properties": { + "id": { + "readOnly": true, + "type": "string", + "description": "Resource Id" + }, + "name": { + "readOnly": true, + "type": "string", + "description": "Resource name" + }, + "type": { + "readOnly": true, + "type": "string", + "description": "Resource type" + } + }, + "description": "Time Series Insights resource", + "x-ms-azure-resource": true + }, + "TrackedResource": { + "properties": { + "location": { + "type": "string", + "description": "Resource location" + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Resource tags" + } + }, + "allOf": [ + { + "$ref": "#/definitions/Resource" + } + ], + "required": [ + "location" + ], + "description": "Time Series Insights resource that is tracked by Azure Resource Manager." + }, + "ResourceProperties": { + "properties": { + "provisioningState": { + "readOnly": true, + "type": "string", + "description": "Provisioning state of the resource.", + "enum": [ + "Accepted", + "Creating", + "Updating", + "Succeeded", + "Failed", + "Deleting" + ], + "x-ms-enum": { + "name": "ProvisioningState", + "modelAsString": false + } + }, + "creationTime": { + "readOnly": true, + "type": "string", + "format": "date-time", + "description": "The time the resource was created." + } + }, + "description": "Properties that are common to all tracked resources." + }, + "Sku": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of this SKU.", + "enum": [ + "S1", + "S2" + ], + "x-ms-enum": { + "name": "SkuName", + "modelAsString": false + } + }, + "capacity": { + "format": "int32", + "type": "integer", + "description": "The capacity of the sku. This value can be changed to support scale out of environments after they have been created.", + "minimum": 1, + "maximum": 10 + } + }, + "required": [ + "name", + "capacity" + ], + "description": "The sku determines the capacity of the environment, the SLA (in queries-per-minute and total capacity), and the billing rate." 
+ }, + "CreateOrUpdateTrackedResourceProperties": { + "properties": { + "location": { + "type": "string", + "description": "The location of the resource." + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Key-value pairs of additional properties for the resource." + } + }, + "required": [ + "location" + ], + "description": "Properties required to create any resource tracked by Azure Resource Manager." + }, + "EnvironmentCreateOrUpdateParameters": { + "properties": { + "sku": { + "$ref": "#/definitions/Sku" + }, + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EnvironmentCreationProperties" + } + }, + "required": [ + "sku", + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/CreateOrUpdateTrackedResourceProperties" + } + ], + "description": "Parameters supplied to the CreateOrUpdate Environment operation." + }, + "EnvironmentUpdateParameters": { + "type": "object", + "properties": { + "sku": { + "$ref": "#/definitions/Sku", + "description": "The sku of the environment." + }, + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Key-value pairs of additional properties for the environment." + }, + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EnvironmentMutableProperties", + "description": "Properties of the environment." + } + }, + "description": "Parameters supplied to the Update Environment operation." + }, + "EnvironmentListResponse": { + "properties": { + "value": { + "type": "array", + "items": { + "$ref": "#/definitions/EnvironmentResource" + }, + "description": "Result of the List Environments operation." + } + }, + "description": "The response of the List Environments operation." + }, + "EnvironmentResource": { + "properties": { + "sku": { + "$ref": "#/definitions/Sku" + }, + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EnvironmentResourceProperties" + } + }, + "allOf": [ + { + "$ref": "#/definitions/TrackedResource" + } + ], + "description": "An environment is a set of time-series data available for query, and is the top level Azure Time Series Insights resource." + }, + "EnvironmentCreationProperties": { + "properties": { + "dataRetentionTime": { + "type": "string", + "format": "duration", + "description": "ISO8601 timespan specifying the minimum number of days the environment's events will be available for query." + }, + "storageLimitExceededBehavior": { + "type": "string", + "description": "The behavior the Time Series Insights service should take when the environment's capacity has been exceeded. If \"PauseIngress\" is specified, new events will not be read from the event source. If \"PurgeOldData\" is specified, new events will continue to be read and old events will be deleted from the environment. The default behavior is PurgeOldData.", + "enum": [ + "PurgeOldData", + "PauseIngress" + ], + "x-ms-enum": { + "name": "StorageLimitExceededBehavior", + "modelAsString": false + } + } + }, + "required": [ + "dataRetentionTime" + ], + "description": "Properties used to create an environment." + }, + "EnvironmentResourceProperties": { + "properties": { + "dataAccessId": { + "readOnly": true, + "type": "string", + "format": "uuid", + "description": "An id used to access the environment data, e.g. to query the environment's events or upload reference data for the environment." 
+ }, + "dataAccessFqdn": { + "readOnly": true, + "type": "string", + "description": "The fully qualified domain name used to access the environment data, e.g. to query the environment's events or upload reference data for the environment." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EnvironmentCreationProperties" + }, + { + "$ref": "#/definitions/ResourceProperties" + } + ], + "required": [ + "dataRetentionTime" + ], + "description": "Properties of the environment." + }, + "EnvironmentMutableProperties": { + "description": "An object that represents a set of mutable environment resource properties.", + "type": "object", + "properties": { + "dataRetentionTime": { + "type": "string", + "format": "duration", + "description": "ISO8601 timespan specifying the minimum number of days the environment's events will be available for query." + } + } + }, + "EventSourceCreateOrUpdateParameters": { + "discriminator": "kind", + "properties": { + "kind": { + "type": "string", + "description": "The kind of the event source.", + "enum": [ + "Microsoft.EventHub", + "Microsoft.IoTHub" + ], + "x-ms-enum": { + "name": "Kind", + "modelAsString": false + } + } + }, + "required": [ + "kind" + ], + "allOf": [ + { + "$ref": "#/definitions/CreateOrUpdateTrackedResourceProperties" + } + ], + "description": "Parameters supplied to the Create or Update Event Source operation." + }, + "EventHubEventSourceCreateOrUpdateParameters": { + "x-ms-discriminator-value": "Microsoft.EventHub", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EventHubEventSourceCreationProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EventSourceCreateOrUpdateParameters" + } + ], + "description": "Parameters supplied to the Create or Update Event Source operation for an EventHub event source." + }, + "IoTHubEventSourceCreateOrUpdateParameters": { + "x-ms-discriminator-value": "Microsoft.IoTHub", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/IoTHubEventSourceCreationProperties" + } + }, + "required": [ + "properties" + ], + "allOf": [ + { + "$ref": "#/definitions/EventSourceCreateOrUpdateParameters" + } + ], + "description": "Parameters supplied to the Create or Update Event Source operation for an IoTHub event source." + }, + "EventSourceUpdateParameters": { + "type": "object", + "properties": { + "tags": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Key-value pairs of additional properties for the event source." + } + }, + "description": "Parameters supplied to the Update Event Source operation." + }, + "EventHubEventSourceUpdateParameters": { + "type": "object", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/EventHubEventSourceMutableProperties", + "description": "Properties of the EventHub event source." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EventSourceUpdateParameters" + } + ], + "description": "Parameters supplied to the Update Event Source operation to update an EventHub event source." + }, + "IoTHubEventSourceUpdateParameters": { + "type": "object", + "properties": { + "properties": { + "x-ms-client-flatten": true, + "$ref": "#/definitions/IoTHubEventSourceMutableProperties", + "description": "Properties of the IoTHub event source." 
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/EventSourceUpdateParameters"
+        }
+      ],
+      "description": "Parameters supplied to the Update Event Source operation to update an IoTHub event source."
+    },
+    "EventSourceListResponse": {
+      "properties": {
+        "value": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/EventSourceResource"
+          },
+          "description": "Result of the List EventSources operation."
+        }
+      },
+      "description": "The response of the List EventSources operation."
+    },
+    "EventSourceResource": {
+      "type": "object",
+      "discriminator": "kind",
+      "properties": {
+        "kind": {
+          "type": "string",
+          "description": "The kind of the event source.",
+          "enum": [
+            "Microsoft.EventHub",
+            "Microsoft.IoTHub"
+          ]
+        }
+      },
+      "required": [
+        "kind"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/TrackedResource"
+        }
+      ],
+      "description": "An environment receives data from one or more event sources. Each event source has associated connection info that allows the Time Series Insights ingress pipeline to connect to and pull data from the event source."
+    },
+    "EventHubEventSourceResource": {
+      "x-ms-discriminator-value": "Microsoft.EventHub",
+      "properties": {
+        "properties": {
+          "x-ms-client-flatten": true,
+          "$ref": "#/definitions/EventHubEventSourceResourceProperties"
+        }
+      },
+      "required": [
+        "properties"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/EventSourceResource"
+        }
+      ],
+      "description": "An event source that receives its data from an Azure EventHub."
+    },
+    "IoTHubEventSourceResource": {
+      "x-ms-discriminator-value": "Microsoft.IoTHub",
+      "properties": {
+        "properties": {
+          "x-ms-client-flatten": true,
+          "$ref": "#/definitions/IoTHubEventSourceResourceProperties"
+        }
+      },
+      "required": [
+        "properties"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/EventSourceResource"
+        }
+      ],
+      "description": "An event source that receives its data from an Azure IoTHub."
+    },
+    "EventSourceCommonProperties": {
+      "properties": {
+        "timestampPropertyName": {
+          "type": "string",
+          "description": "The event property that will be used as the event source's timestamp. If a value isn't specified for timestampPropertyName, or if null or empty-string is specified, the event creation time will be used."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/ResourceProperties"
+        }
+      ],
+      "description": "Properties of the event source."
+    },
+    "AzureEventSourceProperties": {
+      "properties": {
+        "eventSourceResourceId": {
+          "type": "string",
+          "description": "The resource id of the event source in Azure Resource Manager."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/EventSourceCommonProperties"
+        }
+      ],
+      "required": [
+        "eventSourceResourceId"
+      ],
+      "description": "Properties of an event source that reads events from an event broker in Azure."
+    },
+    "EventHubEventSourceCommonProperties": {
+      "properties": {
+        "serviceBusNamespace": {
+          "type": "string",
+          "description": "The name of the service bus that contains the event hub."
+        },
+        "eventHubName": {
+          "type": "string",
+          "description": "The name of the event hub."
+        },
+        "consumerGroupName": {
+          "type": "string",
+          "description": "The name of the event hub's consumer group that holds the partitions from which events will be read."
+        },
+        "keyName": {
+          "type": "string",
+          "description": "The name of the SAS key that grants the Time Series Insights service access to the event hub. The shared access policies for this key must grant 'Listen' permissions to the event hub."
+ } + }, + "allOf": [ + { + "$ref": "#/definitions/AzureEventSourceProperties" + } + ], + "required": [ + "serviceBusNamespace", + "eventHubName", + "consumerGroupName", + "keyName" + ], + "description": "Properties of the EventHub event source." + }, + "EventHubEventSourceCreationProperties": { + "properties": { + "sharedAccessKey": { + "type": "string", + "description": "The value of the shared access key that grants the Time Series Insights service read access to the event hub. This property is not shown in event source responses." + } + }, + "allOf": [ + { + "$ref": "#/definitions/EventHubEventSourceCommonProperties" + } + ], + "required": [ + "sharedAccessKey" + ], + "description": "Properties of the EventHub event source that are required on create or update requests." + }, + "EventHubEventSourceResourceProperties": { + "allOf": [ + { + "$ref": "#/definitions/EventHubEventSourceCommonProperties" + } + ], + "description": "Properties of the EventHub event source resource." + }, + "IoTHubEventSourceCommonProperties": { + "properties": { + "iotHubName": { + "type": "string", + "description": "The name of the iot hub." + }, + "consumerGroupName": { + "type": "string", + "description": "The name of the iot hub's consumer group that holds the partitions from which events will be read." + }, + "keyName": { + "type": "string", + "description": "The name of the Shared Access Policy key that grants the Time Series Insights service access to the iot hub. This shared access policy key must grant 'service connect' permissions to the iot hub." + } + }, + "allOf": [ + { + "$ref": "#/definitions/AzureEventSourceProperties" + } + ], + "required": [ + "iotHubName", + "consumerGroupName", + "keyName" + ], + "description": "Properties of the IoTHub event source." + }, + "IoTHubEventSourceCreationProperties": { + "properties": { + "sharedAccessKey": { + "type": "string", + "description": "The value of the Shared Access Policy key that grants the Time Series Insights service read access to the iot hub. This property is not shown in event source responses." + } + }, + "allOf": [ + { + "$ref": "#/definitions/IoTHubEventSourceCommonProperties" + } + ], + "required": [ + "sharedAccessKey" + ], + "description": "Properties of the IoTHub event source that are required on create or update requests." + }, + "IoTHubEventSourceResourceProperties": { + "allOf": [ + { + "$ref": "#/definitions/IoTHubEventSourceCommonProperties" + } + ], + "description": "Properties of the IoTHub event source resource." + }, + "LocalTimestamp": { + "description": "An object that represents the local timestamp property. It contains the format of local timestamp that needs to be used and the corresponding timezone offset information. If a value isn't specified for localTimestamp, or if null, then the local timestamp will not be ingressed with the events.", + "type": "object", + "properties": { + "format": { + "description": "An enum that represents the format of the local timestamp property that needs to be set.", + "type": "string", + "enum": [ + "Embedded", + "Iana", + "TimeSpan" + ], + "x-ms-enum": { + "name": "LocalTimestampFormat", + "modelAsString": false + } + }, + "timeZoneOffset": { + "description": "An object that represents the offset information for the local timestamp format specified. 
Should not be specified for LocalTimestampFormat - Embedded.",
+          "type": "object",
+          "properties": {
+            "propertyName": {
+              "type": "string",
+              "description": "The event property that will contain the offset information to calculate the local timestamp. When the LocalTimestampFormat is Iana, the property name will contain the name of the column which contains the IANA time zone name (e.g. America/Los_Angeles). When LocalTimestampFormat is TimeSpan, it contains the name of the property which contains values representing the offset (e.g. P1D or 1.00:00:00)."
+            }
+          }
+        }
+      }
+    },
+    "EventSourceMutableProperties": {
+      "description": "An object that represents a set of mutable event source resource properties.",
+      "type": "object",
+      "properties": {
+        "timestampPropertyName": {
+          "type": "string",
+          "description": "The event property that will be used as the event source's timestamp. If a value isn't specified for timestampPropertyName, or if null or empty-string is specified, the event creation time will be used."
+        },
+        "localTimestamp": {
+          "$ref": "#/definitions/LocalTimestamp"
+        }
+      }
+    },
+    "EventHubEventSourceMutableProperties": {
+      "description": "An object that represents a set of mutable EventHub event source resource properties.",
+      "type": "object",
+      "properties": {
+        "sharedAccessKey": {
+          "type": "string",
+          "description": "The value of the shared access key that grants the Time Series Insights service read access to the event hub. This property is not shown in event source responses."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/EventSourceMutableProperties"
+        }
+      ]
+    },
+    "IoTHubEventSourceMutableProperties": {
+      "description": "An object that represents a set of mutable IoTHub event source resource properties.",
+      "type": "object",
+      "properties": {
+        "sharedAccessKey": {
+          "type": "string",
+          "description": "The value of the shared access key that grants the Time Series Insights service read access to the iot hub. This property is not shown in event source responses."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/EventSourceMutableProperties"
+        }
+      ]
+    },
+    "ReferenceDataSetCreateOrUpdateParameters": {
+      "properties": {
+        "properties": {
+          "x-ms-client-flatten": true,
+          "$ref": "#/definitions/ReferenceDataSetCreationProperties"
+        }
+      },
+      "required": [
+        "properties"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/CreateOrUpdateTrackedResourceProperties"
+        }
+      ]
+    },
+    "ReferenceDataSetUpdateParameters": {
+      "type": "object",
+      "properties": {
+        "tags": {
+          "type": "object",
+          "additionalProperties": {
+            "type": "string"
+          },
+          "description": "Key-value pairs of additional properties for the reference data set."
+        }
+      },
+      "description": "Parameters supplied to the Update Reference Data Set operation."
+    },
+    "ReferenceDataSetListResponse": {
+      "properties": {
+        "value": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/ReferenceDataSetResource"
+          },
+          "description": "Result of the List Reference Data Sets operation."
+        }
+      },
+      "description": "The response of the List Reference Data Sets operation."
+    },
+    "ReferenceDataSetResource": {
+      "properties": {
+        "properties": {
+          "x-ms-client-flatten": true,
+          "$ref": "#/definitions/ReferenceDataSetResourceProperties"
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/TrackedResource"
+        }
+      ],
+      "description": "A reference data set provides metadata about the events in an environment. Metadata in the reference data set will be joined with events as they are read from event sources. The metadata that makes up the reference data set is uploaded or modified through the Time Series Insights data plane APIs."
+    },
+    "ReferenceDataSetCreationProperties": {
+      "properties": {
+        "keyProperties": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/ReferenceDataSetKeyProperty"
+          },
+          "description": "The list of key properties for the reference data set."
+        }
+      },
+      "required": [
+        "keyProperties"
+      ],
+      "description": "Properties used to create a reference data set."
+    },
+    "ReferenceDataSetResourceProperties": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/ReferenceDataSetCreationProperties"
+        },
+        {
+          "$ref": "#/definitions/ResourceProperties"
+        }
+      ],
+      "required": [
+        "keyProperties"
+      ],
+      "description": "Properties of the reference data set."
+    },
+    "ReferenceDataSetKeyProperty": {
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The name of the key property."
+        },
+        "type": {
+          "type": "string",
+          "description": "The type of the key property.",
+          "enum": [
+            "String",
+            "Double",
+            "Bool",
+            "DateTime"
+          ],
+          "x-ms-enum": {
+            "name": "ReferenceDataKeyPropertyType",
+            "modelAsString": false
+          }
+        }
+      },
+      "description": "A key property for the reference data set. A reference data set can have multiple key properties."
+    },
+    "AccessPolicyCreateOrUpdateParameters": {
+      "properties": {
+        "properties": {
+          "x-ms-client-flatten": true,
+          "$ref": "#/definitions/AccessPolicyResourceProperties"
+        }
+      },
+      "required": [
+        "properties"
+      ]
+    },
+    "AccessPolicyUpdateParameters": {
+      "properties": {
+        "properties": {
+          "x-ms-client-flatten": true,
+          "$ref": "#/definitions/AccessPolicyMutableProperties"
+        }
+      },
+      "required": [
+        "properties"
+      ]
+    },
+    "AccessPolicyListResponse": {
+      "properties": {
+        "value": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/AccessPolicyResource"
+          },
+          "description": "Result of the List access policies operation."
+        }
+      },
+      "description": "The response of the List access policies operation."
+    },
+    "AccessPolicyResource": {
+      "properties": {
+        "properties": {
+          "x-ms-client-flatten": true,
+          "$ref": "#/definitions/AccessPolicyResourceProperties"
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/Resource"
+        }
+      ],
+      "description": "An access policy is used to grant users and applications access to the environment. Roles are assigned to service principals in Azure Active Directory. These roles define the actions the principal can perform through the Time Series Insights data plane APIs."
+    },
+    "AccessPolicyResourceProperties": {
+      "properties": {
+        "principalObjectId": {
+          "type": "string",
+          "description": "The objectId of the principal in Azure Active Directory."
+        },
+        "description": {
+          "type": "string",
+          "description": "A description of the access policy."
+        },
+        "roles": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "description": "A role defining the data plane operations that a principal can perform on a Time Series Insights client.",
+            "enum": [
+              "Reader",
+              "Contributor"
+            ],
+            "x-ms-enum": {
+              "name": "AccessPolicyRole",
+              "modelAsString": false
+            }
+          },
+          "description": "The list of roles the principal is assigned on the environment."
+        }
+      }
+    },
+    "AccessPolicyMutableProperties": {
+      "description": "An object that represents a set of mutable access policy resource properties.",
+      "type": "object",
+      "properties": {
+        "description": {
+          "type": "string",
+          "description": "A description of the access policy."
+ }, + "roles": { + "type": "array", + "items": { + "type": "string", + "description": "A role defining the data plane operations that a principal can perform on a Time Series Insights client.", + "enum": [ + "Reader", + "Contributor" + ], + "x-ms-enum": { + "name": "AccessPolicyRole", + "modelAsString": false + } + }, + "description": "The list of roles the principal is assigned on the environment." + } + } + }, + "CloudError": { + "type": "object", + "properties": { + "error": { + "$ref": "#/definitions/CloudErrorBody" + } + }, + "description": "Contains information about an API error.", + "x-ms-external": true + }, + "CloudErrorBody": { + "type": "object", + "description": "Describes a particular API error with an error code and a message.", + "properties": { + "code": { + "type": "string", + "description": "An error code that describes the error condition more precisely than an HTTP status code. Can be used to programmatically handle specific error cases." + }, + "message": { + "type": "string", + "description": "A message that describes the error in detail and provides debugging information." + }, + "target": { + "type": "string", + "description": "The target of the particular error (for example, the name of the property in error)." + }, + "details": { + "type": "array", + "items": { + "$ref": "#/definitions/CloudErrorBody" + }, + "description": "Contains nested errors that are related to this error." + } + }, + "x-ms-external": true + } + }, + "parameters": { + "SubscriptionIdParameter": { + "name": "subscriptionId", + "in": "path", + "description": "Azure Subscription ID.", + "required": true, + "type": "string" + }, + "ApiVersionParameter": { + "name": "api-version", + "in": "query", + "required": true, + "type": "string", + "description": "Version of the API to be used with the client request. Current version is 2017-02-28-preview." + }, + "ResourceGroupNameParameter": { + "name": "resourceGroupName", + "in": "path", + "required": true, + "type": "string", + "x-ms-parameter-location": "method", + "description": "Name of an Azure Resource group." + }, + "EnvironmentNameParameter": { + "name": "environmentName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights environment associated with the specified resource group.", + "x-ms-parameter-location": "method" + }, + "EventSourceNameParameter": { + "name": "eventSourceName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights event source associated with the specified environment.", + "x-ms-parameter-location": "method" + }, + "ReferenceDataSetNameParameter": { + "name": "referenceDataSetName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights reference data set associated with the specified environment.", + "x-ms-parameter-location": "method" + }, + "AccessPolicyNameParameter": { + "name": "accessPolicyName", + "in": "path", + "required": true, + "type": "string", + "description": "The name of the Time Series Insights access policy associated with the specified environment.", + "x-ms-parameter-location": "method" + } + } +}
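
For illustration only, and assuming the environments create-or-update operation defined earlier in this spec (api-version 2017-02-28-preview), a minimal PUT body satisfying EnvironmentCreateOrUpdateParameters (location, sku, and dataRetentionTime are required; every value below is a placeholder) might look like this:

    {
      "location": "West US",
      "sku": {
        "name": "S1",
        "capacity": 1
      },
      "properties": {
        "dataRetentionTime": "P31D",
        "storageLimitExceededBehavior": "PurgeOldData"
      }
    }

Because EnvironmentCreationProperties is referenced with x-ms-client-flatten, generated clients surface dataRetentionTime and storageLimitExceededBehavior directly on the parameter object rather than under a nested properties bag.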
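
Event sources are polymorphic on the kind discriminator. A hypothetical EventHubEventSourceCreateOrUpdateParameters body (every resource name, id, and key below is a placeholder) carries the EventHub-specific creation properties, including the write-only sharedAccessKey:

    {
      "location": "West US",
      "kind": "Microsoft.EventHub",
      "properties": {
        "eventSourceResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.EventHub/namespaces/ns1/eventhubs/hub1",
        "serviceBusNamespace": "ns1",
        "eventHubName": "hub1",
        "consumerGroupName": "$Default",
        "keyName": "RootManageSharedAccessKey",
        "sharedAccessKey": "<shared-access-key-value>",
        "timestampPropertyName": "eventTime"
      }
    }

Per the EventHubEventSourceCreationProperties description, sharedAccessKey is accepted on create and update requests but is never returned in event source responses.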
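
A ReferenceDataSetCreateOrUpdateParameters body (again illustrative; the key property name is a placeholder) needs only a location plus the key properties that incoming events will be joined on:

    {
      "location": "West US",
      "properties": {
        "keyProperties": [
          {
            "name": "deviceId",
            "type": "String"
          }
        ]
      }
    }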
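
Finally, an AccessPolicyCreateOrUpdateParameters body assigns roles to an Azure Active Directory principal (the objectId below is a placeholder). Since AccessPolicyResource derives from Resource rather than TrackedResource, no location or tags are required:

    {
      "properties": {
        "principalObjectId": "00000000-0000-0000-0000-000000000000",
        "description": "Read-only access for the reporting application.",
        "roles": [
          "Reader"
        ]
      }
    }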