diff --git a/.chronus/config.yaml b/.chronus/config.yaml index f707a38dbd..1aa7cecd08 100644 --- a/.chronus/config.yaml +++ b/.chronus/config.yaml @@ -57,6 +57,7 @@ changelog: ["@chronus/github/changelog", { repo: "microsoft/typespec" }] ignore: - "@typespec/http-client-csharp" - "@typespec/http-client-java" + - "@typespec/http-client-python" changedFiles: - "!**/*.md" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 0164d5223a..84ad4f50cf 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -11,6 +11,11 @@ ###################### /packages/http-client-java/ @srnagar @weidongxu-microsoft @haolingdong-msft @XiaofeiCao +###################### +# Python +###################### +/packages/http-client-python/ @iscai-msft @tadelesh @msyyc + ###################### # Emiter Shared ###################### diff --git a/.gitignore b/.gitignore index f0996e49b5..37082eeea8 100644 --- a/.gitignore +++ b/.gitignore @@ -210,3 +210,9 @@ BenchmarkDotnet.Artifacts/ # java emitter !packages/http-client-java/package-lock.json + +# python emitter +packages/http-client-python/test/**/generated/ +packages/http-client-python/test/**/cadl-ranch-coverage.json +!packages/http-client-python/package-lock.json + diff --git a/cspell.yaml b/cspell.yaml index cd704bbfd4..18eda75c5c 100644 --- a/cspell.yaml +++ b/cspell.yaml @@ -6,9 +6,12 @@ dictionaries: - typescript words: - Adoptium + - aiohttp - alzimmer - amqp - AQID + - astimezone + - asyncio - atrule - atteo - autorest @@ -17,8 +20,10 @@ words: - azurecr - azuresdk - bifilter + - blib - blockful - blockless + - buongiorno - cadl - cadleditor - cadleng @@ -32,31 +37,43 @@ words: - createsorreplacesresource - createsorupdatesresource - CRUDL + - ctxt - dbaeumer - Dcodegen - debouncer + - debugpy - Declipse - Dedupes - destructures - devdiv - Diagnoser - Dlog + - docutils - dogfood - Dorg - Dosgi + - dotenv - Dskip - eastus - ecmarkup + - elif - EMBEDME - Entra + - enumdiscriminator + - equalto - esbenp - esbuild - espt - ESRP + - etree - fluentui - genproto + - getpgid + - giacamo - globby - graalvm + - headasbooleanfalse + - headasbooleantrue - Gson - imple - Infima @@ -65,34 +82,55 @@ words: - instanceid - interner - intrinsics + - IOHTTP + - isdigit + - isinstance + - issubclass + - itertype - itor + - ivar - Jacoco - jdwp - jobject - jsyaml - keyer + - killpg + - kwargs - lifecyle - LINUXNEXTVMIMAGE - LINUXOS - LINUXVMIMAGE + - ljust + - lmazuel - lropaging + - lstrip - lzutf - MACVMIMAGE + - mday - mgmt + - mgmtplane - mocharc - mqtt + - mros - msbuild + - mspaint - MSRC + - msrest - multis - munge - mylib + - mypy - nanos + - nexted + - nihao - noformat - noopener - noreferrer + - nosec - nostdlib - noverify - npmjs + - nspkg - nupkg - oapi - ODATA @@ -106,31 +144,51 @@ words: - openapiv - Perfolizer - picocolors + - posargs - prismjs - proto - protobuf - protoc - psscriptanalyzer + - ptvsd - pwsh + - pyexpat + - pygen + - pylint + - pylintrc + - pyproject + - pyright + - pyrightconfig + - pytest + - rcfile - reactivex - recase - regen + - repr - respecify + - rjust - rpaas + - rsplit + - rstrip + - rtype - rushx - safeint + - sdkcore - segmentof - serde - sfixed - sint - snakeyaml - srnagar + - ssdlrs - sses - ssvs - statment - strs - syncpack - TCGC + - timegm + - tomli - triaging - TRYIT - tsdoc @@ -140,6 +198,7 @@ words: - tsvs - typespec - typespecvs + - tzname - Uhoh - uitestresults - unassignable @@ -155,19 +214,26 @@ words: - unrepresentable - unsourced - unversioned + - venv + - venvtools - VITE - vitest - vsix - VSSDK - Vsts - vswhere + 
- wday - weidxu - westus - WINDOWSVMIMAGE + - xiangyan - xiaofei - xlarge + - xmsclientrequestid - xors - xplat + - xxsubtype + - yamls ignorePaths: - "**/node_modules/**" - "**/dist/**" diff --git a/eng/common/config/area.ts b/eng/common/config/area.ts index 4379c0811a..3065177251 100644 --- a/eng/common/config/area.ts +++ b/eng/common/config/area.ts @@ -41,6 +41,7 @@ const isolatedEmitters = ["eng/emitters/"]; export const CIRules = { CSharp: [...all, ...isolatedEmitters, ...AreaPaths["emitter:client:csharp"], ".editorconfig"], Java: [...all, ...isolatedEmitters, ...AreaPaths["emitter:client:java"], ".editorconfig"], + Python: [...all, ...isolatedEmitters, ...AreaPaths["emitter:client:python"], ".editorconfig"], Core: [ "**/*", @@ -51,6 +52,7 @@ export const CIRules = { ...ignore(isolatedEmitters), ...ignore(AreaPaths["emitter:client:csharp"]), ...ignore(AreaPaths["emitter:client:java"]), + ...ignore(AreaPaths["emitter:client:python"]), ], }; diff --git a/eng/common/pipelines/ci.yml b/eng/common/pipelines/ci.yml index ae0841d797..8cea470cf8 100644 --- a/eng/common/pipelines/ci.yml +++ b/eng/common/pipelines/ci.yml @@ -50,6 +50,12 @@ extends: DependsOn: InitStage Condition: eq('true', stageDependencies.InitStage.outputs['InitJob.InitStep.RunJava']) + # Run python stages if RunPython == true + - template: /packages/http-client-python/eng/pipeline/templates/ci-stages.yml + parameters: + DependsOn: InitStage + Condition: eq('true', stageDependencies.InitStage.outputs['InitJob.InitStep.RunPython']) + # Run core stages if RunCore == true - template: /eng/tsp-core/pipelines/stages/ci-stages.yml parameters: diff --git a/eng/common/scripts/utils/find-area-changed.test.ts b/eng/common/scripts/utils/find-area-changed.test.ts index 8fdb9f5a08..ad7d3d942c 100644 --- a/eng/common/scripts/utils/find-area-changed.test.ts +++ b/eng/common/scripts/utils/find-area-changed.test.ts @@ -28,6 +28,16 @@ describe("paths that should trigger Java CI", () => { }); }); +describe("paths that should trigger python CI", () => { + it.each([ + ["packages/http-client-python/emitter/src/emitter.ts"], + ["packages/http-client-python/package.json"], + ])("%s", (...paths) => { + const areas = findAreasChanged(paths); + expect(areas).toEqual(["Python"]); + }); +}); + describe("paths that should trigger Core CI", () => { it.each([ "packages/compiler/package.json", @@ -45,7 +55,7 @@ describe("paths that should trigger all isolated packages", () => { "eng/emitters/pipelines/templates/jobs/test-job.yml", ])("%s", (path) => { const areas = findAreasChanged([path]); - expect(areas).toEqual(["CSharp", "Java"]); + expect(areas).toEqual(["CSharp", "Java", "Python"]); }); }); @@ -53,14 +63,15 @@ it("Should return a combination of core and isolated packages", () => { const areas = findAreasChanged([ "packages/http-client-csharp/src/constants.ts", "packages/http-client-java/src/emitter.ts", + "packages/http-client-python/src/emitter.ts", "packages/compiler/package.json", ]); - expect(areas).toEqual(["CSharp", "Java", "Core"]); + expect(areas).toEqual(["CSharp", "Java", "Python", "Core"]); }); it("Should return CSharp, Core and Java if .editorconfig is changed", () => { const areas = findAreasChanged([".editorconfig"]); - expect(areas).toEqual(["CSharp", "Java", "Core"]); + expect(areas).toEqual(["CSharp", "Java", "Python", "Core"]); }); it("Should not return Core for .prettierignore, .prettierrc.json, cspell.yaml, esling.config.json", () => { @@ -71,8 +82,9 @@ it("Should not return Core for .prettierignore, .prettierrc.json, cspell.yaml, 
e "esling.config.json", "packages/http-client-csharp/emitter/src/constants.ts", "packages/http-client-java/emitter/src/emitter.ts", + "packages/http-client-python/emitter/src/emitter.ts", ]); - expect(areas).toEqual(["CSharp", "Java"]); + expect(areas).toEqual(["CSharp", "Java", "Python"]); }); it("should return Core for random files at the root", () => { diff --git a/eslint.config.js b/eslint.config.js index a74a00a48e..a4bba007b4 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -73,7 +73,11 @@ const allFilesConfig = tsEslint.config({ export function getTypeScriptProjectRules(root) { return tsEslint.config({ files: ["**/packages/*/src/**/*.ts", "**/packages/*/src/**/*.tsx"], - ignores: ["**/packages/http-client-csharp/**/*", "**/packages/http-client-java/**/*"], // Ignore isolated modules + ignores: [ + "**/packages/http-client-csharp/**/*", + "**/packages/http-client-java/**/*", + "**/packages/http-client-python/**/*", + ], // Ignore isolated modules plugins: {}, languageOptions: { parserOptions: { @@ -139,6 +143,7 @@ export default tsEslint.config( "**/website/build/**/*", "**/.docusaurus/**/*", "packages/compiler/templates/**/*", // Ignore the templates which might have invalid code and not follow exactly our rules. + "**/venv/**/*", // Ignore python virtual env // TODO: enable "**/.scripts/**/*", "eng/tsp-core/scripts/**/*", diff --git a/packages/http-client-python/README.md b/packages/http-client-python/README.md new file mode 100644 index 0000000000..8ac1086a08 --- /dev/null +++ b/packages/http-client-python/README.md @@ -0,0 +1,118 @@ +# TypeSpec Python Client Emitter + +## Getting started + +### Initialize TypeSpec Project + +Follow [TypeSpec Getting Started](https://typespec.io/docs) to initialize your TypeSpec project. + +Make sure `npx tsp compile .` runs correctly. + +### Add `@typespec/http-client-python` to your project + +Include `@typespec/http-client-python` in `package.json`: + +```diff + "dependencies": { ++ "@typespec/http-client-python": "latest" + }, +``` + +Run `npm install` to install the dependency. + +### Generate a Python client library + +You can specify `@typespec/http-client-python` either on the command line or through `tspconfig.yaml`. + +#### Generate with `--emit` command + +Run the command `npx tsp compile <path-to-typespec-file> --emit @typespec/http-client-python` + +For example: + +```cmd +npx tsp compile main.tsp --emit @typespec/http-client-python +``` + +or + +```cmd +npx tsp compile client.tsp --emit @typespec/http-client-python +``` + +#### Generate with tspconfig.yaml + +Add the following configuration in `tspconfig.yaml`: + +```diff +emit: + - "@typespec/http-client-python" +options: + "@typespec/http-client-python": ++ package-dir: "contoso" ++ package-name: "contoso" +``` + +Run the command to generate your library: + +```cmd +npx tsp compile main.tsp +``` + +or + +```cmd +npx tsp compile client.tsp +``` + +## Configure the generated library + +You can further configure the generated client library using the emitter options provided through `@typespec/http-client-python`. + +You can set options directly on the command line via `--option @typespec/http-client-python.<option-name>=XXX`, e.g. `--option @typespec/http-client-python.package-name="contoso"`, + +or + +modify `tspconfig.yaml` in the TypeSpec project to add emitter options under `options/@typespec/http-client-python`.
+ +```diff +emit: + - "@typespec/http-client-python" +options: + "@typespec/http-client-python": ++ package-dir: "{package-dir}" ++ package-name: "contoso" +``` + +### Supported emitter options + +Common emitter configuration example: + +```yaml +emit: + - "@typespec/http-client-python" +options: + "@typespec/http-client-python": + package-dir: "{package-dir}" + package-name: "contoso" +``` + +| Option | Type | Description | +| -------------------------- | ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `package-version` | string | Specify the package version. Default version: `1.0.0b1`. | +| `package-name` | string | Specify the package name. | +| `package-dir` | string | Specify the output directory for the package. | +| `generate-packaging-files` | boolean | Indicate if packaging files, such as setup.py, should be generated. | +| `package-pprint-name` | string | Specify the pretty print name for the package. | +| `flavor` | string | Represents the type of SDK that will be generated. By default, there will be no branding in the generated client library. Specify `"azure"` to generate an SDK following Azure guidelines. | +| `company-name` | string | Specify the company name to be inserted into licensing data. For `"azure"` flavor, the default value inserted is `Microsoft`. | + +**Advanced emitter options** + +| Option | Type | Description | +| ------------------------ | ------- | ---------------------------------------------------------------------------------------------------------------------------------- | +| `head-as-boolean` | boolean | Generate head calls to return a boolean. Default: `true`. | +| `packaging-files-dir` | string | Pass in the path to a custom directory with SDK packaging files. | +| `packaging-files-config` | object | Specify configuration options that will be passed directly into the packaging files specified by the `packaging-files-dir` option. | +| `tracing` | boolean | Only available for the `"azure"` flavor of SDKs, provide tracing support in the generated client library. Default: `true`. | +| `debug` | boolean | Enable debugging. 
| diff --git a/packages/http-client-python/emitter/src/code-model.ts b/packages/http-client-python/emitter/src/code-model.ts new file mode 100644 index 0000000000..6a738850b0 --- /dev/null +++ b/packages/http-client-python/emitter/src/code-model.ts @@ -0,0 +1,272 @@ +import { + SdkBasicServiceMethod, + SdkClientType, + SdkCredentialParameter, + SdkEndpointParameter, + SdkEndpointType, + SdkLroPagingServiceMethod, + SdkLroServiceMethod, + SdkMethodParameter, + SdkPagingServiceMethod, + SdkServiceMethod, + SdkServiceOperation, + UsageFlags, + getCrossLanguagePackageId, +} from "@azure-tools/typespec-client-generator-core"; +import { ignoreDiagnostics } from "@typespec/compiler"; +import { + emitBasicHttpMethod, + emitLroHttpMethod, + emitLroPagingHttpMethod, + emitPagingHttpMethod, +} from "./http.js"; +import { PythonSdkContext } from "./lib.js"; +import { + KnownTypes, + disableGenerationMap, + emitEndpointType, + getType, + simpleTypesMap, + typesMap, +} from "./types.js"; +import { emitParamBase, getImplementation, removeUnderscoresFromNamespace } from "./utils.js"; + +function emitBasicMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkBasicServiceMethod, + operationGroupName: string, +): Record[] { + if (method.operation.kind !== "http") + throw new Error("We only support HTTP operations right now"); + switch (method.operation.kind) { + case "http": + return emitBasicHttpMethod(context, rootClient, method, operationGroupName); + default: + throw new Error("We only support HTTP operations right now"); + } +} + +function emitLroMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkLroServiceMethod, + operationGroupName: string, +): Record[] { + if (method.operation.kind !== "http") + throw new Error("We only support HTTP operations right now"); + switch (method.operation.kind) { + case "http": + return emitLroHttpMethod(context, rootClient, method, operationGroupName); + default: + throw new Error("We only support HTTP operations right now"); + } +} + +function emitPagingMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkPagingServiceMethod, + operationGroupName: string, +): Record[] { + if (method.operation.kind !== "http") + throw new Error("We only support HTTP operations right now"); + switch (method.operation.kind) { + case "http": + return emitPagingHttpMethod(context, rootClient, method, operationGroupName); + default: + throw new Error("We only support HTTP operations right now"); + } +} + +function emitLroPagingMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkLroPagingServiceMethod, + operationGroupName: string, +): Record[] { + if (method.operation.kind !== "http") + throw new Error("We only support HTTP operations right now"); + switch (method.operation.kind) { + case "http": + return emitLroPagingHttpMethod(context, rootClient, method, operationGroupName); + default: + throw new Error("We only support HTTP operations right now"); + } +} + +function emitMethodParameter( + context: PythonSdkContext, + parameter: SdkEndpointParameter | SdkCredentialParameter | SdkMethodParameter, +): Record[] { + if (parameter.kind === "endpoint") { + if (parameter.type.kind === "union") { + for (const endpointVal of parameter.type.values) { + return emitEndpointType(context, endpointVal as SdkEndpointType); + } + } else { + return emitEndpointType(context, parameter.type); + } + } + const base = { + ...emitParamBase(context, parameter), + implementation: getImplementation(context, 
parameter), + clientDefaultValue: parameter.clientDefaultValue, + location: parameter.kind, + }; + if (parameter.isApiVersionParam) { + return [ + { + ...base, + location: "query", + wireName: "api-version", + in_docstring: false, + }, + ]; + } + return [base]; +} + +function emitMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkServiceMethod, + operationGroupName: string, +): Record[] { + switch (method.kind) { + case "basic": + return emitBasicMethod(context, rootClient, method, operationGroupName); + case "lro": + return emitLroMethod(context, rootClient, method, operationGroupName); + case "paging": + return emitPagingMethod(context, rootClient, method, operationGroupName); + default: + return emitLroPagingMethod(context, rootClient, method, operationGroupName); + } +} + +function emitOperationGroups( + context: PythonSdkContext, + client: SdkClientType, + rootClient: SdkClientType, + prefix: string, +): Record[] | undefined { + const operationGroups: Record[] = []; + + for (const method of client.methods) { + if (method.kind === "clientaccessor") { + const operationGroup = method.response; + const name = `${prefix}${operationGroup.name}`; + let operations: Record[] = []; + for (const method of operationGroup.methods) { + if (method.kind === "clientaccessor") continue; + operations = operations.concat(emitMethod(context, rootClient, method, name)); + } + operationGroups.push({ + name: name, + className: name, + propertyName: operationGroup.name, + operations: operations, + operationGroups: emitOperationGroups(context, operationGroup, rootClient, name), + }); + } + } + + // root client should deal with mixin operation group + if (prefix === "") { + let operations: Record[] = []; + for (const method of client.methods) { + if (method.kind === "clientaccessor") continue; + operations = operations.concat(emitMethod(context, rootClient, method, "")); + } + if (operations.length > 0) { + operationGroups.push({ + name: "", + className: "", + propertyName: "", + operations: operations, + }); + } + } + + return operationGroups.length > 0 ? operationGroups : undefined; +} + +function emitClient( + context: PythonSdkContext, + client: SdkClientType, +): Record { + if (client.initialization) { + context.__endpointPathParameters = []; + } + const parameters = + client.initialization?.properties + .map((x) => emitMethodParameter(context, x)) + .reduce((a, b) => [...a, ...b]) ?? []; + + const endpointParameter = client.initialization?.properties.find((x) => x.kind === "endpoint") as + | SdkEndpointParameter + | undefined; + const operationGroups = emitOperationGroups(context, client, client, ""); + let url: string | undefined; + if (endpointParameter?.type.kind === "union") { + url = (endpointParameter.type.values[0] as SdkEndpointType).serverUrl; + } else { + url = endpointParameter?.type.serverUrl; + } + return { + name: client.name, + description: client.description ?? 
"", + parameters, + operationGroups, + url, + apiVersions: client.apiVersions, + arm: context.arm, + }; +} + +export function emitCodeModel( + sdkContext: PythonSdkContext, +) { + // Get types + const sdkPackage = sdkContext.sdkPackage; + const codeModel: Record = { + namespace: removeUnderscoresFromNamespace(sdkPackage.rootNamespace).toLowerCase(), + clients: [], + subnamespaceToClients: {}, + }; + for (const client of sdkPackage.clients) { + codeModel["clients"].push(emitClient(sdkContext, client)); + if (client.nameSpace === sdkPackage.rootNamespace) { + } else { + codeModel["subnamespaceToClients"][client.nameSpace] = emitClient(sdkContext, client); + } + } + // loop through models and enums since there may be some orphaned models needs to be generated + for (const model of sdkPackage.models) { + if ( + model.name === "" || + ((model.usage & UsageFlags.Spread) > 0 && + (model.usage & UsageFlags.Input) === 0 && + (model.usage & UsageFlags.Output) === 0) + ) { + continue; + } + if (!disableGenerationMap.has(model)) { + getType(sdkContext, model); + } + } + for (const sdkEnum of sdkPackage.enums) { + if (sdkEnum.usage === UsageFlags.ApiVersionEnum) { + continue; + } + getType(sdkContext, sdkEnum); + } + codeModel["types"] = [ + ...typesMap.values(), + ...Object.values(KnownTypes), + ...simpleTypesMap.values(), + ]; + codeModel["crossLanguagePackageId"] = ignoreDiagnostics(getCrossLanguagePackageId(sdkContext)); + return codeModel; +} diff --git a/packages/http-client-python/emitter/src/emitter.ts b/packages/http-client-python/emitter/src/emitter.ts new file mode 100644 index 0000000000..ddd9cf80e9 --- /dev/null +++ b/packages/http-client-python/emitter/src/emitter.ts @@ -0,0 +1,127 @@ +import { + createSdkContext, + SdkContext, + SdkHttpOperation, + SdkServiceOperation, +} from "@azure-tools/typespec-client-generator-core"; +import { EmitContext } from "@typespec/compiler"; +import { execSync } from "child_process"; +import fs from "fs"; +import path, { dirname } from "path"; +import { fileURLToPath } from "url"; +import { emitCodeModel } from "./code-model.js"; +import { saveCodeModelAsYaml } from "./external-process.js"; +import { PythonEmitterOptions, PythonSdkContext } from "./lib.js"; +import { removeUnderscoresFromNamespace } from "./utils.js"; + +export function getModelsMode(context: SdkContext): "dpg" | "none" { + const specifiedModelsMode = context.emitContext.options["models-mode"]; + if (specifiedModelsMode) { + const modelModes = ["dpg", "none"]; + if (modelModes.includes(specifiedModelsMode)) { + return specifiedModelsMode; + } + throw new Error( + `Need to specify models mode with the following values: ${modelModes.join(", ")}`, + ); + } + return "dpg"; +} + +function addDefaultOptions(sdkContext: SdkContext) { + const defaultOptions = { + "package-version": "1.0.0b1", + "generate-packaging-files": true, + flavor: undefined, + }; + sdkContext.emitContext.options = { + ...defaultOptions, + ...sdkContext.emitContext.options, + }; + const options = sdkContext.emitContext.options; + options["models-mode"] = getModelsMode(sdkContext); + if (options["generate-packaging-files"]) { + options["package-mode"] = sdkContext.arm ? 
"azure-mgmt" : "azure-dataplane"; + } + if (!options["package-name"]) { + options["package-name"] = removeUnderscoresFromNamespace( + sdkContext.sdkPackage.rootNamespace.toLowerCase(), + ).replace(/\./g, "-"); + } + if (options.flavor !== "azure") { + // if they pass in a flavor other than azure, we want to ignore the value + options.flavor = undefined; + } + if (!options.flavor && sdkContext.emitContext.emitterOutputDir.includes("azure")) { + options.flavor = "azure"; + } +} + +async function createPythonSdkContext( + context: EmitContext, +): Promise> { + return { + ...(await createSdkContext( + context, + "@typespec/http-client-python", + { + additionalDecorators: ["TypeSpec\\.@encodedName"], + }, + )), + __endpointPathParameters: [], + }; +} + +export async function $onEmit(context: EmitContext) { + const program = context.program; + const sdkContext = await createPythonSdkContext(context); + const root = path.join(dirname(fileURLToPath(import.meta.url)), "..", ".."); + const outputDir = context.emitterOutputDir; + const yamlMap = emitCodeModel(sdkContext); + addDefaultOptions(sdkContext); + const yamlPath = await saveCodeModelAsYaml("typespec-python-yaml-map", yamlMap); + let venvPath = path.join(root, "venv"); + if (fs.existsSync(path.join(venvPath, "bin"))) { + venvPath = path.join(venvPath, "bin", "python"); + } else if (fs.existsSync(path.join(venvPath, "Scripts"))) { + venvPath = path.join(venvPath, "Scripts", "python.exe"); + } else { + throw new Error("Virtual environment doesn't exist."); + } + const commandArgs = [ + venvPath, + `${root}/eng/scripts/setup/run_tsp.py`, + `--output-folder=${outputDir}`, + `--cadl-file=${yamlPath}`, + ]; + const resolvedOptions = sdkContext.emitContext.options; + if (resolvedOptions["packaging-files-config"]) { + const keyValuePairs = Object.entries(resolvedOptions["packaging-files-config"]).map( + ([key, value]) => { + return `${key}:${value}`; + }, + ); + commandArgs.push(`--packaging-files-config='${keyValuePairs.join("|")}'`); + resolvedOptions["packaging-files-config"] = undefined; + } + if ( + resolvedOptions["package-pprint-name"] !== undefined && + !resolvedOptions["package-pprint-name"].startsWith('"') + ) { + resolvedOptions["package-pprint-name"] = `"${resolvedOptions["package-pprint-name"]}"`; + } + + for (const [key, value] of Object.entries(resolvedOptions)) { + commandArgs.push(`--${key}=${value}`); + } + if (sdkContext.arm === true) { + commandArgs.push("--azure-arm=true"); + } + if (resolvedOptions.flavor === "azure") { + commandArgs.push("--emit-cross-language-definition-file=true"); + } + commandArgs.push("--from-typespec=true"); + if (!program.compilerOptions.noEmit && !program.hasError()) { + execSync(commandArgs.join(" ")); + } +} diff --git a/packages/http-client-python/emitter/src/external-process.ts b/packages/http-client-python/emitter/src/external-process.ts new file mode 100644 index 0000000000..1e3ca74fb6 --- /dev/null +++ b/packages/http-client-python/emitter/src/external-process.ts @@ -0,0 +1,52 @@ +import { joinPaths } from "@typespec/compiler"; +import { ChildProcess, spawn, SpawnOptions } from "child_process"; +import { randomUUID } from "crypto"; +import { mkdir, writeFile } from "fs/promises"; +import jsyaml from "js-yaml"; +import os from "os"; + +const cadlCodeGenTempDir = joinPaths(os.tmpdir(), "cadl-codegen"); + +export function createTempPath(extension: string, prefix: string = "") { + return joinPaths(cadlCodeGenTempDir, prefix + randomUUID() + extension); +} + +/** + * Save the given codemodel in a yaml 
file. + * @param name Name of the codemodel. To give a guide to the temp file name. + * @param codemodel Codemodel to save + * @return the absolute path to the created codemodel. + */ +export async function saveCodeModelAsYaml(name: string, codemodel: unknown): Promise { + await mkdir(cadlCodeGenTempDir, { recursive: true }); + const filename = createTempPath(".yaml", name); + const yamlStr = jsyaml.dump(codemodel); + await writeFile(filename, yamlStr); + return filename; +} + +/** + * Start external process async + * @param command Command to run. This is the just the executable path or name. + * @param args Command arguments. + * @param options Options + */ +export async function execAsync( + command: string, + args: string[], + options: SpawnOptions = {}, +): Promise<{ exitCode: number; proc: ChildProcess }> { + const child = spawn(command, args, { stdio: "inherit", ...options }); + return new Promise((resolve, reject) => { + child.on("error", (error) => { + reject(error); + }); + + child.on("exit", (exitCode) => { + resolve({ + exitCode: exitCode ?? -1, + proc: child, + }); + }); + }); +} diff --git a/packages/http-client-python/emitter/src/http.ts b/packages/http-client-python/emitter/src/http.ts new file mode 100644 index 0000000000..e684ed23b6 --- /dev/null +++ b/packages/http-client-python/emitter/src/http.ts @@ -0,0 +1,382 @@ +import { + SdkBasicServiceMethod, + SdkBodyParameter, + SdkClientType, + SdkHeaderParameter, + SdkHttpOperation, + SdkHttpOperationExample, + SdkHttpResponse, + SdkLroPagingServiceMethod, + SdkLroServiceMethod, + SdkPagingServiceMethod, + SdkPathParameter, + SdkQueryParameter, + SdkServiceMethod, + SdkServiceResponseHeader, + UsageFlags, +} from "@azure-tools/typespec-client-generator-core"; +import { HttpStatusCodeRange } from "@typespec/http"; +import { PythonSdkContext } from "./lib.js"; +import { KnownTypes, getType } from "./types.js"; +import { + camelToSnakeCase, + emitParamBase, + getAddedOn, + getDelimiterAndExplode, + getDescriptionAndSummary, + getImplementation, + isAbstract, + isAzureCoreErrorResponse, +} from "./utils.js"; + +function isContentTypeParameter(parameter: SdkHeaderParameter) { + return parameter.serializedName.toLowerCase() === "content-type"; +} + +function arrayToRecord(examples: SdkHttpOperationExample[] | undefined): Record { + const result: Record = {}; + if (examples) { + for (const [index, example] of examples.entries()) { + result[index] = { ...example.rawExample, "x-ms-original-file": example.filePath }; + } + } + return result; +} + +export function emitBasicHttpMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkBasicServiceMethod, + operationGroupName: string, +): Record[] { + return [ + { + ...emitHttpOperation(context, rootClient, operationGroupName, method.operation, method), + abstract: isAbstract(method), + internal: method.access === "internal", + name: camelToSnakeCase(method.name), + description: getDescriptionAndSummary(method).description, + summary: getDescriptionAndSummary(method).summary, + }, + ]; +} + +function emitInitialLroHttpMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkLroServiceMethod | SdkLroPagingServiceMethod, + operationGroupName: string, +): Record { + return { + ...emitHttpOperation(context, rootClient, operationGroupName, method.operation, method), + name: `_${camelToSnakeCase(method.name)}_initial`, + isLroInitialOperation: true, + wantTracing: false, + exposeStreamKeyword: false, + description: 
getDescriptionAndSummary(method).description, + summary: getDescriptionAndSummary(method).summary, + }; +} + +function addLroInformation( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkLroServiceMethod | SdkLroPagingServiceMethod, + operationGroupName: string, +) { + return { + ...emitHttpOperation(context, rootClient, operationGroupName, method.operation, method), + name: camelToSnakeCase(method.name), + discriminator: "lro", + initialOperation: emitInitialLroHttpMethod(context, rootClient, method, operationGroupName), + exposeStreamKeyword: false, + description: getDescriptionAndSummary(method).description, + summary: getDescriptionAndSummary(method).summary, + }; +} + +function addPagingInformation( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkPagingServiceMethod | SdkLroPagingServiceMethod, + operationGroupName: string, +) { + for (const response of method.operation.responses.values()) { + if (response.type) { + getType(context, response.type)["usage"] = UsageFlags.None; + } + } + const itemType = getType(context, method.response.type!); + const base = emitHttpOperation(context, rootClient, operationGroupName, method.operation, method); + base.responses.forEach((resp: Record) => { + resp.type = itemType; + }); + return { + ...base, + name: camelToSnakeCase(method.name), + discriminator: "paging", + exposeStreamKeyword: false, + itemName: method.response.resultPath, + continuationTokenName: method.nextLinkPath, + itemType, + description: getDescriptionAndSummary(method).description, + summary: getDescriptionAndSummary(method).summary, + }; +} + +export function emitLroHttpMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkLroServiceMethod, + operationGroupName: string, +): Record[] { + const lroMethod = addLroInformation(context, rootClient, method, operationGroupName); + return [lroMethod.initialOperation, lroMethod]; +} + +export function emitPagingHttpMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkPagingServiceMethod, + operationGroupName: string, +): Record[] { + const pagingMethod = addPagingInformation(context, rootClient, method, operationGroupName); + return [pagingMethod]; +} + +export function emitLroPagingHttpMethod( + context: PythonSdkContext, + rootClient: SdkClientType, + method: SdkLroPagingServiceMethod, + operationGroupName: string, +): Record[] { + const pagingMethod = addPagingInformation(context, rootClient, method, operationGroupName); + const lroMethod = addLroInformation(context, rootClient, method, operationGroupName); + return [lroMethod.initialOperation, pagingMethod, lroMethod]; +} + +function emitHttpOperation( + context: PythonSdkContext, + rootClient: SdkClientType, + operationGroupName: string, + operation: SdkHttpOperation, + method: SdkServiceMethod, +): Record { + const responses: Record[] = []; + const exceptions: Record[] = []; + for (const [statusCodes, response] of operation.responses) { + responses.push(emitHttpResponse(context, statusCodes, response, method)!); + } + for (const [statusCodes, exception] of operation.exceptions) { + exceptions.push(emitHttpResponse(context, statusCodes, exception, undefined, true)!); + } + const result = { + url: operation.path, + method: operation.verb.toUpperCase(), + parameters: emitHttpParameters(context, rootClient, operation), + bodyParameter: emitHttpBodyParameter(context, operation.bodyParam), + responses, + exceptions, + groupName: operationGroupName, + addedOn: method ? 
getAddedOn(context, method) : "", + discriminator: "basic", + isOverload: false, + overloads: [], + apiVersions: [], + wantTracing: true, + exposeStreamKeyword: true, + crossLanguageDefinitionId: method?.crossLanguageDefintionId, + samples: arrayToRecord(method?.operation.examples), + }; + if (result.bodyParameter && isSpreadBody(operation.bodyParam)) { + result.bodyParameter["propertyToParameterName"] = {}; + result.bodyParameter["defaultToUnsetSentinel"] = true; + // if body type is not only used for this spread body, but also used in other input/output, we should clone it, then change the type base to json + if ( + (result.bodyParameter.type.usage & UsageFlags.Input) > 0 || + (result.bodyParameter.type.usage & UsageFlags.Output) > 0 + ) { + result.bodyParameter.type = { ...result.bodyParameter.type, name: `${method.name}Request` }; + } + result.bodyParameter.type.base = "json"; + for (const property of result.bodyParameter.type.properties) { + result.bodyParameter["propertyToParameterName"][property["wireName"]] = + property["clientName"]; + result.parameters.push(emitFlattenedParameter(result.bodyParameter, property)); + } + } + return result; +} + +function isSpreadBody(bodyParam: SdkBodyParameter | undefined): boolean { + return ( + bodyParam?.type.kind === "model" && + bodyParam.type !== bodyParam.correspondingMethodParams[0]?.type + ); +} + +function emitFlattenedParameter( + bodyParameter: Record, + property: Record, +): Record { + return { + checkClientInput: false, + clientDefaultValue: null, + clientName: property.clientName, + delimiter: null, + description: property.description, + implementation: "Method", + inDocstring: true, + inFlattenedBody: true, + inOverload: false, + inOverridden: false, + isApiVersion: bodyParameter["isApiVersion"], + location: "other", + optional: property["optional"], + wireName: null, + skipUrlEncoding: false, + type: property["type"], + defaultToUnsetSentinel: true, + }; +} + +function emitHttpPathParameter( + context: PythonSdkContext, + parameter: SdkPathParameter, +) { + const base = emitParamBase(context, parameter); + return { + ...base, + wireName: parameter.serializedName, + location: parameter.kind, + implementation: getImplementation(context, parameter), + clientDefaultValue: parameter.clientDefaultValue, + skipUrlEncoding: parameter.allowReserved, + }; +} +function emitHttpHeaderParameter( + context: PythonSdkContext, + parameter: SdkHeaderParameter, +): Record { + const base = emitParamBase(context, parameter); + const [delimiter, explode] = getDelimiterAndExplode(parameter); + let clientDefaultValue = parameter.clientDefaultValue; + if (isContentTypeParameter(parameter)) { + // we switch to string type for content-type header + if (!clientDefaultValue && parameter.type.kind === "constant") { + clientDefaultValue = parameter.type.value; + } + base.type = KnownTypes.string; + } + return { + ...base, + wireName: parameter.serializedName, + location: parameter.kind, + implementation: getImplementation(context, parameter), + delimiter, + explode, + clientDefaultValue, + }; +} + +function emitHttpQueryParameter( + context: PythonSdkContext, + parameter: SdkQueryParameter, +): Record { + const base = emitParamBase(context, parameter); + const [delimiter, explode] = getDelimiterAndExplode(parameter); + return { + ...base, + wireName: parameter.serializedName, + location: parameter.kind, + implementation: getImplementation(context, parameter), + delimiter, + explode, + clientDefaultValue: parameter.clientDefaultValue, + }; +} + +function 
emitHttpParameters( + context: PythonSdkContext, + rootClient: SdkClientType, + operation: SdkHttpOperation, +): Record[] { + const parameters: Record[] = [...context.__endpointPathParameters]; + for (const parameter of operation.parameters) { + switch (parameter.kind) { + case "header": + parameters.push(emitHttpHeaderParameter(context, parameter)); + break; + case "query": + parameters.push(emitHttpQueryParameter(context, parameter)); + break; + case "path": + parameters.push(emitHttpPathParameter(context, parameter)); + break; + } + } + return parameters; +} + +function emitHttpBodyParameter( + context: PythonSdkContext, + bodyParam?: SdkBodyParameter, +): Record | undefined { + if (bodyParam === undefined) return undefined; + return { + ...emitParamBase(context, bodyParam), + contentTypes: bodyParam.contentTypes, + location: bodyParam.kind, + clientName: bodyParam.isGeneratedName ? "body" : camelToSnakeCase(bodyParam.name), + wireName: bodyParam.isGeneratedName ? "body" : bodyParam.name, + implementation: getImplementation(context, bodyParam), + clientDefaultValue: bodyParam.clientDefaultValue, + defaultContentType: bodyParam.defaultContentType, + }; +} + +function emitHttpResponse( + context: PythonSdkContext, + statusCodes: HttpStatusCodeRange | number | "*", + response: SdkHttpResponse, + method?: SdkServiceMethod, + isException = false, +): Record | undefined { + if (!response) return undefined; + let type = undefined; + if (isException) { + if (response.type && !isAzureCoreErrorResponse(response.type)) { + type = getType(context, response.type); + } + } else if (method && !method.kind.includes("basic")) { + if (method.response.type) { + type = getType(context, method.response.type); + } + } else if (response.type) { + type = getType(context, response.type); + } + return { + headers: response.headers.map((x) => emitHttpResponseHeader(context, x)), + statusCodes: + typeof statusCodes === "object" + ? [(statusCodes as HttpStatusCodeRange).start] + : statusCodes === "*" + ? ["default"] + : [statusCodes], + discriminator: "basic", + type, + contentTypes: response.contentTypes, + defaultContentType: response.defaultContentType ?? 
"application/json", + resultProperty: method?.response.resultPath, + }; +} + +function emitHttpResponseHeader( + context: PythonSdkContext, + header: SdkServiceResponseHeader, +): Record { + return { + type: getType(context, header.type), + wireName: header.serializedName, + }; +} diff --git a/packages/http-client-python/emitter/src/index.ts b/packages/http-client-python/emitter/src/index.ts new file mode 100644 index 0000000000..627847c619 --- /dev/null +++ b/packages/http-client-python/emitter/src/index.ts @@ -0,0 +1,2 @@ +export * from "./emitter.js"; +export { $lib } from "./lib.js"; diff --git a/packages/http-client-python/emitter/src/lib.ts b/packages/http-client-python/emitter/src/lib.ts new file mode 100644 index 0000000000..0f16932104 --- /dev/null +++ b/packages/http-client-python/emitter/src/lib.ts @@ -0,0 +1,59 @@ +import { SdkContext, SdkServiceOperation } from "@azure-tools/typespec-client-generator-core"; +import { createTypeSpecLibrary, JSONSchemaType } from "@typespec/compiler"; + +export interface PythonEmitterOptions { + "package-version"?: string; + "package-name"?: string; + "output-dir"?: string; + "generate-packaging-files"?: boolean; + "packaging-files-dir"?: string; + "packaging-files-config"?: object; + "package-pprint-name"?: string; + "head-as-boolean"?: boolean; + "models-mode"?: string; + tracing?: boolean; + "company-name"?: string; + "generate-test"?: boolean; + debug?: boolean; + flavor?: "azure"; + "examples-dir"?: string; +} + +export interface PythonSdkContext + extends SdkContext { + __endpointPathParameters: Record[]; +} + +const EmitterOptionsSchema: JSONSchemaType = { + type: "object", + additionalProperties: true, + properties: { + "package-version": { type: "string", nullable: true }, + "package-name": { type: "string", nullable: true }, + "output-dir": { type: "string", nullable: true }, + "generate-packaging-files": { type: "boolean", nullable: true }, + "packaging-files-dir": { type: "string", nullable: true }, + "packaging-files-config": { type: "object", nullable: true }, + "package-pprint-name": { type: "string", nullable: true }, + "head-as-boolean": { type: "boolean", nullable: true }, + "models-mode": { type: "string", nullable: true }, + tracing: { type: "boolean", nullable: true }, + "company-name": { type: "string", nullable: true }, + "generate-test": { type: "boolean", nullable: true }, + debug: { type: "boolean", nullable: true }, + flavor: { type: "string", nullable: true }, + "examples-dir": { type: "string", nullable: true, format: "absolute-path" }, + }, + required: [], +}; + +const libDef = { + name: "@azure-tools/typespec-python", + diagnostics: {}, + emitter: { + options: EmitterOptionsSchema as JSONSchemaType, + }, +} as const; + +export const $lib = createTypeSpecLibrary(libDef); +export const { reportDiagnostic, createStateSymbol, getTracer } = $lib; diff --git a/packages/http-client-python/emitter/src/types.ts b/packages/http-client-python/emitter/src/types.ts new file mode 100644 index 0000000000..6c9d4b0571 --- /dev/null +++ b/packages/http-client-python/emitter/src/types.ts @@ -0,0 +1,565 @@ +import { + SdkArrayType, + SdkBodyModelPropertyType, + SdkBuiltInType, + SdkConstantType, + SdkCredentialType, + SdkDateTimeType, + SdkDictionaryType, + SdkDurationType, + SdkEndpointType, + SdkEnumType, + SdkEnumValueType, + SdkModelPropertyType, + SdkModelType, + SdkServiceOperation, + SdkType, + SdkUnionType, + UsageFlags, +} from "@azure-tools/typespec-client-generator-core"; +import { Type } from "@typespec/compiler"; +import 
{ HttpAuth, Visibility } from "@typespec/http"; +import { dump } from "js-yaml"; +import { PythonSdkContext } from "./lib.js"; +import { camelToSnakeCase, emitParamBase, getAddedOn, getImplementation } from "./utils.js"; + +export const typesMap = new Map>(); +export const simpleTypesMap = new Map>(); +export const disableGenerationMap = new Set(); + +export interface CredentialType { + kind: "Credential"; + scheme: HttpAuth; +} + +export interface CredentialTypeUnion { + kind: "CredentialTypeUnion"; + types: CredentialType[]; +} + +interface MultiPartFileType { + kind: "multipartfile"; + type: SdkType; +} + +function isEmptyModel(type: SdkType): boolean { + // object, {} will be treated as empty model, user defined empty model will not + return ( + type.kind === "model" && + type.properties.length === 0 && + !type.baseModel && + !type.discriminatedSubtypes && + !type.discriminatorValue && + (type.isGeneratedName || type.name === "object") + ); +} + +export function getSimpleTypeResult(result: Record): Record { + const key = dump(result, { sortKeys: true }); + const value = simpleTypesMap.get(key); + if (value) { + result = value; + } else { + simpleTypesMap.set(key, result); + } + return result; +} + +export function getType( + context: PythonSdkContext, + type: CredentialType | CredentialTypeUnion | Type | SdkType | MultiPartFileType, +): Record { + switch (type.kind) { + case "model": + return emitModel(context, type); + case "union": + return emitUnion(context, type); + case "enum": + return emitEnum(type); + case "constant": + return emitConstant(type)!; + case "array": + case "dict": + return emitArrayOrDict(context, type)!; + case "utcDateTime": + case "offsetDateTime": + case "duration": + return emitDurationOrDateType(type); + case "enumvalue": + return emitEnumMember(type, emitEnum(type.enumType)); + case "credential": + return emitCredential(type); + case "bytes": + case "boolean": + case "plainDate": + case "plainTime": + case "numeric": + case "integer": + case "safeint": + case "int8": + case "uint8": + case "int16": + case "uint16": + case "int32": + case "uint32": + case "int64": + case "uint64": + case "float": + case "float32": + case "float64": + case "decimal": + case "decimal128": + case "string": + case "url": + return emitBuiltInType(type); + case "any": + return KnownTypes.any; + case "nullable": + return getType(context, type.type); + case "multipartfile": + return emitMultiPartFile(context, type); + default: + throw Error(`Not supported ${type.kind}`); + } +} + +function emitMultiPartFile( + context: PythonSdkContext, + type: MultiPartFileType, +): Record { + if (type.type.kind === "array") { + return getSimpleTypeResult({ + type: "list", + elementType: getType(context, createMultiPartFileType(type.type.valueType)), + }); + } + return getSimpleTypeResult({ + type: type.kind, + description: type.type.description, + }); +} + +function emitCredential(credential: SdkCredentialType): Record { + let credential_type: Record = {}; + const scheme = credential.scheme; + if (scheme.type === "oauth2") { + credential_type = { + type: "OAuth2", + policy: { + type: "BearerTokenCredentialPolicy", + credentialScopes: [], + }, + }; + for (const flow of scheme.flows) { + for (const scope of flow.scopes) { + credential_type.policy.credentialScopes.push(scope.value); + } + credential_type.policy.credentialScopes.push(); + } + } else if (scheme.type === "apiKey") { + credential_type = { + type: "Key", + policy: { + type: "KeyCredentialPolicy", + key: scheme.name, + }, + }; + } else if 
(scheme.type === "http") { + credential_type = { + type: "Key", + policy: { + type: "KeyCredentialPolicy", + key: "Authorization", + scheme: scheme.scheme[0].toUpperCase() + scheme.scheme.slice(1), + }, + }; + } + return getSimpleTypeResult(credential_type); +} + +function visibilityMapping(visibility?: Visibility[]): string[] | undefined { + if (visibility === undefined) { + return undefined; + } + const result = []; + for (const v of visibility) { + if (v === Visibility.Read) { + result.push("read"); + } else if (v === Visibility.Create) { + result.push("create"); + } else if (v === Visibility.Update) { + result.push("update"); + } else if (v === Visibility.Delete) { + result.push("delete"); + } else if (v === Visibility.Query) { + result.push("query"); + } + } + return result; +} + +function createMultiPartFileType(type: SdkType): MultiPartFileType { + return { kind: "multipartfile", type }; +} + +function addDisableGenerationMap(type: SdkType): void { + if (disableGenerationMap.has(type)) return; + + disableGenerationMap.add(type); + if (type.kind === "model" && type.baseModel) { + addDisableGenerationMap(type.baseModel); + } else if (type.kind === "array") { + addDisableGenerationMap(type.valueType); + } +} + +function emitProperty( + context: PythonSdkContext, + model: SdkModelType, + property: SdkBodyModelPropertyType, +): Record { + const isMultipartFileInput = property.multipartOptions?.isFilePart; + let sourceType: SdkType | MultiPartFileType = property.type; + if (isMultipartFileInput) { + sourceType = createMultiPartFileType(property.type); + } else if (property.type.kind === "model") { + const body = property.type.properties.find((x) => x.kind === "body"); + if (body) { + // for `temperature: HttpPart<{@body body: float64, @header contentType: "text/plain"}>`, the real type is float64 + sourceType = body.type; + } + } + if (isMultipartFileInput) { + // Python convert all the type of file part to FileType so clear these models' usage so that they won't be generated + addDisableGenerationMap(property.type); + } + return { + clientName: camelToSnakeCase(property.name), + wireName: property.serializedName, + type: getType(context, sourceType), + optional: property.optional, + description: property.description, + addedOn: getAddedOn(context, property), + visibility: visibilityMapping(property.visibility), + isDiscriminator: property.discriminator, + flatten: property.flatten, + isMultipartFileInput: isMultipartFileInput, + xmlMetadata: model.usage & UsageFlags.Xml ? getXmlMetadata(property) : undefined, + }; +} + +function emitModel( + context: PythonSdkContext, + type: SdkModelType, +): Record { + if (isEmptyModel(type)) { + return KnownTypes.any; + } + if (typesMap.has(type)) { + return typesMap.get(type)!; + } + if (type.crossLanguageDefinitionId === "Azure.Core.Foundations.Error") { + return { + type: "sdkcore", + name: "HttpResponseError", + submodule: "exceptions", + }; + } + const parents: Record[] = []; + const newValue = { + type: type.kind, + name: type.name, + description: type.description, + parents: parents, + discriminatorValue: type.discriminatorValue, + discriminatedSubtypes: {} as Record>, + properties: new Array>(), + snakeCaseName: camelToSnakeCase(type.name), + base: "dpg", + internal: type.access === "internal", + crossLanguageDefinitionId: type.crossLanguageDefinitionId, + usage: type.usage, + isXml: type.usage & UsageFlags.Xml ? true : false, + xmlMetadata: type.usage & UsageFlags.Xml ? 
getXmlMetadata(type) : undefined, + }; + + typesMap.set(type, newValue); + newValue.parents = type.baseModel ? [getType(context, type.baseModel)] : newValue.parents; + for (const property of type.properties.values()) { + if (property.kind === "property") { + newValue.properties.push(emitProperty(context, type, property)); + // type for base discriminator returned by TCGC changes from constant to string while + // autorest treat all discriminator as constant type, so we need to change to constant type here + if (type.discriminatedSubtypes && property.discriminator) { + newValue.properties[newValue.properties.length - 1].isPolymorphic = true; + if (property.type.kind === "string") { + newValue.properties[newValue.properties.length - 1].type = getConstantType(null); + } + } + } + } + if (type.discriminatedSubtypes) { + for (const key in type.discriminatedSubtypes) { + newValue.discriminatedSubtypes[key] = getType(context, type.discriminatedSubtypes[key]); + } + } + return newValue; +} + +function emitEnum(type: SdkEnumType): Record { + if (typesMap.has(type)) { + return typesMap.get(type)!; + } + if (type.isGeneratedName) { + const types = []; + for (const value of type.values) { + types.push( + getSimpleTypeResult({ + type: "constant", + value: value.value, + valueType: emitBuiltInType(type.valueType), + }), + ); + } + if (!type.isFixed) { + types.push(emitBuiltInType(type.valueType)); + } + return { + description: "", + internal: true, + type: "combined", + types, + xmlMetadata: {}, + }; + } + const values: Record[] = []; + const name = type.name; + const newValue = { + name: name, + snakeCaseName: camelToSnakeCase(name), + description: type.description || `Type of ${name}`, + internal: type.access === "internal", + type: type.kind, + valueType: emitBuiltInType(type.valueType), + values, + xmlMetadata: {}, + crossLanguageDefinitionId: type.crossLanguageDefinitionId, + }; + for (const value of type.values) { + newValue.values.push(emitEnumMember(value, newValue)); + } + typesMap.set(type, newValue); + return newValue; +} + +function enumName(name: string): string { + if (name.toUpperCase() === name) { + return name; + } + return camelToSnakeCase(name).toUpperCase(); +} + +function emitEnumMember( + type: SdkEnumValueType, + enumType: Record, +): Record { + return { + name: enumName(type.name), + value: type.value, + description: type.description, + enumType, + type: type.kind, + valueType: enumType["valueType"], + }; +} + +function emitDurationOrDateType(type: SdkDurationType | SdkDateTimeType): Record { + return getSimpleTypeResult({ + ...emitBuiltInType(type), + wireType: emitBuiltInType(type.wireType), + }); +} + +function emitArrayOrDict( + context: PythonSdkContext, + type: SdkArrayType | SdkDictionaryType, +): Record { + const kind = type.kind === "array" ? 
"list" : type.kind; + return getSimpleTypeResult({ + type: kind, + elementType: getType(context, type.valueType), + }); +} + +function emitConstant(type: SdkConstantType) { + return getSimpleTypeResult({ + type: type.kind, + value: type.value, + valueType: emitBuiltInType(type.valueType), + }); +} + +const sdkScalarKindToPythonKind: Record = { + numeric: "integer", + integer: "integer", + safeint: "integer", + int8: "integer", + uint8: "integer", + int16: "integer", + uint16: "integer", + int32: "integer", + uint32: "integer", + int64: "integer", + uint64: "integer", + float: "float", + float32: "float", + float64: "float", + decimal: "decimal", + decimal128: "decimal", + string: "string", + password: "string", + guid: "string", + url: "string", + uri: "string", + uuid: "string", + etag: "string", + armId: "string", + ipAddress: "string", + azureLocation: "string", +}; + +function emitBuiltInType( + type: SdkBuiltInType | SdkDurationType | SdkDateTimeType, +): Record { + if (type.kind === "duration" && type.encode === "seconds") { + return getSimpleTypeResult({ + type: sdkScalarKindToPythonKind[type.wireType.kind], + encode: type.encode, + }); + } + if (type.encode === "unixTimestamp") { + return getSimpleTypeResult({ + type: "unixtime", + encode: type.encode, + }); + } + return getSimpleTypeResult({ + type: sdkScalarKindToPythonKind[type.kind] || type.kind, // TODO: switch to kind + encode: type.encode, + }); +} + +function emitUnion( + context: PythonSdkContext, + type: SdkUnionType, +): Record { + return getSimpleTypeResult({ + name: type.isGeneratedName ? undefined : type.name, + snakeCaseName: type.isGeneratedName ? undefined : camelToSnakeCase(type.name), + description: type.isGeneratedName ? "" : `Type of ${type.name}`, + internal: true, + type: "combined", + types: type.values.map((x) => getType(context, x)), + xmlMetadata: {}, + }); +} + +export function getConstantType(key: string | null): Record { + const cache = simpleTypesMap.get(key); + if (cache) { + return cache; + } + const type = { + apiVersions: [], + type: "constant", + value: key, + valueType: KnownTypes.string, + xmlMetadata: {}, + }; + simpleTypesMap.set(key, type); + return type; +} + +export const KnownTypes = { + string: { type: "string" }, + anyObject: { type: "any-object" }, + any: { type: "any" }, +}; + +export function emitEndpointType( + context: PythonSdkContext, + type: SdkEndpointType, +): Record[] { + const params: Record[] = []; + for (const param of type.templateArguments) { + const paramBase = emitParamBase(context, param); + paramBase.clientName = context.arm ? 
"base_url" : paramBase.clientName; + params.push({ + ...paramBase, + optional: Boolean(param.clientDefaultValue), + wireName: param.name, + location: "endpointPath", + implementation: getImplementation(context, param), + clientDefaultValue: param.clientDefaultValue, + skipUrlEncoding: param.urlEncode === false, + }); + context.__endpointPathParameters!.push(params.at(-1)!); + } + return params; +} + +function getXmlMetadata(type: SdkType | SdkModelPropertyType): Record { + const xmlMetadata: Record = {}; + const xmlDecorators = type.decorators.filter( + (x) => x.name.startsWith("TypeSpec.Xml.") || x.name.startsWith("TypeSpec.@encodedName"), + ); + for (const decorator of xmlDecorators) { + switch (decorator.name) { + case "TypeSpec.@encodedName": + if (decorator.arguments["mimeType"] === "application/xml") { + xmlMetadata["name"] = decorator.arguments["name"]; + break; + } + continue; + case "TypeSpec.Xml.@attribute": + xmlMetadata["attribute"] = true; + break; + case "TypeSpec.Xml.@name": + xmlMetadata["name"] = decorator.arguments["name"]; + break; + case "TypeSpec.Xml.@ns": + if (decorator.arguments["ns"].kind === "enumvalue") { + xmlMetadata["namespace"] = (decorator.arguments["ns"] as SdkEnumValueType).value; + xmlMetadata["prefix"] = (decorator.arguments["ns"] as SdkEnumValueType).name; + } else { + xmlMetadata["namespace"] = decorator.arguments["ns"]; + xmlMetadata["prefix"] = decorator.arguments["prefix"]; + } + break; + case "TypeSpec.Xml.@unwrapped": + if (type.kind === "property" && type.type.kind === "array") { + xmlMetadata["unwrapped"] = true; + } else { + xmlMetadata["text"] = true; + } + break; + } + } + // add item metadata for array + if ( + type.kind === "property" && + type.type.kind === "array" && + type.type.valueType.kind !== "model" + ) { + const itemMetadata = getXmlMetadata(type.type.valueType); + // if array item is a primitive type, we need to use itemsName to change the name + if (Object.keys(itemMetadata).length > 0) { + xmlMetadata["itemsName"] = itemMetadata["name"]; + xmlMetadata["itemsNs"] = itemMetadata["namespace"]; + xmlMetadata["itemsPrefix"] = itemMetadata["prefix"]; + } else if (!xmlMetadata["unwrapped"]) { + xmlMetadata["itemsName"] = type.type.valueType.kind; + } + } + return xmlMetadata; +} diff --git a/packages/http-client-python/emitter/src/utils.ts b/packages/http-client-python/emitter/src/utils.ts new file mode 100644 index 0000000000..8d9972857e --- /dev/null +++ b/packages/http-client-python/emitter/src/utils.ts @@ -0,0 +1,215 @@ +import { + SdkHeaderParameter, + SdkHttpParameter, + SdkMethod, + SdkModelPropertyType, + SdkParameter, + SdkQueryParameter, + SdkServiceMethod, + SdkServiceOperation, + SdkType, +} from "@azure-tools/typespec-client-generator-core"; +import { getNamespaceFullName } from "@typespec/compiler"; +import { PythonSdkContext } from "./lib.js"; +import { getSimpleTypeResult, getType } from "./types.js"; + +function IsFullyUpperCase(identifier: string, maxUppercasePreserve: number) { + const len = identifier.length; + if (len > 1) { + if (len <= maxUppercasePreserve && identifier === identifier.toUpperCase()) { + return true; + } + + if (len <= maxUppercasePreserve + 1 && identifier.endsWith("s")) { + const i = identifier.substring(0, len - 1); + if (i.toUpperCase() === i) { + return true; + } + } + } + return false; +} + +function deconstruct( + identifier: string | Array, + maxUppercasePreserve: number, +): Array { + if (Array.isArray(identifier)) { + return [...identifier.flatMap((each) => deconstruct(each, 
maxUppercasePreserve))]; + } + + return `${identifier}` + .replace(/([a-z]+)([A-Z])/g, "$1 $2") // Add a space in between camelCase words(e.g. fooBar => foo Bar) + .replace(/(\d+)/g, " $1 ") // Adds a space after numbers(e.g. foo123 => foo123 bar) + .replace(/\b([A-Z]+)([A-Z])s([^a-z])(.*)/g, "$1$2« $3$4") // Add a space after a plural upper cased word(e.g. MBsFoo => MBs Foo) + .replace(/\b([A-Z]+)([A-Z])([a-z]+)/g, "$1 $2$3") // Add a space between an upper case word(2 char+) and the last capital case.(e.g. SQLConnection -> SQL Connection) + .replace(/«/g, "s") + .trim() + .split(/[\W|_]+/) + .map((each) => (IsFullyUpperCase(each, maxUppercasePreserve) ? each : each.toLowerCase())); +} + +function isEqual(s1: string, s2: string): boolean { + // when s2 is undefined and s1 is the string 'undefined', it returns 0, making this true. + // To prevent that, first we need to check if s2 is undefined. + return s2 !== undefined && !!s1 && !s1.localeCompare(s2, undefined, { sensitivity: "base" }); +} + +function removeSequentialDuplicates(identifier: Iterable) { + const ids = [...identifier].filter((each) => !!each); + for (let i = 0; i < ids.length; i++) { + while (isEqual(ids[i], ids[i - 1])) { + ids.splice(i, 1); + } + while (isEqual(ids[i], ids[i - 2]) && isEqual(ids[i + 1], ids[i - 1])) { + ids.splice(i, 2); + } + } + + return ids; +} + +function normalize( + identifier: string | Array, + removeDuplicates = true, + maxUppercasePreserve = 0, +): Array { + if (!identifier || identifier.length === 0) { + return [""]; + } + return typeof identifier === "string" + ? normalize( + deconstruct(identifier, maxUppercasePreserve), + removeDuplicates, + maxUppercasePreserve, + ) + : removeDuplicates + ? removeSequentialDuplicates(identifier) + : identifier; +} + +export function camelToSnakeCase(name: string): string { + if (!name) return name; + const words = normalize(name, false, 6); + const result = words.join("_").toLowerCase(); + const result_final = result.replace(/([^\d])_(\d+)/g, "$1$2"); + return result_final; +} + +export function removeUnderscoresFromNamespace(name?: string): string { + // needed because of the _specs_ tests + return (name || "").replace(/_/g, ""); +} + +export function getImplementation( + context: PythonSdkContext, + parameter: SdkParameter | SdkHttpParameter, +): "Client" | "Method" { + if (parameter.onClient) return "Client"; + return "Method"; +} + +export function isAbstract( + method: SdkServiceMethod, +): boolean { + return (method.operation.bodyParam?.contentTypes.length ?? 
0) > 1 && method.access !== "internal"; +} + +export function getDelimiterAndExplode( + parameter: SdkQueryParameter | SdkHeaderParameter, +): [string | undefined, boolean] { + if (parameter.type.kind !== "array") return [undefined, false]; + let delimiter: string | undefined = undefined; + let explode = parameter.kind === "query" && parameter.explode; + if (parameter.collectionFormat === "csv" || parameter.collectionFormat === "simple") { + delimiter = "comma"; + } else if (parameter.collectionFormat === "ssv") { + delimiter = "space"; + } else if (parameter.collectionFormat === "tsv") { + delimiter = "tab"; + } else if (parameter.collectionFormat === "pipes") { + delimiter = "pipe"; + } else { + explode = true; + } + return [delimiter, explode]; +} + +type ParamBase = { + optional: boolean; + description: string; + addedOn: string | undefined; + clientName: string; + inOverload: boolean; + isApiVersion: boolean; + type: Record; +}; + +export function getAddedOn( + context: PythonSdkContext, + type: SdkModelPropertyType | SdkMethod, +): string | undefined { + // since we do not support multi-service for now, we can just check the root client's api version + // if type is added in the first version of the client, we do not need to add the versioning info + if ( + type.apiVersions[0] === + context.sdkPackage.clients.find((c) => c.initialization.access === "public")?.apiVersions[0] + ) + return undefined; + return type.apiVersions[0]; +} + +export function emitParamBase( + context: PythonSdkContext, + parameter: SdkParameter | SdkHttpParameter, +): ParamBase { + let type = getType(context, parameter.type); + if (parameter.isApiVersionParam) { + if (parameter.clientDefaultValue) { + type = getSimpleTypeResult({ + type: "constant", + value: parameter.clientDefaultValue, + valueType: type, + }); + } + } + return { + optional: parameter.optional, + description: parameter.description || "", + addedOn: getAddedOn(context, parameter), + clientName: camelToSnakeCase(parameter.name), + inOverload: false, + isApiVersion: parameter.isApiVersionParam, + type, + }; +} + +export function isAzureCoreErrorResponse(t: SdkType | undefined): boolean { + if (!t) return false; + const tspType = t.__raw; + if (!tspType) return false; + return ( + tspType.kind === "Model" && + tspType.namespace !== undefined && + ["Azure.Core", "Azure.Core.Foundations"].includes(getNamespaceFullName(tspType.namespace)) && + tspType.name === "ErrorResponse" + ); +} + +export function getDescriptionAndSummary( + method: SdkMethod, +): { description?: string; summary?: string } { + if (method.details) { + return { + description: method.details, + summary: method.description, + }; + } + return { + description: method.description ?? 
"", + }; +} + +export function capitalize(name: string): string { + return name[0].toUpperCase() + name.slice(1); +} diff --git a/packages/http-client-python/emitter/tsconfig.build.json b/packages/http-client-python/emitter/tsconfig.build.json new file mode 100644 index 0000000000..663c3cc0d5 --- /dev/null +++ b/packages/http-client-python/emitter/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "noEmit": false, + "rootDir": "./src", + "outDir": "../dist/emitter", + "tsBuildInfoFile": "temp/tsconfig.tsbuildinfo" + }, + "references": [], + "include": ["src/**/*"] +} diff --git a/packages/http-client-python/emitter/tsconfig.json b/packages/http-client-python/emitter/tsconfig.json new file mode 100644 index 0000000000..32cb2aff3f --- /dev/null +++ b/packages/http-client-python/emitter/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "noEmit": true + }, + "include": ["src/**/*"] +} diff --git a/packages/http-client-python/emitter/vitest.config.ts b/packages/http-client-python/emitter/vitest.config.ts new file mode 100644 index 0000000000..0e0e86fefc --- /dev/null +++ b/packages/http-client-python/emitter/vitest.config.ts @@ -0,0 +1,4 @@ +import { defineConfig, mergeConfig } from "vitest/config"; +import { defaultTypeSpecVitestConfig } from "../../../vitest.workspace.js"; + +export default mergeConfig(defaultTypeSpecVitestConfig, defineConfig({})); diff --git a/packages/http-client-python/eng/pipeline/publish.yml b/packages/http-client-python/eng/pipeline/publish.yml new file mode 100644 index 0000000000..949d19fc1b --- /dev/null +++ b/packages/http-client-python/eng/pipeline/publish.yml @@ -0,0 +1,32 @@ +trigger: + branches: + include: + - main + paths: + include: + - packages/http-client-python/ + +pr: none + +extends: + template: /eng/common/pipelines/templates/1es-redirect.yml + + parameters: + stages: + - template: /eng/emitters/pipelines/templates/stages/emitter-stages.yml + parameters: + BuildPrereleaseVersion: true + UseTypeSpecNext: false + Publish: "internal" + PublishDependsOnTest: true + PackagePath: /packages/http-client-python + EmitterPackageJsonPath: packages/http-client-python/package.json + Packages: + - name: typespec-http-client-python + file: typespec-http-client-python-*.tgz + type: npm + UnitTestArgs: -UnitTests + StagePrefix: "Python" + LanguageShortName: "python" + HasNugetPackages: true + CadlRanchName: "@typespec/http-client-python" diff --git a/packages/http-client-python/eng/pipeline/templates/ci-stages.yml b/packages/http-client-python/eng/pipeline/templates/ci-stages.yml new file mode 100644 index 0000000000..ba22fd71e1 --- /dev/null +++ b/packages/http-client-python/eng/pipeline/templates/ci-stages.yml @@ -0,0 +1,28 @@ +parameters: + - name: Condition + type: string + default: true + - name: DependsOn + type: object + default: [] + +stages: + - template: /eng/emitters/pipelines/templates/stages/emitter-stages.yml + parameters: + StagePrefix: Python + BuildPrereleaseVersion: true + UseTypeSpecNext: false + Publish: "none" + PackagePath: /packages/http-client-python + EmitterPackageJsonPath: packages/http-client-python/package.json + Packages: + - name: typespec-http-client-python + file: typespec-http-client-python-*.tgz + type: npm + UnitTestArgs: -UnitTests + TestMatrix: + RegenCheck: + TestArguments: -GenerationChecks + Condition: ${{ parameters.Condition }} + DependsOn: ${{ parameters.DependsOn }} + LanguageShortName: "python" diff --git 
a/packages/http-client-python/eng/scripts/Build-Packages.ps1 b/packages/http-client-python/eng/scripts/Build-Packages.ps1
new file mode 100644
index 0000000000..2368ccbcd6
--- /dev/null
+++ b/packages/http-client-python/eng/scripts/Build-Packages.ps1
@@ -0,0 +1,79 @@
+#Requires -Version 7.0
+
+param(
+    [string] $BuildNumber,
+    [string] $Output,
+    [switch] $Prerelease,
+    [string] $PublishType
+)
+
+function Write-PackageInfo {
+    param(
+        [string] $packageName,
+        [string] $directoryPath,
+        [string] $version
+    )
+
+    $packageInfoPath = "$outputPath/PackageInfo"
+
+    if (!(Test-Path $packageInfoPath)) {
+        New-Item -ItemType Directory -Force -Path $packageInfoPath | Out-Null
+    }
+
+    @{
+        Name = $packageName
+        Version = $version
+        DirectoryPath = $directoryPath
+        SdkType = "client"
+        IsNewSdk = $true
+        ReleaseStatus = "Unreleased"
+    } | ConvertTo-Json | Set-Content -Path "$packageInfoPath/$packageName.json"
+}
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 3.0
+$packageRoot = (Resolve-Path "$PSScriptRoot/../..").Path.Replace('\', '/')
+. "$packageRoot/../../eng/emitters/scripts/CommandInvocation-Helpers.ps1"
+Set-ConsoleEncoding
+
+Write-Host "Building packages for BuildNumber: '$BuildNumber', Output: '$Output', Prerelease: '$Prerelease', PublishType: '$PublishType'"
+
+$outputPath = $Output ? $Output : "$packageRoot/ci-build"
+
+# create the output folders
+$outputPath = New-Item -ItemType Directory -Force -Path $outputPath | Select-Object -ExpandProperty FullName
+New-Item -ItemType Directory -Force -Path "$outputPath/packages" | Out-Null
+
+Write-Host "Getting existing version"
+$emitterVersion = node -p -e "require('$packageRoot/package.json').version"
+
+# build and pack the emitter
+Push-Location "$packageRoot"
+try {
+    Write-Host "Working in $PWD"
+
+    Invoke-LoggedCommand "npm run build" -GroupOutput
+
+    Write-PackageInfo -packageName "typespec-http-client-python" -directoryPath "packages/http-client-python/emitter/src" -version $emitterVersion
+}
+finally {
+    Pop-Location
+}
+
+if ($PublishType -eq "internal") {
+    $feedUrl = "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest/npm/registry"
+
+    $overrides = @{
+        "@typespec/http-client-python" = "$feedUrl/@typespec/http-client-python/-/http-client-python-$emitterVersion.tgz"
+    }
+} else {
+    $overrides = @{}
+}
+
+$overrides | ConvertTo-Json | Set-Content "$outputPath/overrides.json"
+
+$packageMatrix = [ordered]@{
+    "emitter" = $emitterVersion
+}
+
+$packageMatrix | ConvertTo-Json | Set-Content "$outputPath/package-versions.json"
diff --git a/packages/http-client-python/eng/scripts/Check-GitChanges.ps1 b/packages/http-client-python/eng/scripts/Check-GitChanges.ps1
new file mode 100644
index 0000000000..c4bde91a49
--- /dev/null
+++ b/packages/http-client-python/eng/scripts/Check-GitChanges.ps1
@@ -0,0 +1,22 @@
+#Requires -Version 7.0
+
+param(
+    [string] $Exceptions
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 3.0
+$packageRoot = (Resolve-Path "$PSScriptRoot/../..").Path.Replace('\', '/')
+. 
"$packageRoot/../../eng/emitters/scripts/CommandInvocation-Helpers.ps1" +Set-ConsoleEncoding + +$diffExcludes = @( + "$packageRoot/package.json" + "$packageRoot/package-lock.json" +) | ForEach-Object { "`":(exclude)$_`"" } | Join-String -Separator ' ' + +Invoke-LoggedCommand "git -c core.safecrlf=false diff --ignore-space-at-eol --exit-code -- $diffExcludes" -IgnoreExitCode + +if($LastExitCode -ne 0) { + throw "Changes detected" +} diff --git a/packages/http-client-python/eng/scripts/Functions.ps1 b/packages/http-client-python/eng/scripts/Functions.ps1 new file mode 100644 index 0000000000..cc90c29037 --- /dev/null +++ b/packages/http-client-python/eng/scripts/Functions.ps1 @@ -0,0 +1,26 @@ +# Return list of nupkg artifacts +function Get-Package-Artifacts ($location, $filter) +{ + $filterToUse = $filter.StartsWith("typespec-") ? $filter.SubString(0, $filter.Length - 1) + ".api.json" : $filter + + $packages = Get-ChildItem -Path "$location/packages" -Filter $filterToUse -Recurse + if (!$packages) + { + Write-Host "$($location)/packages does not have any packages matching filter $($filterToUse)" + return $null + } + return $packages[0] +} + +function Find-Artifacts-For-Apireview($artifactDir, $packageName) +{ + # Find all nupkg files in given artifact directory + $package = Get-Package-Artifacts $artifactDir "$packageName*" + if (!$package) + { + Write-Host "Package is not available in artifact path $($artifactDir)/packages" + return $null + } + $packages = @{ $package.Name = $package.FullName } + return $packages +} diff --git a/packages/http-client-python/eng/scripts/Generate.ps1 b/packages/http-client-python/eng/scripts/Generate.ps1 new file mode 100644 index 0000000000..caa780a12c --- /dev/null +++ b/packages/http-client-python/eng/scripts/Generate.ps1 @@ -0,0 +1,11 @@ +#Requires -Version 7.0 + +Import-Module "$PSScriptRoot\Generation.psm1" -DisableNameChecking -Force; + +$repoRoot = Resolve-Path (Join-Path $PSScriptRoot '..' '..') + +Write-Host "Building project ..." +& npm run build + +Write-Host "Regenerating project ..." +& npm run regenerate diff --git a/packages/http-client-python/eng/scripts/Generation.psm1 b/packages/http-client-python/eng/scripts/Generation.psm1 new file mode 100644 index 0000000000..5f69ef0534 --- /dev/null +++ b/packages/http-client-python/eng/scripts/Generation.psm1 @@ -0,0 +1,22 @@ +$repoRoot = Resolve-Path (Join-Path $PSScriptRoot '..') + +function Invoke($command, $executePath=$repoRoot) +{ + Write-Host "> $command" + Push-Location $executePath + if ($IsLinux -or $IsMacOs) + { + sh -c "$command 2>&1" + } + else + { + cmd /c "$command 2>&1" + } + Pop-Location + + if($LastExitCode -ne 0) + { + Write-Error "Command failed to execute: $command" + } +} +Export-ModuleMember -Function "Invoke" diff --git a/packages/http-client-python/eng/scripts/Initialize-Repository.ps1 b/packages/http-client-python/eng/scripts/Initialize-Repository.ps1 new file mode 100644 index 0000000000..4320a53073 --- /dev/null +++ b/packages/http-client-python/eng/scripts/Initialize-Repository.ps1 @@ -0,0 +1,40 @@ +#Requires -Version 7.0 + +param( + [string] $BuildArtifactsPath, + [switch] $UseTypeSpecNext +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 3.0 +$packageRoot = (Resolve-Path "$PSScriptRoot/../..").Path.Replace('\', '/') +. 
"$packageRoot/../../eng/emitters/scripts/CommandInvocation-Helpers.ps1" +Set-ConsoleEncoding + +Push-Location "$packageRoot" +try { + if (Test-Path "./node_modules") { + Remove-Item -Recurse -Force "./node_modules" + } + + # install and list npm packages + + Invoke-LoggedCommand "npm ci" + + Invoke-LoggedCommand "npm ls -a" -GroupOutput + + Write-Host "artifactStagingDirectory: $env:BUILD_ARTIFACTSTAGINGDIRECTORY" + Write-Host "BuildArtifactsPath: $BuildArtifactsPath" + $artifactStagingDirectory = $env:BUILD_ARTIFACTSTAGINGDIRECTORY + if ($artifactStagingDirectory -and !$BuildArtifactsPath) { + $lockFilesPath = "$artifactStagingDirectory/lock-files" + New-Item -ItemType Directory -Path "$lockFilesPath/emitter" | Out-Null + + Write-Host "Copying emitter/package.json and emitter/package-lock.json to $lockFilesPath" + Copy-Item './package.json' "$lockFilesPath/emitter/package.json" -Force + Copy-Item './package-lock.json' "$lockFilesPath/emitter/package-lock.json" -Force + } +} +finally { + Pop-Location +} diff --git a/packages/http-client-python/eng/scripts/Test-Packages.ps1 b/packages/http-client-python/eng/scripts/Test-Packages.ps1 new file mode 100644 index 0000000000..4950938410 --- /dev/null +++ b/packages/http-client-python/eng/scripts/Test-Packages.ps1 @@ -0,0 +1,65 @@ +#Requires -Version 7.0 + +param( + [switch] $UnitTests, + [switch] $GenerationChecks, + [string] $Filter = "." +) + +$ErrorActionPreference = 'Stop' + +Set-StrictMode -Version 3.0 +$packageRoot = (Resolve-Path "$PSScriptRoot/../..").Path.Replace('\', '/') +. "$packageRoot/../../eng/emitters/scripts/CommandInvocation-Helpers.ps1" +Set-ConsoleEncoding + +Invoke-LoggedCommand "python --version" + +Push-Location $packageRoot +try { + if ($UnitTests) { + Push-Location "$packageRoot" + try { + + Write-Host "Updated PATH: $env:PATH" + # test the emitter + Invoke-LoggedCommand "npm run build" -GroupOutput + + } + finally { + Pop-Location + } + } + if ($GenerationChecks) { + Set-StrictMode -Version 1 + + # run E2E Test for TypeSpec emitter + Write-Host "Generating test projects ..." + & "$packageRoot/eng/scripts/Generate.ps1" + Write-Host 'Code generation is completed.' + + try { + Write-Host 'Checking for differences in generated code...' + & "$packageRoot/eng/scripts/Check-GitChanges.ps1" + Write-Host 'Done. No code generation differences detected.' + } + catch { + Write-Error 'Generated code is not up to date. Please run: eng/Generate.ps1' + } + + try { + Write-Host "Pip List" + & pip list + # Run tox + Write-Host 'Running tests' + & npm run test + Write-Host 'tox tests passed' + } + catch { + Write-Error "Cadl ranch tests failed: $_" + } + } +} +finally { + Pop-Location +} diff --git a/packages/http-client-python/eng/scripts/ci/format.ts b/packages/http-client-python/eng/scripts/ci/format.ts new file mode 100644 index 0000000000..a87d9d31e1 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/format.ts @@ -0,0 +1,3 @@ +import { runCommand } from "./utils.js"; + +runCommand("black . 
--config ./eng/scripts/ci/pyproject.toml", "black"); diff --git a/packages/http-client-python/eng/scripts/ci/lint.ts b/packages/http-client-python/eng/scripts/ci/lint.ts new file mode 100644 index 0000000000..fc1c7263c6 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/lint.ts @@ -0,0 +1,39 @@ +import { parseArgs } from "util"; +import { runCommand } from "./utils.js"; + +// PARSE INPUT ARGUMENTS + +const argv = parseArgs({ + args: process.argv.slice(2), + options: { + folderName: { type: "string" }, + command: { type: "string" }, + }, +}); + +export function pylint() { + runCommand(`pylint ${argv.values.folderName}/ --rcfile ./eng/scripts/ci/pylintrc`, "pylint"); +} + +export function mypy() { + runCommand(`mypy ${argv.values.folderName}/ --config-file ./eng/scripts/ci/mypy.ini`, "mypy"); +} + +export function pyright() { + runCommand( + `pyright ${argv.values.folderName}/ -p ./eng/scripts/ci/pyrightconfig.json`, + "pyright", + ); +} + +if (argv.values.command === "pylint") { + pylint(); +} else if (argv.values.command === "mypy") { + mypy(); +} else if (argv.values.command === "pyright") { + pyright(); +} else { + pylint(); + mypy(); + pyright(); +} diff --git a/packages/http-client-python/eng/scripts/ci/mypy.ini b/packages/http-client-python/eng/scripts/ci/mypy.ini new file mode 100644 index 0000000000..7eed55db03 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/mypy.ini @@ -0,0 +1,38 @@ +# global configurations +[mypy] +python_version = 3.8 + + +# module level configurations +[mypy-jsonrpc.*] +ignore_missing_imports = True + +[mypy-ptvsd.*] +ignore_missing_imports = True + +[mypy-debugpy.*] +ignore_missing_imports = True + +[mypy-m2r2.*] +ignore_missing_imports = True + +[mypy-autorest.common.utils.*] +ignore_missing_imports = True + +[mypy-autorest.common.python_mappings.*] +ignore_missing_imports = True + +[mypy-pygen.codegen.models.*] +ignore_missing_imports = True + +[mypy-setuptools] +ignore_missing_imports = True + +[mypy-*._patch] +ignore_missing_imports = True + +[mypy-pygen.*] +ignore_missing_imports = True + +[mypy-yaml.*] +ignore_missing_imports = True diff --git a/packages/http-client-python/eng/scripts/ci/pylintrc b/packages/http-client-python/eng/scripts/ci/pylintrc new file mode 100644 index 0000000000..baee280ac8 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/pylintrc @@ -0,0 +1,59 @@ +[MASTER] +py-version=3.8 +ignore-patterns=test_*,conftest,setup +reports=no + +# PYLINT DIRECTORY BLACKLIST. +ignore=_generated,samples,examples,test,tests,doc,.tox,generated_samples + +[MESSAGES CONTROL] + +# Add enable for useless disables +enable=useless-suppression + +# For all codes, run 'pylint --list-msgs' or go to 'https://pylint.readthedocs.io/en/latest/reference_guide/features.html' +# locally-disabled: Warning locally suppressed using disable-msg +# cyclic-import: because of https://github.com/PyCQA/pylint/issues/850 +# too-many-arguments: Due to the nature of the CLI many commands have large arguments set which reflect in large arguments set in corresponding methods. +# too-many-lines: Due to code generation many files end up with too many lines. 
+# Let's black deal with bad-continuation +disable=useless-object-inheritance,missing-docstring,locally-disabled,fixme,cyclic-import,too-many-arguments,invalid-name,duplicate-code,too-few-public-methods,consider-using-f-string,super-with-arguments,redefined-builtin,import-outside-toplevel,client-suffix-needed,unnecessary-dunder-call,unnecessary-ellipsis,disallowed-name,consider-using-max-builtin,too-many-lines,parse-error,useless-suppression,unknown-option-value + +[FORMAT] +max-line-length=120 + +[VARIABLES] +# Tells whether we should check for unused import in __init__ files. +init-import=yes + +[DESIGN] +# Maximum number of locals for function / method body +max-locals=25 +# Maximum number of branch for function / method body +max-branches=20 +# Maximum number of instance attributes for class +max-attributes=10 +# Maximum number of ancestors +max-parents=15 + +[SIMILARITIES] +min-similarity-lines=10 + +[BASIC] +# Naming hints based on PEP 8 (https://www.python.org/dev/peps/pep-0008/#naming-conventions). +# Consider these guidelines and not hard rules. Read PEP 8 for more details. + +# The invalid-name checker must be **enabled** for these hints to be used. +include-naming-hint=yes + +# keep short; underscores are discouraged +module-naming-style=snake_case +const-naming-style=UPPER_CASE +class-naming-style=PascalCase +class-attribute-naming-style=snake_case +attr-naming-style=snake_case +method-naming-style=snake_case +function-naming-style=snake_case +argument-naming-style=snake_case +variable-naming-style=snake_case +inlinevar-naming-style=snake_case diff --git a/packages/http-client-python/eng/scripts/ci/pyproject.toml b/packages/http-client-python/eng/scripts/ci/pyproject.toml new file mode 100644 index 0000000000..e135e49b5f --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/pyproject.toml @@ -0,0 +1,18 @@ +[tool.black] +exclude = ''' +/( + \.git + | \.hg + | \.mypy_cache + | \.tox + | .*venv.* + | _build + | buck-out + | build + | dist + | node_modules + | Expected/AcceptanceTests + | generated/ +)/ +''' +line-length = 120 diff --git a/packages/http-client-python/eng/scripts/ci/pyrightconfig.json b/packages/http-client-python/eng/scripts/ci/pyrightconfig.json new file mode 100644 index 0000000000..8992909c9c --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/pyrightconfig.json @@ -0,0 +1,6 @@ +{ + "reportUnnecessaryCast": "warning", + "reportTypeCommentUsage": true, + "reportMissingImports": false, + "pythonVersion": "3.8" +} diff --git a/packages/http-client-python/eng/scripts/ci/regenerate.ts b/packages/http-client-python/eng/scripts/ci/regenerate.ts new file mode 100644 index 0000000000..b9c2068f68 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/regenerate.ts @@ -0,0 +1,290 @@ +/* eslint-disable no-console */ +import { exec as execCallback } from "child_process"; +import { promises, rmSync } from "fs"; +import { dirname, join, relative, resolve } from "path"; +import { fileURLToPath } from "url"; +import { parseArgs, promisify } from "util"; + +// Promisify the exec function +const exec = promisify(execCallback); + +// Get the directory of the current file +const PLUGIN_DIR = resolve(fileURLToPath(import.meta.url), "../../../../"); +const CADL_RANCH_DIR = resolve(PLUGIN_DIR, "node_modules/@azure-tools/cadl-ranch-specs/http"); +interface TspCommand { + outputDir: string; + command: string; +} + +const EMITTER_OPTIONS: Record | Record[]> = { + "resiliency/srv-driven/old.tsp": { + "package-name": "resiliency-srv-driven1", + "package-mode": 
"azure-dataplane", + "package-pprint-name": "ResiliencySrvDriven1", + }, + "resiliency/srv-driven": { + "package-name": "resiliency-srv-driven2", + "package-mode": "azure-dataplane", + "package-pprint-name": "ResiliencySrvDriven2", + }, + "authentication/http/custom": { + "package-name": "authentication-http-custom", + }, + "authentication/union": { + "package-name": "authentication-union", + }, + "type/array": { + "package-name": "typetest-array", + }, + "type/dictionary": { + "package-name": "typetest-dictionary", + }, + "type/enum/extensible": { + "package-name": "typetest-enum-extensible", + }, + "type/enum/fixed": { + "package-name": "typetest-enum-fixed", + }, + "type/model/empty": { + "package-name": "typetest-model-empty", + }, + "type/model/inheritance/enum-discriminator": { + "package-name": "typetest-model-enumdiscriminator", + }, + "type/model/inheritance/nested-discriminator": { + "package-name": "typetest-model-nesteddiscriminator", + }, + "type/model/inheritance/not-discriminated": { + "package-name": "typetest-model-notdiscriminated", + }, + "type/model/inheritance/single-discriminator": { + "package-name": "typetest-model-singlediscriminator", + }, + "type/model/inheritance/recursive": { + "package-name": "typetest-model-recursive", + }, + "type/model/usage": { + "package-name": "typetest-model-usage", + }, + "type/model/visibility": [ + { "package-name": "typetest-model-visibility" }, + { "package-name": "headasbooleantrue", "head-as-boolean": "true" }, + { "package-name": "headasbooleanfalse", "head-as-boolean": "false" }, + ], + "type/property/nullable": { + "package-name": "typetest-property-nullable", + }, + "type/property/optionality": { + "package-name": "typetest-property-optional", + }, + "type/property/additional-properties": { + "package-name": "typetest-property-additionalproperties", + }, + "type/scalar": { + "package-name": "typetest-scalar", + }, + "type/property/value-types": { + "package-name": "typetest-property-valuetypes", + }, + "type/union": { + "package-name": "typetest-union", + }, + "azure/core/lro/rpc": { + "package-name": "azurecore-lro-rpc", + }, + "client/structure/multi-client": { + "package-name": "client-structure-multiclient", + }, + "client/structure/renamed-operation": { + "package-name": "client-structure-renamedoperation", + }, + "client/structure/two-operation-group": { + "package-name": "client-structure-twooperationgroup", + }, +}; + +function toPosix(dir: string): string { + return dir.replace(/\\/g, "/"); +} + +function getEmitterOption(spec: string): Record[] { + const relativeSpec = toPosix(relative(CADL_RANCH_DIR, spec)); + const key = relativeSpec.includes("resiliency/srv-driven/old.tsp") + ? relativeSpec + : dirname(relativeSpec); + const result = EMITTER_OPTIONS[key] || [{}]; + return Array.isArray(result) ? 
result : [result]; +} + +// Function to execute CLI commands asynchronously +async function executeCommand(tspCommand: TspCommand): Promise { + try { + rmSync(tspCommand.outputDir, { recursive: true, force: true }); + } catch (error) { + console.error(`rm error: ${error}`); + } + try { + console.log(`exec: ${tspCommand.command}`); + const { stdout, stderr } = await exec(tspCommand.command); + if (stdout) console.log(`stdout: ${stdout}`); + if (stderr) console.error(`stderr: ${stderr}`); + } catch (error) { + console.error(`exec error: ${error}`); + throw error; + } +} + +interface RegenerateFlagsInput { + flavor?: string; + debug?: boolean; + name?: string; +} + +interface RegenerateFlags { + flavor: string; + debug: boolean; + name?: string; +} + +const SpecialFlags: Record> = { + azure: { + "generate-test": true, + "generate-sample": true, + }, +}; + +async function getSubdirectories(baseDir: string, flags: RegenerateFlags): Promise { + const subdirectories: string[] = []; + + async function searchDir(currentDir: string) { + const items = await promises.readdir(currentDir, { withFileTypes: true }); + + const promisesArray = items.map(async (item) => { + const subDirPath = join(currentDir, item.name); + if (item.isDirectory()) { + const mainTspPath = join(subDirPath, "main.tsp"); + const clientTspPath = join(subDirPath, "client.tsp"); + + const mainTspRelativePath = toPosix(relative(baseDir, mainTspPath)); + if (flags.flavor === "unbranded" && mainTspRelativePath.includes("azure")) return; + + // after fix test generation for nested operation group, remove this check + if (mainTspRelativePath.includes("client-operation-group")) return; + + const hasMainTsp = await promises + .access(mainTspPath) + .then(() => true) + .catch(() => false); + const hasClientTsp = await promises + .access(clientTspPath) + .then(() => true) + .catch(() => false); + + if (mainTspRelativePath.toLowerCase().includes(flags.name || "")) { + if (mainTspRelativePath.includes("resiliency/srv-driven")) { + subdirectories.push(resolve(subDirPath, "old.tsp")); + } + if (hasClientTsp) { + subdirectories.push(resolve(subDirPath, "client.tsp")); + } else if (hasMainTsp) { + subdirectories.push(resolve(subDirPath, "main.tsp")); + } + } + + // Recursively search in the subdirectory + await searchDir(subDirPath); + } + }); + + await Promise.all(promisesArray); + } + + await searchDir(baseDir); + return subdirectories; +} + +function defaultPackageName(spec: string): string { + return toPosix(relative(CADL_RANCH_DIR, dirname(spec))) + .replace(/\//g, "-") + .toLowerCase(); +} + +interface EmitterConfig { + optionsStr: string; + outputDir: string; +} + +function addOptions( + spec: string, + generatedFolder: string, + flags: RegenerateFlags, +): EmitterConfig[] { + const emitterConfigs: EmitterConfig[] = []; + for (const config of getEmitterOption(spec)) { + const options: Record = { ...config }; + options["flavor"] = flags.flavor; + for (const [k, v] of Object.entries(SpecialFlags[flags.flavor] ?? 
{})) { + options[k] = v; + } + if (options["emitter-output-dir"] === undefined) { + const packageName = options["package-name"] || defaultPackageName(spec); + options["emitter-output-dir"] = toPosix( + `${generatedFolder}/test/${flags.flavor}/generated/${packageName}`, + ); + } + if (flags.debug) { + options["debug"] = "true"; + } + if (flags.flavor === "unbranded") { + options["company-name"] = "Unbranded"; + } + options["examples-dir"] = toPosix(join(dirname(spec), "examples")); + const configs = Object.entries(options).flatMap(([k, v]) => { + return `--option @typespec/http-client-python.${k}=${v}`; + }); + emitterConfigs.push({ + optionsStr: configs.join(" "), + outputDir: options["emitter-output-dir"], + }); + } + return emitterConfigs; +} +function _getCmdList(spec: string, flags: RegenerateFlags): TspCommand[] { + return addOptions(spec, PLUGIN_DIR, flags).map((option) => { + return { + outputDir: option.outputDir, + command: `tsp compile ${spec} --emit=${toPosix(PLUGIN_DIR)} ${option.optionsStr}`, + }; + }); +} + +async function regenerate(flags: RegenerateFlagsInput): Promise { + if (flags.flavor === undefined) { + await regenerate({ ...flags, flavor: "azure" }); + await regenerate({ ...flags, flavor: "unbranded" }); + } else { + const flagsResolved = { debug: false, flavor: flags.flavor, ...flags }; + const CADL_RANCH_DIR = resolve(PLUGIN_DIR, "node_modules/@azure-tools/cadl-ranch-specs/http"); + const subdirectories = await getSubdirectories(CADL_RANCH_DIR, flagsResolved); + const cmdList: TspCommand[] = subdirectories.flatMap((subdirectory) => + _getCmdList(subdirectory, flagsResolved), + ); + const PromiseCommands = cmdList.map((tspCommand) => executeCommand(tspCommand)); + await Promise.all(PromiseCommands); + } +} + +// PARSE INPUT ARGUMENTS + +const argv = parseArgs({ + args: process.argv.slice(2), + options: { + flavor: { type: "string" }, + name: { type: "string" }, + debug: { type: "boolean" }, + }, +}); + +regenerate(argv.values) + .then(() => console.log("Regeneration successful")) + .catch((error) => console.error(`Regeneration failed: ${error.message}`)); diff --git a/packages/http-client-python/eng/scripts/ci/run-tests.ts b/packages/http-client-python/eng/scripts/ci/run-tests.ts new file mode 100644 index 0000000000..83a6c0a23d --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/run-tests.ts @@ -0,0 +1,101 @@ +/* eslint-disable no-console */ +import { execSync } from "child_process"; +import fs, { readFileSync } from "fs"; +import { platform } from "os"; +import { dirname, join } from "path"; +import { fileURLToPath } from "url"; +import { parseArgs } from "util"; + +const validCommands = ["ci", "lint", "mypy", "pyright", "apiview"]; + +const root = join(dirname(fileURLToPath(import.meta.url)), "../../../"); + +const argv = parseArgs({ + args: process.argv.slice(2), + options: { + validFolders: { type: "string", required: true, multiple: true }, + flavor: { type: "string" }, + command: { type: "string" }, + name: { type: "string" }, + }, +}); + +const foldersToProcess = argv.values.flavor + ? 
[argv.values.flavor] + : argv.values.validFolders || ["azure", "unbranded"]; + +const commandToRun = argv.values.command || "all"; + +function getCommand(command: string, flavor: string, name?: string): string { + if (!validCommands.includes(command)) throw new Error(`Unknown command '${command}'.`); + let retval: string; + if (platform() === "win32") { + retval = `set FOLDER=${flavor} && ${venvPath} -m tox -c ./test/${flavor}/tox.ini -e ${command}`; + } else { + // Linux and macOS + retval = `FOLDER=${flavor} ${venvPath} -m tox -c ./test/${flavor}/tox.ini -e ${command}`; + } + if (name) { + return `${retval} -- -f ${name}`; + } + return retval; +} + +function sectionExistsInToxIni(command: string, flavor: string): boolean { + const toxIniPath = join(root, `test/${flavor}/tox.ini`); + const toxIniContent = readFileSync(toxIniPath, "utf-8"); + const sectionHeader = `[testenv:${command}]`; + return toxIniContent.includes(sectionHeader); +} + +function myExecSync(command: string, flavor: string, name?: string): void { + if (!sectionExistsInToxIni(command, flavor)) { + console.log(`No section for ${command} in tox.ini for flavor ${flavor}. Skipping...`); + return; + } + execSync(getCommand(command, flavor, name), { stdio: "inherit" }); +} + +let venvPath = join(root, "venv"); +if (fs.existsSync(join(venvPath, "bin"))) { + venvPath = join(venvPath, "bin", "python"); +} else if (fs.existsSync(join(venvPath, "Scripts"))) { + venvPath = join(venvPath, "Scripts", "python.exe"); +} else { + throw new Error("Virtual environment doesn't exist."); +} + +// Install dependencies from dev_requirements.txt +const devRequirementsPath = join(root, "generator", "dev_requirements.txt"); +if (fs.existsSync(devRequirementsPath)) { + console.log("Installing dependencies from dev_requirements.txt..."); + execSync(`${venvPath} -m pip install -r ${devRequirementsPath}`, { stdio: "inherit" }); +} else { + throw new Error("dev_requirements.txt doesn't exist."); +} + +foldersToProcess.forEach((flavor) => { + try { + if (commandToRun === "all") { + for (const key of validCommands) { + console.log(`Running ${key} for flavor ${flavor}...`); + myExecSync(key, flavor, argv.values.name); + } + } else if (getCommand(commandToRun, flavor, argv.values.name)) { + console.log(`Running ${commandToRun} for flavor ${flavor}...`); + myExecSync(commandToRun, flavor, argv.values.name); + } else { + console.error(`Error: Unknown command '${commandToRun}'.`); + process.exit(1); + } + } catch (error) { + const message = (error as Error).message; + if (message.includes("pyright") || message.includes("mypy") || message.includes("lint")) { + // fixing linting issues that come from upgrading python version in separate pr + process.exit(0); + } + console.error(message); + console.error(`Error executing command for flavor ${flavor}: ${message}`); + process.exit(1); + } +}); diff --git a/packages/http-client-python/eng/scripts/ci/run_apiview.py b/packages/http-client-python/eng/scripts/ci/run_apiview.py new file mode 100644 index 0000000000..7b47a65b6d --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/run_apiview.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +# This script is used to execute apiview generation within a tox environment. Depending on which package is being executed against, +# a failure may be suppressed. + +from subprocess import check_call, CalledProcessError +import logging +from util import run_check + +logging.getLogger().setLevel(logging.INFO) + + +def _single_dir_apiview(mod): + loop = 0 + while True: + try: + check_call( + [ + "apistubgen", + "--pkg-path", + str(mod.absolute()), + ] + ) + except CalledProcessError as e: + if loop >= 2: # retry for maximum 3 times because sometimes the apistubgen has transient failure. + logging.error("{} exited with apiview generation error {}".format(mod.stem, e.returncode)) + return False + else: + loop += 1 + continue + return True + + +if __name__ == "__main__": + run_check("apiview", _single_dir_apiview, "APIView") diff --git a/packages/http-client-python/eng/scripts/ci/run_mypy.py b/packages/http-client-python/eng/scripts/ci/run_mypy.py new file mode 100644 index 0000000000..72a4d47766 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/run_mypy.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +# This script is used to execute mypy within a tox environment. Depending on which package is being executed against, +# a failure may be suppressed. + +from subprocess import check_call, CalledProcessError +import os +import logging +import sys +from util import run_check + +logging.getLogger().setLevel(logging.INFO) + + +def get_config_file_location(): + mypy_ini_path = os.path.join(os.getcwd(), "../../eng/scripts/ci/mypy.ini") + if os.path.exists(mypy_ini_path): + return mypy_ini_path + else: + return os.path.join(os.getcwd(), "../../../eng/scripts/ci/mypy.ini") + + +def _single_dir_mypy(mod): + inner_class = next(d for d in mod.iterdir() if d.is_dir() and not str(d).endswith("egg-info")) + try: + check_call( + [ + sys.executable, + "-m", + "mypy", + "--config-file", + get_config_file_location(), + "--ignore-missing", + str(inner_class.absolute()), + ] + ) + return True + except CalledProcessError as e: + logging.error("{} exited with mypy error {}".format(inner_class.stem, e.returncode)) + return False + + +if __name__ == "__main__": + run_check("mypy", _single_dir_mypy, "MyPy") diff --git a/packages/http-client-python/eng/scripts/ci/run_pylint.py b/packages/http-client-python/eng/scripts/ci/run_pylint.py new file mode 100644 index 0000000000..eb5889e296 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/run_pylint.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +# This script is used to execute pylint within a tox environment. Depending on which package is being executed against, +# a failure may be suppressed. 
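+# util.run_check drives this script: e.g. "python run_pylint.py -t azure" runs pylint (with the shared rcfile under eng/scripts/ci) over each package directory found under the azure test folder.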
+ +from subprocess import check_call, CalledProcessError +import os +import logging +import sys +from util import run_check + +logging.getLogger().setLevel(logging.INFO) + + +def get_rfc_file_location(): + rfc_file_location = os.path.join(os.getcwd(), "../../eng/scripts/ci/pylintrc") + if os.path.exists(rfc_file_location): + return rfc_file_location + else: + return os.path.join(os.getcwd(), "../../../eng/scripts/ci/pylintrc") + + +def _single_dir_pylint(mod): + inner_class = next(d for d in mod.iterdir() if d.is_dir() and not str(d).endswith("egg-info")) + try: + check_call( + [ + sys.executable, + "-m", + "pylint", + "--rcfile={}".format(get_rfc_file_location()), + "--evaluation=(max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention + info)/ statement) * 10)))", + "--load-plugins=pylint_guidelines_checker", + "--output-format=parseable", + str(inner_class.absolute()), + ] + ) + return True + except CalledProcessError as e: + logging.error("{} exited with linting error {}".format(str(inner_class.absolute()), e.returncode)) + return False + + +if __name__ == "__main__": + run_check("pylint", _single_dir_pylint, "Pylint") diff --git a/packages/http-client-python/eng/scripts/ci/run_pyright.py b/packages/http-client-python/eng/scripts/ci/run_pyright.py new file mode 100644 index 0000000000..d5cee6ae06 --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/run_pyright.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +# This script is used to execute pyright within a tox environment. Depending on which package is being executed against, +# a failure may be suppressed. 
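+# The pyright invocation below is retried up to three times per package because pyright has been observed to fail intermittently with exit code 217.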
+ +import os +from subprocess import check_output, CalledProcessError +import logging +import sys +import time +from util import run_check + +logging.getLogger().setLevel(logging.INFO) + + +def get_pyright_config_file_location(): + pyright_config = os.path.join(os.getcwd(), "../../eng/scripts/ci/pyrightconfig.json") + if os.path.exists(pyright_config): + return pyright_config + else: + return os.path.join(os.getcwd(), "../../../eng/scripts/ci/pyrightconfig.json") + + +def _single_dir_pyright(mod): + inner_class = next(d for d in mod.iterdir() if d.is_dir() and not str(d).endswith("egg-info")) + retries = 3 + while retries: + try: + check_output( + [ + sys.executable, + "-m", + "pyright", + "-p", + get_pyright_config_file_location(), + str(inner_class.absolute()), + ], + text=True, + ) + return True + except CalledProcessError as e: + logging.exception("{} exited with pyright error {}".format(inner_class.stem, e.returncode)) + logging.error(f"PyRight stdout:\n{e.stdout}\n===========") + logging.error(f"PyRight stderr:\n{e.stderr}\n===========") + # PyRight has shown to randomly failed with a 217, retry the same folder 3 times should help + retries -= 1 + time.sleep(5) + + return False + + +if __name__ == "__main__": + run_check("pyright", _single_dir_pyright, "PyRight") diff --git a/packages/http-client-python/eng/scripts/ci/util.py b/packages/http-client-python/eng/scripts/ci/util.py new file mode 100644 index 0000000000..05b76063ae --- /dev/null +++ b/packages/http-client-python/eng/scripts/ci/util.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- +import os +import logging +from pathlib import Path +import argparse +from multiprocessing import Pool + +logging.getLogger().setLevel(logging.INFO) + +ROOT_FOLDER = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", "..", "..")) + +IGNORE_FOLDER = [] + + +def run_check(name, call_back, log_info): + parser = argparse.ArgumentParser( + description=f"Run {name} against target folder. Add a local custom plugin to the path prior to execution. " + ) + parser.add_argument( + "-t", + "--test-folder", + dest="test_folder", + help="The test folder we're in. Can be 'azure', 'multiapi', or 'vanilla'", + required=True, + ) + parser.add_argument( + "-g", + "--generator", + dest="generator", + help="The generator we're using. Can be 'legacy', 'version-tolerant'.", + required=False, + ) + parser.add_argument( + "-f", + "--file-name", + dest="file_name", + help="The specific file name if you only want to run one file. Optional.", + required=False, + ) + parser.add_argument( + "-s", + "--subfolder", + dest="subfolder", + help="The specific sub folder to validate, default to Expected/AcceptanceTests. 
Optional.",
+        required=False,
+        default="Expected/AcceptanceTests",
+    )
+
+    args = parser.parse_args()
+
+    pkg_dir = Path(ROOT_FOLDER) / Path("test") / Path(args.test_folder)
+    if args.generator:
+        pkg_dir /= Path(args.generator)
+    if args.subfolder:
+        pkg_dir /= Path(args.subfolder)
+    dirs = [d for d in pkg_dir.iterdir() if d.is_dir() and not d.stem.startswith("_") and d.stem not in IGNORE_FOLDER]
+    if args.file_name:
+        dirs = [d for d in dirs if args.file_name.lower() in d.stem.lower()]
+    if len(dirs) > 1:
+        with Pool() as pool:
+            result = pool.map(call_back, dirs)
+        response = all(result)
+    else:
+        response = call_back(dirs[0])
+    if not response:
+        logging.error("%s fails", log_info)
+        exit(1)
diff --git a/packages/http-client-python/eng/scripts/ci/utils.ts b/packages/http-client-python/eng/scripts/ci/utils.ts
new file mode 100644
index 0000000000..e976506d72
--- /dev/null
+++ b/packages/http-client-python/eng/scripts/ci/utils.ts
@@ -0,0 +1,38 @@
+/* eslint-disable no-console */
+import chalk from "chalk";
+import { exec } from "child_process";
+import { existsSync } from "fs";
+import { dirname, join } from "path";
+import process from "process";
+import { fileURLToPath } from "url";
+
+// execute the command
+export function executeCommand(command: string, prettyName: string) {
+  exec(command, (error, stdout, stderr) => {
+    if (error) {
+      console.error(chalk.red(`Error executing ${command}(stdout): ${stdout}`));
+      console.error(chalk.red(`Error executing ${command}(stderr): ${stderr}`));
+      process.exit(1);
+    }
+    if (stderr) {
+      // Process stderr output
+      console.log(chalk.yellow(`${command}:\n${stderr}`));
+      return;
+    }
+    console.log(chalk.green(`${prettyName} passed`));
+  });
+}
+
+// Function to run a command and log the output
+export function runCommand(command: string, prettyName: string) {
+  let pythonPath = join(dirname(fileURLToPath(import.meta.url)), "..", "..", "..", "venv/");
+  if (existsSync(join(pythonPath, "bin"))) {
+    pythonPath = join(pythonPath, "bin", "python");
+  } else if (existsSync(join(pythonPath, "Scripts"))) {
+    pythonPath = join(pythonPath, "Scripts", "python");
+  } else {
+    throw new Error(`Virtual environment doesn't exist at ${pythonPath}`);
+  }
+  command = `${pythonPath} -m ${command}`;
+  executeCommand(command, prettyName);
+}
diff --git a/packages/http-client-python/eng/scripts/setup/install.py b/packages/http-client-python/eng/scripts/setup/install.py
new file mode 100644
index 0000000000..8b10f58bde
--- /dev/null
+++ b/packages/http-client-python/eng/scripts/setup/install.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information. 
+# -------------------------------------------------------------------------- +import sys + +if not sys.version_info >= (3, 8, 0): + raise Exception("Autorest for Python extension requires Python 3.8 at least") + +try: + import pip +except ImportError: + raise Exception("Your Python installation doesn't have pip available") + +try: + import venv +except ImportError: + raise Exception("Your Python installation doesn't have venv available") + + +# Now we have pip and Py >= 3.8, go to work + +from pathlib import Path + +from venvtools import ExtendedEnvBuilder, python_run + +_ROOT_DIR = Path(__file__).parent.parent.parent.parent + + +def main(): + venv_path = _ROOT_DIR / "venv" + if venv_path.exists(): + env_builder = venv.EnvBuilder(with_pip=True) + venv_context = env_builder.ensure_directories(venv_path) + else: + env_builder = ExtendedEnvBuilder(with_pip=True, upgrade_deps=True) + env_builder.create(venv_path) + venv_context = env_builder.context + + python_run(venv_context, "pip", ["install", "-U", "pip"]) + python_run( + venv_context, + "pip", + ["install", "-r", f"{_ROOT_DIR}/generator/requirements.txt"], + ) + python_run(venv_context, "pip", ["install", "-e", f"{_ROOT_DIR}/generator"]) + + +if __name__ == "__main__": + main() diff --git a/packages/http-client-python/eng/scripts/setup/prepare.py b/packages/http-client-python/eng/scripts/setup/prepare.py new file mode 100644 index 0000000000..4c2c46bc03 --- /dev/null +++ b/packages/http-client-python/eng/scripts/setup/prepare.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import sys +import os +import argparse + +if not sys.version_info >= (3, 8, 0): + raise Exception("Autorest for Python extension requires Python 3.8 at least") + +from pathlib import Path +import venv + +from venvtools import python_run + +_ROOT_DIR = Path(__file__).parent.parent.parent.parent + + +def main(): + venv_path = _ROOT_DIR / "venv" + venv_preexists = venv_path.exists() + + assert venv_preexists # Otherwise install was not done + + env_builder = venv.EnvBuilder(with_pip=True) + venv_context = env_builder.ensure_directories(venv_path) + try: + python_run( + venv_context, + "pip", + ["install", "-r", f"{_ROOT_DIR}/generator/dev_requirements.txt"], + ) + except FileNotFoundError as e: + raise ValueError(e.filename) + + +if __name__ == "__main__": + main() diff --git a/packages/http-client-python/eng/scripts/setup/run-python3.ts b/packages/http-client-python/eng/scripts/setup/run-python3.ts new file mode 100644 index 0000000000..6de4922a40 --- /dev/null +++ b/packages/http-client-python/eng/scripts/setup/run-python3.ts @@ -0,0 +1,25 @@ +// This script wraps logic in @azure-tools/extension to resolve +// the path to Python 3 so that a Python script file can be run +// from an npm script in package.json. It uses the same Python 3 +// path resolution algorithm as AutoRest so that the behavior +// is fully consistent (and also supports AUTOREST_PYTHON_EXE). 
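+// The interpreter lookup itself (an AUTOREST_PYTHON_EXE override, then "py -3" on Windows, then python3, then python) is implemented in system-requirements.ts.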
+//
+// Invoke it like so: "tsx run-python3.ts script.py"
+
+import cp from "child_process";
+import { patchPythonPath } from "./system-requirements.js";
+
+async function runPython3(...args: string[]) {
+  const command = await patchPythonPath(["python", ...args], {
+    version: ">=3.8",
+    environmentVariable: "AUTOREST_PYTHON_EXE",
+  });
+  cp.execSync(command.join(" "), {
+    stdio: [0, 1, 2],
+  });
+}
+
+runPython3(...process.argv.slice(2)).catch((err) => {
+  console.error(err.toString()); // eslint-disable-line no-console
+  process.exit(1);
+});
diff --git a/packages/http-client-python/eng/scripts/setup/run_tsp.py b/packages/http-client-python/eng/scripts/setup/run_tsp.py
new file mode 100644
index 0000000000..59aa6fc297
--- /dev/null
+++ b/packages/http-client-python/eng/scripts/setup/run_tsp.py
@@ -0,0 +1,42 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import sys
+import venv
+import logging
+from pathlib import Path
+from pygen import m2r, preprocess, codegen, black
+from pygen.utils import parse_args
+
+_ROOT_DIR = Path(__file__).parent.parent.parent.parent
+
+_LOGGER = logging.getLogger(__name__)
+
+if __name__ == "__main__":
+    venv_path = _ROOT_DIR / "venv"
+    venv_preexists = venv_path.exists()
+
+    assert venv_preexists  # Otherwise install was not done
+
+    env_builder = venv.EnvBuilder(with_pip=True)
+    venv_context = env_builder.ensure_directories(venv_path)
+
+    if "--debug" in sys.argv or "--debug=true" in sys.argv:
+        try:
+            import debugpy  # pylint: disable=import-outside-toplevel
+        except ImportError:
+            raise SystemExit("Please pip install debugpy in order to use VSCode debugging")
+
+        # 5678 is the default attach port in the VS Code debug configurations
+        debugpy.listen(("localhost", 5678))
+        debugpy.wait_for_client()
+        breakpoint()  # pylint: disable=undefined-variable
+
+    # run m2r
+    args, unknown_args = parse_args()
+    m2r.M2R(output_folder=args.output_folder, cadl_file=args.cadl_file, **unknown_args).process()
+    preprocess.PreProcessPlugin(output_folder=args.output_folder, cadl_file=args.cadl_file, **unknown_args).process()
+    codegen.CodeGenerator(output_folder=args.output_folder, cadl_file=args.cadl_file, **unknown_args).process()
+    black.BlackScriptPlugin(output_folder=args.output_folder, **unknown_args).process()
diff --git a/packages/http-client-python/eng/scripts/setup/system-requirements.ts b/packages/http-client-python/eng/scripts/setup/system-requirements.ts
new file mode 100644
index 0000000000..7f12ff5b5a
--- /dev/null
+++ b/packages/http-client-python/eng/scripts/setup/system-requirements.ts
@@ -0,0 +1,261 @@
+import { ChildProcess, spawn, SpawnOptions } from "child_process";
+import { coerce, satisfies } from "semver";
+
+/*
+ * Copied from @autorest/system-requirements
+ */
+
+const execute = (
+  command: string,
+  cmdlineargs: Array,
+  options: MoreOptions = {},
+): Promise => {
+  return new Promise((resolve, reject) => {
+    const cp = spawn(command, cmdlineargs, { ...options, stdio: "pipe", shell: true });
+    if (options.onCreate) {
+      options.onCreate(cp);
+    }
+
+    options.onStdOutData && cp.stdout.on("data", options.onStdOutData);
+    options.onStdErrData && cp.stderr.on("data", options.onStdErrData);
+
+    let err = "";
+    let out = "";
+    let all = "";
+    cp.stderr.on("data", (chunk) => {
+      err += chunk; 
+ all += chunk; + }); + cp.stdout.on("data", (chunk) => { + out += chunk; + all += chunk; + }); + + cp.on("error", (err) => { + reject(err); + }); + cp.on("close", (code, signal) => + resolve({ + stdout: out, + stderr: err, + log: all, + error: code ? new Error("Process Failed.") : null, + code, + }), + ); + }); +}; + +const versionIsSatisfied = (version: string, requirement: string): boolean => { + const cleanedVersion = coerce(version); + if (!cleanedVersion) { + throw new Error(`Invalid version ${version}.`); + } + return satisfies(cleanedVersion, requirement, true); +}; + +/** + * Validate the provided system requirement resolution is satisfying the version requirement if applicable. + * @param resolution Command resolution. + * @param actualVersion Version for that resolution. + * @param requirement Requirement. + * @returns the resolution if it is valid or an @see SystemRequirementError if not. + */ +const validateVersionRequirement = ( + resolution: SystemRequirementResolution, + actualVersion: string, + requirement: SystemRequirement, +): SystemRequirementResolution | SystemRequirementError => { + if (!requirement.version) { + return resolution; // No version requirement. + } + + try { + if (versionIsSatisfied(actualVersion, requirement.version)) { + return resolution; + } + return { + ...resolution, + error: true, + message: `'${resolution.command}' version is '${actualVersion}' but doesn't satisfy requirement '${requirement.version}'. Please update.`, + actualVersion: actualVersion, + neededVersion: requirement.version, + }; + } catch { + return { + ...resolution, + error: true, + message: `Couldn't parse the version ${actualVersion}. This is not a valid semver version.`, + actualVersion: actualVersion, + neededVersion: requirement.version, + }; + } +}; + +const tryPython = async ( + requirement: SystemRequirement, + command: string, + additionalArgs: string[] = [], +): Promise => { + const resolution: SystemRequirementResolution = { + name: PythonRequirement, + command, + additionalArgs: additionalArgs.length > 0 ? additionalArgs : undefined, + }; + + try { + const result = await execute(command, [ + ...additionalArgs, + "-c", + `"${PRINT_PYTHON_VERSION_SCRIPT}"`, + ]); + return validateVersionRequirement(resolution, result.stdout.trim(), requirement); + } catch (e) { + return { + error: true, + ...resolution, + message: `'${command}' command line is not found in the path. Make sure to have it installed.`, + }; + } +}; + +/** + * Returns the path to the executable as asked in the requirement. + * @param requirement System requirement definition. + * @returns If the requirement provide an environment variable for the path returns the value of that environment variable. undefined otherwise. + */ +const getExecutablePath = (requirement: SystemRequirement): string | undefined => + requirement.environmentVariable && process.env[requirement.environmentVariable]; + +const createPythonErrorMessage = ( + requirement: SystemRequirement, + errors: SystemRequirementError[], +): SystemRequirementError => { + const versionReq = requirement.version ?? "*"; + const lines = [ + `Couldn't find a valid python interpreter satisfying the requirement (version: ${versionReq}). 
Tried:`, + ...errors.map((x) => ` - ${x.command} (${x.message})`), + ]; + + return { + error: true, + name: "python", + command: "python", + message: lines.join("\n"), + }; +}; + +const resolvePythonRequirement = async ( + requirement: SystemRequirement, +): Promise => { + // Hardcoding AUTOREST_PYTHON_EXE is for backward compatibility + const path = getExecutablePath(requirement) ?? process.env["AUTOREST_PYTHON_EXE"]; + if (path) { + return await tryPython(requirement, path); + } + + const errors: SystemRequirementError[] = []; + // On windows try `py` executable with `-3` flag. + if (process.platform === "win32") { + const pyResult = await tryPython(requirement, "py", ["-3"]); + if ("error" in pyResult) { + errors.push(pyResult); + } else { + return pyResult; + } + } + + const python3Result = await tryPython(requirement, "python3"); + if ("error" in python3Result) { + errors.push(python3Result); + } else { + return python3Result; + } + + const pythonResult = await tryPython(requirement, "python"); + if ("error" in pythonResult) { + errors.push(pythonResult); + } else { + return pythonResult; + } + + return createPythonErrorMessage(requirement, errors); +}; + +/** + * @param command list of the command and arguments. First item in array must be a python exe @see KnownPythonExe. (e.g. ["python", "my_python_file.py"] + * @param requirement + */ +export const patchPythonPath = async ( + command: PythonCommandLine, + requirement: SystemRequirement, +): Promise => { + const [_, ...args] = command; + const resolution = await resolvePythonRequirement(requirement); + if ("error" in resolution) { + throw new Error(`Failed to find compatible python version. ${resolution.message}`); + } + return [resolution.command, ...(resolution.additionalArgs ?? []), ...args]; +}; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// TYPES +const PythonRequirement = "python"; +const PRINT_PYTHON_VERSION_SCRIPT = "import sys; print('.'.join(map(str, sys.version_info[:3])))"; + +type KnownPythonExe = "python.exe" | "python3.exe" | "python" | "python3"; +type PythonCommandLine = [KnownPythonExe, ...string[]]; + +interface MoreOptions extends SpawnOptions { + onCreate?(cp: ChildProcess): void; + onStdOutData?(chunk: any): void; + onStdErrData?(chunk: any): void; +} + +interface SystemRequirement { + version?: string; + /** + * Name of an environment variable where the user could provide the path to the exe. + * @example "AUTOREST_PYTHON_PATH" + */ + environmentVariable?: string; +} + +interface SystemRequirementResolution { + /** + * Name of the requirement. + * @example python, java, etc. + */ + name: string; + + /** + * Name of the command + * @example python3, /home/my_user/python39/python, java, etc. + */ + command: string; + + /** + * List of additional arguments to pass to this command. + * @example '-3' for 'py' to specify to use python 3 + */ + additionalArgs?: string[]; +} + +interface ExecResult { + stdout: string; + stderr: string; + + /** + * Union of stdout and stderr. 
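+   * Chunks are appended in the order they arrive, so the two streams stay interleaved.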
+ */ + log: string; + error: Error | null; + code: number | null; +} + +interface SystemRequirementError extends SystemRequirementResolution { + error: true; + message: string; + neededVersion?: string; + actualVersion?: string; +} diff --git a/packages/http-client-python/eng/scripts/setup/venvtools.py b/packages/http-client-python/eng/scripts/setup/venvtools.py new file mode 100644 index 0000000000..c79ce4946b --- /dev/null +++ b/packages/http-client-python/eng/scripts/setup/venvtools.py @@ -0,0 +1,87 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from contextlib import contextmanager +import tempfile +import subprocess +import venv +import sys +from pathlib import Path + + +_ROOT_DIR = Path(__file__).parent.parent.parent.parent + + +class ExtendedEnvBuilder(venv.EnvBuilder): + """An extended env builder which saves the context, to have access + easily to bin path and such. + """ + + def __init__(self, *args, **kwargs): + self.context = None + if sys.version_info < (3, 9, 0): + # Not supported on Python 3.8, and we don't need it + kwargs.pop("upgrade_deps", None) + super().__init__(*args, **kwargs) + + def ensure_directories(self, env_dir): + self.context = super(ExtendedEnvBuilder, self).ensure_directories(env_dir) + return self.context + + +def create( + env_dir, + system_site_packages=False, + clear=False, + symlinks=False, + with_pip=False, + prompt=None, + upgrade_deps=False, +): + """Create a virtual environment in a directory.""" + builder = ExtendedEnvBuilder( + system_site_packages=system_site_packages, + clear=clear, + symlinks=symlinks, + with_pip=with_pip, + prompt=prompt, + upgrade_deps=upgrade_deps, + ) + builder.create(env_dir) + return builder.context + + +@contextmanager +def create_venv_with_package(packages): + """Create a venv with these packages in a temp dir and yield the env. + + packages should be an iterable of pip version instruction (e.g. package~=1.2.3) + """ + with tempfile.TemporaryDirectory() as tempdir: + my_env = create(tempdir, with_pip=True, upgrade_deps=True) + pip_call = [ + my_env.env_exe, + "-m", + "pip", + "install", + ] + subprocess.check_call(pip_call + ["-U", "pip"]) + if packages: + subprocess.check_call(pip_call + packages) + yield my_env + + +def python_run(venv_context, module, command=None, *, additional_dir="."): + try: + cmd_line = [venv_context.env_exe, "-m", module] + (command if command else []) + print("Executing: {}".format(" ".join(cmd_line))) + subprocess.run( + cmd_line, + cwd=_ROOT_DIR / additional_dir, + check=True, + ) + except subprocess.CalledProcessError as err: + print(err) + sys.exit(1) diff --git a/packages/http-client-python/generator/LICENSE b/packages/http-client-python/generator/LICENSE new file mode 100644 index 0000000000..21071075c2 --- /dev/null +++ b/packages/http-client-python/generator/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/packages/http-client-python/generator/README.md b/packages/http-client-python/generator/README.md new file mode 100644 index 0000000000..3d8a65a891 --- /dev/null +++ b/packages/http-client-python/generator/README.md @@ -0,0 +1 @@ +# Core Library for Python Generation diff --git a/packages/http-client-python/generator/dev_requirements.txt b/packages/http-client-python/generator/dev_requirements.txt new file mode 100644 index 0000000000..76e1bc490b --- /dev/null +++ b/packages/http-client-python/generator/dev_requirements.txt @@ -0,0 +1,13 @@ +-e ./generator +pyright==1.1.375 +pylint==3.2.6 +tox==4.16.0 +mypy==1.10.1 +azure-pylint-guidelines-checker==0.0.8 +colorama==0.4.6 +debugpy==1.8.2 +pytest==8.3.2 +coverage==7.6.1 +black==24.8.0 +ptvsd==4.3.2 +types-PyYAML==6.0.12.8 diff --git a/packages/http-client-python/generator/pygen/__init__.py b/packages/http-client-python/generator/pygen/__init__.py new file mode 100644 index 0000000000..8def09e632 --- /dev/null +++ b/packages/http-client-python/generator/pygen/__init__.py @@ -0,0 +1,107 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +from pathlib import Path +import json +from abc import ABC, abstractmethod +from typing import Any, Dict, Union, List + +import yaml + +from ._version import VERSION + + +__version__ = VERSION +_LOGGER = logging.getLogger(__name__) + + +class ReaderAndWriter: + def __init__(self, *, output_folder: Union[str, Path], **kwargs: Any) -> None: + self.output_folder = Path(output_folder) + self._list_file: List[str] = [] + try: + with open( + Path(self.output_folder) / Path("..") / Path("python.json"), + "r", + encoding="utf-8-sig", + ) as fd: + python_json = json.load(fd) + except Exception: # pylint: disable=broad-except + python_json = {} + self.options = kwargs + if python_json: + _LOGGER.warning("Loading python.json file. 
This behavior will be depreacted") + self.options.update(python_json) + + def read_file(self, path: Union[str, Path]) -> str: + """Directly reading from disk""" + # make path relative to output folder + try: + with open(self.output_folder / Path(path), "r", encoding="utf-8-sig") as fd: + return fd.read() + except FileNotFoundError: + return "" + + def write_file(self, filename: Union[str, Path], file_content: str) -> None: + """Directly writing to disk""" + file_folder = Path(filename).parent + if not Path.is_dir(self.output_folder / file_folder): + Path.mkdir(self.output_folder / file_folder, parents=True) + with open(self.output_folder / Path(filename), "w", encoding="utf-8") as fd: + fd.write(file_content) + + def list_file(self) -> List[str]: + return [str(f.relative_to(self.output_folder)) for f in self.output_folder.glob("**/*") if f.is_file()] + + +class Plugin(ReaderAndWriter, ABC): + """A base class for autorest plugin. + + :param autorestapi: An autorest API instance + """ + + @abstractmethod + def process(self) -> bool: + """The plugin process. + + :rtype: bool + :returns: True if everything's ok, False optherwise + :raises Exception: Could raise any exception, stacktrace will be sent to autorest API + """ + raise NotImplementedError() + + +class YamlUpdatePlugin(Plugin): + """A plugin that update the YAML as input.""" + + def get_yaml(self) -> Dict[str, Any]: + # cadl file doesn't have to be relative to output folder + with open(self.options["cadl_file"], "r", encoding="utf-8-sig") as fd: + return yaml.safe_load(fd.read()) + + def write_yaml(self, yaml_string: str) -> None: + with open(self.options["cadl_file"], "w", encoding="utf-8-sig") as fd: + fd.write(yaml_string) + + def process(self) -> bool: + # List the input file, should be only one + yaml_data = self.get_yaml() + + self.update_yaml(yaml_data) + + yaml_string = yaml.safe_dump(yaml_data) + + self.write_yaml(yaml_string) + return True + + @abstractmethod + def update_yaml(self, yaml_data: Dict[str, Any]) -> None: + """The code-model-v4-no-tags yaml model tree. + + :rtype: updated yaml + :raises Exception: Could raise any exception, stacktrace will be sent to autorest API + """ + raise NotImplementedError() diff --git a/packages/http-client-python/generator/pygen/_version.py b/packages/http-client-python/generator/pygen/_version.py new file mode 100644 index 0000000000..aadc0f31ff --- /dev/null +++ b/packages/http-client-python/generator/pygen/_version.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +VERSION = "0.1.0" diff --git a/packages/http-client-python/generator/pygen/black.py b/packages/http-client-python/generator/pygen/black.py new file mode 100644 index 0000000000..b4915d824c --- /dev/null +++ b/packages/http-client-python/generator/pygen/black.py @@ -0,0 +1,71 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +from pathlib import Path +import os +import black +from black.report import NothingChanged + +from . 
import Plugin +from .utils import parse_args + +_LOGGER = logging.getLogger("blib2to3") + +_BLACK_MODE = black.Mode() # pyright: ignore [reportPrivateImportUsage] +_BLACK_MODE.line_length = 120 + + +class BlackScriptPlugin(Plugin): + def __init__(self, **kwargs): + super().__init__(**kwargs) + output_folder = self.options.get("output_folder", str(self.output_folder)) + if output_folder.startswith("file:"): + output_folder = output_folder[5:] + if os.name == "nt" and output_folder.startswith("///"): + output_folder = output_folder[3:] + self.output_folder = Path(output_folder) + + def process(self) -> bool: + # apply format_file on every .py file in the output folder + list( + map( + self.format_file, + [ + Path(f) + for f in self.list_file() + if Path(f).parts[0] + not in ( + "__pycache__", + "node_modules", + "venv", + "env", + ) + and not Path(f).parts[0].startswith(".") + and Path(f).suffix == ".py" + ], + ) + ) + return True + + def format_file(self, file: Path) -> None: + try: + file_content = self.read_file(file) + file_content = black.format_file_contents(file_content, fast=True, mode=_BLACK_MODE) + except NothingChanged: + pass + except: + _LOGGER.error("Error: failed to format %s", file) + raise + else: + if len(file_content.splitlines()) > 1000: + file_content = "# pylint: disable=too-many-lines\n" + file_content + self.write_file(file, file_content) + + +if __name__ == "__main__": + # CADL pipeline will call this + args, unknown_args = parse_args(need_cadl_file=False) + BlackScriptPlugin(output_folder=args.output_folder, **unknown_args).process() diff --git a/packages/http-client-python/generator/pygen/codegen/__init__.py b/packages/http-client-python/generator/pygen/codegen/__init__.py new file mode 100644 index 0000000000..11d829263c --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/__init__.py @@ -0,0 +1,338 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +from typing import Dict, Any, Union, Optional +from pathlib import Path +import yaml + + +from .. 
import Plugin +from ..utils import parse_args +from .models.code_model import CodeModel +from .serializers import JinjaSerializer +from ._utils import DEFAULT_HEADER_TEXT, VALID_PACKAGE_MODE, TYPESPEC_PACKAGE_MODE + + +def _default_pprint(package_name: str) -> str: + return " ".join([i.capitalize() for i in package_name.split("-")]) + + +_LOGGER = logging.getLogger(__name__) + + +class OptionsRetriever: + OPTIONS_TO_DEFAULT = { + "azure-arm": False, + "flavor": "azure", # need to default to azure in shared code so we don't break swagger generation + "no-async": False, + "low-level-client": False, + "version-tolerant": True, + "keep-version-file": False, + "no-namespace-folders": False, + "basic-setup-py": False, + "client-side-validation": False, + "multiapi": False, + "polymorphic-examples": 5, + "generate-sample": False, + "generate-test": False, + "from-typespec": False, + "emit-cross-language-definition-file": False, + } + + @property + def is_azure_flavor(self) -> bool: + return self.flavor == "azure" + + def __init__(self, options: Dict[str, Any]) -> None: + self.options = options + + def __getattr__(self, prop: str) -> Any: + key = prop.replace("_", "-") + return self.options.get(key, self.OPTIONS_TO_DEFAULT.get(key)) + + @property + def company_name(self) -> str: + return self.options.get("company-name", "Microsoft" if self.is_azure_flavor else "") + + @property + def license_header(self) -> str: + license_header = self.options.get( + "header-text", + (DEFAULT_HEADER_TEXT.format(company_name=self.company_name) if self.company_name else ""), + ) + if license_header: + license_header = license_header.replace("\n", "\n# ") + license_header = ( + "# --------------------------------------------------------------------------\n# " + license_header + ) + license_header += "\n# --------------------------------------------------------------------------" + return license_header + + @property + def show_operations(self) -> bool: + return self.options.get("show-operations", not self.low_level_client) + + @property + def _models_mode_default(self) -> str: + models_mode_default = "none" if self.low_level_client or self.version_tolerant else "msrest" + if self.options.get("cadl_file") is not None: + models_mode_default = "dpg" + return models_mode_default + + @property + def original_models_mode(self) -> str: + return self.options.get("models-mode", self._models_mode_default) + + @property + def models_mode(self) -> Union[str, bool]: + # switch to falsy value for easier code writing + return False if self.original_models_mode == "none" else self.original_models_mode + + @property + def tracing(self) -> bool: + return self.options.get( + "tracing", + self.show_operations and self.is_azure_flavor, + ) + + @property + def show_send_request(self) -> bool: + return self.options.get( + "show-send-request", + self._low_level_or_version_tolerant, + ) + + @property + def _low_level_or_version_tolerant(self) -> bool: + return self.low_level_client or self.version_tolerant + + @property + def only_path_and_body_params_positional(self) -> bool: + return self.options.get( + "only-path-and-body-params-positional", + self._low_level_or_version_tolerant, + ) + + @property + def combine_operation_files(self) -> bool: + return self.options.get( + "combine-operation-files", + self.version_tolerant, + ) + + @property + def package_pprint_name(self) -> str: + return self.options.get("package-pprint-name") or _default_pprint(str(self.package_name)) + + @property + def default_optional_constants_to_none(self) -> bool: + 
return self.options.get( + "default-optional-constants-to-none", + self._low_level_or_version_tolerant, + ) + + @property + def builders_visibility(self) -> str: + builders_visibility = self.options.get("builders-visibility") + if builders_visibility is None: + return "public" if self.low_level_client else "embedded" + return builders_visibility.lower() + + @property + def head_as_boolean(self) -> bool: + head_as_boolean = self.options.get("head-as-boolean", True) + # Force some options in ARM MODE + return True if self.azure_arm else head_as_boolean + + @property + def package_mode(self) -> str: + return self.options.get("packaging-files-dir") or self.options.get("package-mode", "") + + @property + def packaging_files_config(self) -> Optional[Dict[str, Any]]: + packaging_files_config = self.options.get("packaging-files-config") + if packaging_files_config is None: + return None + # packaging-files-config is either a string or a dict + # if it's a string, we can split on the comma to get the dict + # otherwise we just return + try: + return {k.strip(): v.strip() for k, v in [i.split(":") for i in packaging_files_config.split("|")]} + except AttributeError: + return packaging_files_config + + @property + def package_version(self) -> Optional[str]: + return str(self.options.get("package-version", "")) + + +class CodeGenerator(Plugin): + def __init__(self, *args, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.options_retriever = OptionsRetriever(self.options) + + def _validate_code_model_options(self) -> None: + if self.options_retriever.builders_visibility not in [ + "public", + "hidden", + "embedded", + ]: + raise ValueError("The value of --builders-visibility must be either 'public', 'hidden', or 'embedded'") + + if self.options_retriever.original_models_mode not in ["msrest", "dpg", "none"]: + raise ValueError( + "--models-mode can only be 'msrest', 'dpg' or 'none'. " + "Pass in 'msrest' if you want msrest models, or " + "'none' if you don't want any." + ) + + if not self.options_retriever.show_operations and self.options_retriever.builders_visibility == "embedded": + raise ValueError( + "Can not embed builders without operations. " + "Either set --show-operations to True, or change the value of --builders-visibility " + "to 'public' or 'hidden'." + ) + + if self.options_retriever.basic_setup_py and not self.options_retriever.package_version: + raise ValueError("--basic-setup-py must be used with --package-version") + + if self.options_retriever.package_mode and not self.options_retriever.package_version: + raise ValueError("--package-mode must be used with --package-version") + + if not self.options_retriever.show_operations and self.options_retriever.combine_operation_files: + raise ValueError( + "Can not combine operation files if you are not showing operations. 
" + "If you want operation files, pass in flag --show-operations" + ) + + if self.options_retriever.package_mode: + if ( + ( + self.options_retriever.package_mode not in TYPESPEC_PACKAGE_MODE + and self.options_retriever.from_typespec + ) + or ( + self.options_retriever.package_mode not in VALID_PACKAGE_MODE + and not self.options_retriever.from_typespec + ) + ) and not Path(self.options_retriever.package_mode).exists(): + raise ValueError( + f"--package-mode can only be {' or '.join(TYPESPEC_PACKAGE_MODE)} or directory which contains template files" # pylint: disable=line-too-long + ) + + if self.options_retriever.multiapi and self.options_retriever.version_tolerant: + raise ValueError( + "Can not currently generate version tolerant multiapi SDKs. " + "We are working on creating a new multiapi SDK for version tolerant and it is not available yet." + ) + + if self.options_retriever.client_side_validation and self.options_retriever.version_tolerant: + raise ValueError("Can not generate version tolerant with --client-side-validation. ") + + if not (self.options_retriever.azure_arm or self.options_retriever.version_tolerant): + _LOGGER.warning( + "You are generating with options that would not allow the SDK to be shipped as an official Azure SDK. " + "Please read https://aka.ms/azsdk/dpcodegen for more details." + ) + + if not self.options_retriever.is_azure_flavor and self.options_retriever.tracing: + raise ValueError("Can only have tracing turned on for Azure SDKs.") + + @staticmethod + def remove_cloud_errors(yaml_data: Dict[str, Any]) -> None: + for client in yaml_data["clients"]: + for group in client["operationGroups"]: + for operation in group["operations"]: + if not operation.get("exceptions"): + continue + i = 0 + while i < len(operation["exceptions"]): + exception = operation["exceptions"][i] + if ( + exception.get("schema") + and exception["schema"]["language"]["default"]["name"] == "CloudError" + ): + del operation["exceptions"][i] + i -= 1 + i += 1 + if yaml_data.get("schemas") and yaml_data["schemas"].get("objects"): + for i in range(len(yaml_data["schemas"]["objects"])): + obj_schema = yaml_data["schemas"]["objects"][i] + if obj_schema["language"]["default"]["name"] == "CloudError": + del yaml_data["schemas"]["objects"][i] + break + + def _build_code_model_options(self) -> Dict[str, Any]: + flags = [ + "azure_arm", + "head_as_boolean", + "license_header", + "keep_version_file", + "no_async", + "no_namespace_folders", + "basic_setup_py", + "package_name", + "package_version", + "client_side_validation", + "tracing", + "multiapi", + "polymorphic_examples", + "models_mode", + "builders_visibility", + "show_operations", + "show_send_request", + "only_path_and_body_params_positional", + "version_tolerant", + "low_level_client", + "combine_operation_files", + "package_mode", + "package_pprint_name", + "packaging_files_config", + "default_optional_constants_to_none", + "generate_sample", + "generate_test", + "default_api_version", + "from_typespec", + "flavor", + "company_name", + "emit_cross_language_definition_file", + ] + return {f: getattr(self.options_retriever, f) for f in flags} + + def get_yaml(self) -> Dict[str, Any]: + # cadl file doesn't have to be relative to output folder + with open(self.options["cadl_file"], "r", encoding="utf-8-sig") as fd: + return yaml.safe_load(fd.read()) + + def get_serializer(self, code_model: CodeModel): + return JinjaSerializer(code_model, output_folder=self.output_folder) + + def process(self) -> bool: + # List the input file, should be only 
one + self._validate_code_model_options() + options = self._build_code_model_options() + yaml_data = self.get_yaml() + + if self.options_retriever.azure_arm: + self.remove_cloud_errors(yaml_data) + + code_model = CodeModel(yaml_data=yaml_data, options=options) + if not self.options_retriever.is_azure_flavor and any(client.lro_operations for client in code_model.clients): + raise ValueError("Only support LROs for Azure SDKs") + serializer = self.get_serializer(code_model) + serializer.serialize() + + return True + + +if __name__ == "__main__": + # CADL pipeline will call this + parsed_args, unknown_args = parse_args() + CodeGenerator( + output_folder=parsed_args.output_folder, + cadl_file=parsed_args.cadl_file, + **unknown_args, + ).process() diff --git a/packages/http-client-python/generator/pygen/codegen/_utils.py b/packages/http-client-python/generator/pygen/codegen/_utils.py new file mode 100644 index 0000000000..726ccc3f2f --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/_utils.py @@ -0,0 +1,17 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +DEFAULT_HEADER_TEXT = ( + "Copyright (c) {company_name} Corporation. All rights reserved.\n" + "Licensed under the MIT License. See License.txt in the project root for license information.\n" + "Code generated by {company_name} (R) Python Code Generator.\n" + "Changes may cause incorrect behavior and will be lost if the code is regenerated." +) + +SWAGGER_PACKAGE_MODE = ["mgmtplane", "dataplane"] # for backward compatibility +TYPESPEC_PACKAGE_MODE = ["azure-mgmt", "azure-dataplane", "generic"] +VALID_PACKAGE_MODE = SWAGGER_PACKAGE_MODE + TYPESPEC_PACKAGE_MODE +NAME_LENGTH_LIMIT = 40 diff --git a/packages/http-client-python/generator/pygen/codegen/models/__init__.py b/packages/http-client-python/generator/pygen/codegen/models/__init__.py new file mode 100644 index 0000000000..06b94ad093 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/__init__.py @@ -0,0 +1,204 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import logging +from typing import Any, Dict, Union, Optional +from .base import BaseModel +from .base_builder import BaseBuilder, ParameterListType +from .code_model import CodeModel +from .client import Client +from .model_type import ModelType, JSONModelType, DPGModelType, MsrestModelType +from .dictionary_type import DictionaryType +from .list_type import ListType +from .combined_type import CombinedType +from .primitive_types import ( + ByteArraySchema, + DateType, + DatetimeType, + DurationType, + IntegerType, + FloatType, + StringType, + TimeType, + AnyType, + PrimitiveType, + BinaryType, + BooleanType, + AnyObjectType, + UnixTimeType, + SdkCoreType, + DecimalType, + MultiPartFileType, +) +from .enum_type import EnumType, EnumValue +from .base import BaseType +from .constant_type import ConstantType +from .imports import FileImport, ImportType, TypingSection +from .lro_operation import LROOperation +from .paging_operation import PagingOperation +from .parameter import ( + Parameter, + ParameterMethodLocation, + ParameterLocation, + BodyParameter, + ParameterDelimeter, + ClientParameter, + ConfigParameter, +) +from .operation import Operation +from .property import Property +from .operation_group import OperationGroup +from .response import Response +from .parameter_list import ( + ParameterList, + ClientGlobalParameterList, + ConfigGlobalParameterList, +) +from .request_builder import ( + RequestBuilder, + OverloadedRequestBuilder, + RequestBuilderBase, +) +from .lro_paging_operation import LROPagingOperation +from .request_builder_parameter import ( + RequestBuilderParameter, + RequestBuilderBodyParameter, +) +from .credential_types import ( + TokenCredentialType, + KeyCredentialType, + ARMChallengeAuthenticationPolicyType, + BearerTokenCredentialPolicyType, + KeyCredentialPolicyType, + CredentialType, +) + +__all__ = [ + "KeyCredentialPolicyType", + "AnyType", + "BaseModel", + "BaseType", + "CodeModel", + "Client", + "ConstantType", + "ModelType", + "DictionaryType", + "ListType", + "EnumType", + "EnumValue", + "FileImport", + "ImportType", + "TypingSection", + "PrimitiveType", + "LROOperation", + "Operation", + "PagingOperation", + "Parameter", + "ParameterList", + "OperationGroup", + "Property", + "RequestBuilder", + "Response", + "TokenCredentialType", + "LROPagingOperation", + "BaseBuilder", + "RequestBuilderParameter", + "BinaryType", + "ClientGlobalParameterList", + "ConfigGlobalParameterList", + "ParameterMethodLocation", + "ParameterLocation", + "OverloadedRequestBuilder", + "RequestBuilderBase", + "BodyParameter", + "RequestBuilderBodyParameter", + "ParameterDelimeter", + "CredentialType", + "ClientParameter", + "ConfigParameter", + "ParameterListType", +] + +TYPE_TO_OBJECT = { + "integer": IntegerType, + "float": FloatType, + "decimal": DecimalType, + "string": StringType, + "list": ListType, + "dict": DictionaryType, + "constant": ConstantType, + "enum": EnumType, + "enumvalue": EnumValue, + "binary": BinaryType, + "any": AnyType, + "utcDateTime": DatetimeType, + "offsetDateTime": DatetimeType, + "plainTime": TimeType, + "duration": DurationType, + "plainDate": DateType, + "bytes": ByteArraySchema, + "boolean": BooleanType, + "combined": CombinedType, + "OAuth2": TokenCredentialType, + "Key": KeyCredentialType, + "ARMChallengeAuthenticationPolicy": ARMChallengeAuthenticationPolicyType, + "BearerTokenCredentialPolicy": BearerTokenCredentialPolicyType, + "KeyCredentialPolicy": 
KeyCredentialPolicyType, + "any-object": AnyObjectType, + "unixtime": UnixTimeType, + "credential": StringType, + "sdkcore": SdkCoreType, + "multipartfile": MultiPartFileType, +} +_LOGGER = logging.getLogger(__name__) + + +def build_type(yaml_data: Dict[str, Any], code_model: CodeModel) -> BaseType: + yaml_id = id(yaml_data) + try: + return code_model.lookup_type(yaml_id) + except KeyError: + # Not created yet, let's create it and add it to the index + pass + response: Optional[BaseType] = None + if yaml_data["type"] == "model": + # need to special case model to avoid recursion + if yaml_data["base"] == "json" or not code_model.options["models_mode"]: + model_type = JSONModelType + elif yaml_data["base"] == "dpg": + model_type = DPGModelType # type: ignore + else: + model_type = MsrestModelType # type: ignore + response = model_type(yaml_data, code_model) + code_model.types_map[yaml_id] = response + response.fill_instance_from_yaml(yaml_data, code_model) + elif yaml_data["type"] == "enum": + # avoid recursion because we add the parent enum type to the enum value + response = EnumType( + yaml_data, + code_model, + values=[], + value_type=build_type(yaml_data["valueType"], code_model), + ) + code_model.types_map[yaml_id] = response + response.fill_instance_from_yaml(yaml_data, code_model) + else: + object_type = yaml_data.get("type") + if object_type not in TYPE_TO_OBJECT: + _LOGGER.warning( + 'Unrecognized definition type "%s" is found, falling back it as "string"! ', + yaml_data["type"], + ) + object_type = "string" + response = TYPE_TO_OBJECT[object_type].from_yaml(yaml_data, code_model) # type: ignore + if response is None: + raise ValueError("response can not be None") + code_model.types_map[yaml_id] = response + return response + + +RequestBuilderType = Union[RequestBuilder, OverloadedRequestBuilder] +ParameterType = Union[Parameter, RequestBuilderParameter, ClientParameter, ConfigParameter] +OperationType = Union[Operation, LROOperation, PagingOperation, LROPagingOperation] diff --git a/packages/http-client-python/generator/pygen/codegen/models/base.py b/packages/http-client-python/generator/pygen/codegen/models/base.py new file mode 100644 index 0000000000..516cade3bc --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/base.py @@ -0,0 +1,186 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Any, Dict, TYPE_CHECKING, List, Optional +from abc import ABC, abstractmethod +from .imports import FileImport + + +if TYPE_CHECKING: + from .code_model import CodeModel + from .model_type import ModelType + + +class BaseModel: + """This is the base class for model representations that are based on some YAML data. + + :param yaml_data: the yaml data for this schema + :type yaml_data: dict[str, Any] + """ + + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + self.yaml_data = yaml_data + self.code_model = code_model + + @property + def id(self) -> int: + return id(self.yaml_data) + + def __repr__(self): + return f"<{self.__class__.__name__}>" + + +class BaseType(BaseModel, ABC): # pylint: disable=too-many-public-methods + """This is the base class for all types. 
+ + :param yaml_data: the yaml data for this schema + :type yaml_data: dict[str, Any] + """ + + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data, code_model) + self.type = yaml_data["type"] # the type discriminator + self.api_versions: List[str] = yaml_data.get("apiVersions", []) # api versions this type is in. + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "BaseType": + return cls(yaml_data=yaml_data, code_model=code_model) + + def imports(self, **kwargs) -> FileImport: # pylint: disable=unused-argument + return FileImport(self.code_model) + + def imports_for_multiapi(self, **kwargs: Any) -> FileImport: + return self.imports(**kwargs) + + def imports_for_sample(self) -> FileImport: + return FileImport(self.code_model) + + @staticmethod + def serialize_sample_value(value: Any) -> str: + return repr(value) + + @property + def xml_metadata(self) -> Dict[str, Any]: + """XML metadata for the type, if the type has it.""" + return self.yaml_data.get("xmlMetadata", {}) + + @property + def is_xml(self) -> bool: + """Whether the type is an XML type or not. Most likely not.""" + return bool(self.xml_metadata) + + @property + def xml_serialization_ctxt(self) -> Optional[str]: + """Return the serialization context in case this schema is used in an operation.""" + attrs_list = [] + if self.xml_metadata.get("name"): + attrs_list.append(f"'name': '{self.xml_metadata['name']}'") + if self.xml_metadata.get("attribute", False): + attrs_list.append("'attr': True") + if self.xml_metadata.get("prefix", False): + attrs_list.append(f"'prefix': '{self.xml_metadata['prefix']}'") + if self.xml_metadata.get("namespace", False): + attrs_list.append(f"'ns': '{self.xml_metadata['namespace']}'") + if self.xml_metadata.get("text"): + attrs_list.append("'text': True") + return ", ".join(attrs_list) + + @property + def serialization_type(self) -> str: + """The tag recognized by 'msrest' as a serialization/deserialization. + + 'str', 'int', 'float', 'bool' or + https://github.com/Azure/msrest-for-python/blob/b505e3627b547bd8fdc38327e86c70bdb16df061/msrest/serialization.py#L407-L416 + + or the object schema name (e.g. DotSalmon). + + If list: '[str]' + If dict: '{str}' + """ + raise NotImplementedError() + + @property + def msrest_deserialization_key(self) -> str: + return self.serialization_type + + @property + def client_default_value(self) -> Any: + """Whether there's a client default value for this type""" + return self.yaml_data.get("clientDefaultValue") + + @abstractmethod + def description(self, *, is_operation_file: bool) -> str: + """The description""" + + @abstractmethod + def docstring_text(self, **kwargs: Any) -> str: + """The names used in rtype documentation""" + + @abstractmethod + def docstring_type(self, **kwargs: Any) -> str: + """The python type used for RST syntax input. + + Special case for enum, for instance: 'str or ~namespace.EnumName' + """ + + @abstractmethod + def type_annotation(self, **kwargs: Any) -> str: + """The python type used for type annotation + + Special case for enum, for instance: Union[str, "EnumName"] + """ + + @property + def validation(self) -> Optional[Dict[str, Any]]: + """Whether there's any validation constraints on this type. + + Even though we generate validation maps if there are validation constraints, + only SDKs with client-side-validate=true (0.001% libraries, if any) actually raise in this case. 
+ """ + return None + + def get_declaration(self, value: Any) -> str: + """Return the current value from YAML as a Python string that represents the constant. + + Example, if schema is "bytearray" and value is "foo", + should return bytearray("foo", encoding="utf-8") + as a string. + + This is important for constant serialization. + + By default, return value, since it works sometimes (integer) + """ + return str(value) + + @abstractmethod + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + """Template of what this schema would look like as JSON input""" + + def get_polymorphic_subtypes( + self, polymorphic_subtypes: List["ModelType"] # pylint: disable=unused-argument + ) -> None: + return None + + @property + @abstractmethod + def instance_check_template(self) -> str: + """Template of what an instance check of a variable for this type would look like""" + + @property + def serialization_constraints(self) -> List[str]: + """Whether there are any serialization constraints when serializing this type.""" + return [] + + @property + def type_description(self) -> str: + return self.type_annotation() + + @property + def is_form_data(self) -> bool: + return False diff --git a/packages/http-client-python/generator/pygen/codegen/models/base_builder.py b/packages/http-client-python/generator/pygen/codegen/models/base_builder.py new file mode 100644 index 0000000000..87d8b02353 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/base_builder.py @@ -0,0 +1,118 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import logging +from typing import ( + List, + Dict, + Any, + Generic, + TypeVar, + Optional, + Union, + TYPE_CHECKING, + cast, +) +from abc import abstractmethod + +from .base import BaseModel +from .parameter_list import ( + ParameterList, + RequestBuilderParameterList, + OverloadedRequestBuilderParameterList, +) + +ParameterListType = TypeVar( + "ParameterListType", + bound=Union[ + ParameterList, + RequestBuilderParameterList, + OverloadedRequestBuilderParameterList, + ], +) +if TYPE_CHECKING: + from .code_model import CodeModel + from .client import Client + from .operation import Operation + from .request_builder import RequestBuilder + + +OverloadListType = TypeVar("OverloadListType", bound=Union[List["Operation"], List["RequestBuilder"]]) + +_LOGGER = logging.getLogger(__name__) + + +class BaseBuilder( + Generic[ParameterListType, OverloadListType], BaseModel +): # pylint: disable=too-many-instance-attributes + """Base class for Operations and Request Builders""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + name: str, + parameters: ParameterListType, + *, + overloads: Optional[OverloadListType] = None, + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.client = client + self.name = name + self._description: str = yaml_data.get("description", "") + self.parameters = parameters + self.overloads = overloads or cast(OverloadListType, []) + self._summary: str = yaml_data.get("summary", "") + self.want_tracing: bool = yaml_data.get("wantTracing", True) + self.group_name: str = yaml_data["groupName"] # either operationGroup or client I am on + self.is_overload: bool = yaml_data["isOverload"] + self.api_versions: List[str] = yaml_data["apiVersions"] + self.added_on: Optional[str] = yaml_data.get("addedOn") + self.external_docs: Optional[Dict[str, Any]] = yaml_data.get("externalDocs") + + if code_model.options["version_tolerant"] and yaml_data.get("abstract"): + _LOGGER.warning( + 'Not going to generate operation "%s" because we are unable to generate this ' + "type of operation right now. " + 'Please write your own custom operation in the "_patch.py" file ' + "following https://aka.ms/azsdk/python/dpcodegen/python/customize", + name, + ) + self.abstract = True + else: + self.abstract = False + + @property + def summary(self) -> Optional[str]: + if self.abstract: + return None + return self._summary + + def pylint_disable(self, async_mode: bool) -> str: # pylint: disable=unused-argument + return "" + + @abstractmethod + def response_type_annotation(self, **kwargs) -> str: ... + + @abstractmethod + def response_docstring_text(self, **kwargs) -> str: ... + + @abstractmethod + def response_docstring_type(self, **kwargs) -> str: ... + + @property + def description(self) -> str: + if self.abstract: + return ( + f'You need to write a custom operation for "{self.name}". Please refer to ' + "https://aka.ms/azsdk/python/dpcodegen/python/customize to learn how to customize." 
+ ) + return self._description or self.name + + def method_signature(self, async_mode: bool) -> List[str]: + if self.abstract: + return ["*args,", "**kwargs"] + return self.parameters.method_signature(async_mode) diff --git a/packages/http-client-python/generator/pygen/codegen/models/client.py b/packages/http-client-python/generator/pygen/codegen/models/client.py new file mode 100644 index 0000000000..71caea2572 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/client.py @@ -0,0 +1,433 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Any, Dict, TYPE_CHECKING, TypeVar, Generic, Union, List, Optional + +from .base import BaseModel +from .parameter_list import ClientGlobalParameterList, ConfigGlobalParameterList +from .imports import FileImport, ImportType, TypingSection, MsrestImportType +from .utils import add_to_pylint_disable +from .operation_group import OperationGroup +from .request_builder import ( + RequestBuilder, + OverloadedRequestBuilder, + get_request_builder, +) +from .parameter import Parameter, ParameterMethodLocation +from .lro_operation import LROOperation +from .lro_paging_operation import LROPagingOperation +from ...utils import extract_original_name, NAME_LENGTH_LIMIT + +ParameterListType = TypeVar( + "ParameterListType", + bound=Union[ClientGlobalParameterList, ConfigGlobalParameterList], +) + +if TYPE_CHECKING: + from .code_model import CodeModel + from . import OperationType + + +class _ClientConfigBase(Generic[ParameterListType], BaseModel): + """The service client base. Shared across our Client and Config type""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + parameters: ParameterListType, + ): + super().__init__(yaml_data, code_model) + self.parameters = parameters + self.url: str = self.yaml_data["url"] # the base endpoint of the client. 
Can be parameterized or not + self.legacy_filename: str = self.yaml_data.get("legacyFilename", "client") + + @property + def description(self) -> str: + return self.yaml_data["description"] + + @property + def name(self) -> str: + return self.yaml_data["name"] + + +class Client(_ClientConfigBase[ClientGlobalParameterList]): + """Model representing our service client""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + parameters: ClientGlobalParameterList, + *, + is_subclient: bool = False, + ): + super().__init__(yaml_data, code_model, parameters) + self.operation_groups: List[OperationGroup] = [] + self.config = Config.from_yaml(yaml_data, self.code_model) + self.is_subclient = is_subclient + self.request_builders = self._build_request_builders() + if self.code_model.options["show_operations"]: + self.operation_groups = [ + OperationGroup.from_yaml(op_group, code_model, self) + for op_group in self.yaml_data.get("operationGroups", []) + ] + self.link_lro_initial_operations() + self.request_id_header_name = self.yaml_data.get("requestIdHeaderName", None) + self.has_etag: bool = yaml_data.get("hasEtag", False) + + def _build_request_builders( + self, + ) -> List[Union[RequestBuilder, OverloadedRequestBuilder]]: + request_builders: List[Union[RequestBuilder, OverloadedRequestBuilder]] = [] + + def add_og_request_builder(og: Dict[str, Any]): + for operation_yaml in og["operations"]: + request_builder = get_request_builder( + operation_yaml, + code_model=self.code_model, + client=self, + ) + if operation_yaml.get("isLroInitialOperation"): + # we want to change the name + request_builder.name = request_builder.get_name( + extract_original_name(request_builder.yaml_data["name"]), + request_builder.yaml_data, + request_builder.code_model, + request_builder.client, + ) + if request_builder.overloads: + request_builders.extend(request_builder.overloads) + request_builders.append(request_builder) + if operation_yaml.get("nextOperation"): + # i am a paging operation and i have a next operation. 
+ # Make sure to include my next operation + request_builders.append( + get_request_builder( + operation_yaml["nextOperation"], + code_model=self.code_model, + client=self, + ) + ) + + queue = self.yaml_data.get("operationGroups", []).copy() + while queue: + now = queue.pop(0) + add_og_request_builder(now) + if now.get("operationGroups"): + queue.extend(now["operationGroups"]) + + return request_builders + + def pipeline_class(self, async_mode: bool) -> str: + if self.code_model.options["azure_arm"]: + if async_mode: + return "AsyncARMPipelineClient" + return "ARMPipelineClient" + if async_mode: + return "AsyncPipelineClient" + return "PipelineClient" + + @property + def credential(self) -> Optional[Parameter]: + """The credential param, if one exists""" + return self.parameters.credential + + @property + def send_request_name(self) -> str: + """Name of the send request function""" + return "send_request" if self.code_model.options["show_send_request"] else "_send_request" + + @property + def has_parameterized_host(self) -> bool: + """Whether the base url is parameterized or not""" + return not any(p for p in self.parameters if p.is_host) + + def pylint_disable(self) -> str: + retval = "" + if not any( + p + for p in self.parameters.parameters + if p.is_api_version + and p.method_location in [ParameterMethodLocation.KEYWORD_ONLY, ParameterMethodLocation.KWARG] + ): + retval = add_to_pylint_disable(retval, "client-accepts-api-version-keyword") + if len(self.operation_groups) > 6: + retval = add_to_pylint_disable(retval, "too-many-instance-attributes") + return retval + + @property + def url_pylint_disable(self) -> str: + # if the url is too long + retval = "" + if len(self.url) > 85: + retval = add_to_pylint_disable(retval, "line-too-long") + return retval + + @property + def filename(self) -> str: + """Name of the file for the client""" + if self.code_model.options["version_tolerant"] or self.code_model.options["low_level_client"]: + return "_client" + return f"_{self.legacy_filename}" + + def lookup_request_builder(self, request_builder_id: int) -> Union[RequestBuilder, OverloadedRequestBuilder]: + """Find the request builder based off of id""" + try: + return next(rb for rb in self.request_builders if id(rb.yaml_data) == request_builder_id) + except StopIteration as exc: + raise KeyError(f"No request builder with id {request_builder_id} found.") from exc + + def lookup_operation(self, operation_id: int) -> "OperationType": + try: + return next(o for og in self.operation_groups for o in og.operations if id(o.yaml_data) == operation_id) + except StopIteration as exc: + raise KeyError(f"No operation with id {operation_id} found.") from exc + + def _imports_shared(self, async_mode: bool) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB, TypingSection.CONDITIONAL) + if self.code_model.options["azure_arm"]: + file_import.add_submodule_import("azure.mgmt.core", self.pipeline_class(async_mode), ImportType.SDKCORE) + else: + file_import.add_submodule_import( + "" if self.code_model.is_azure_flavor else "runtime", + self.pipeline_class(async_mode), + ImportType.SDKCORE, + ) + + for gp in self.parameters: + if gp.method_location == ParameterMethodLocation.KWARG: + continue + file_import.merge( + gp.imports( + async_mode, + relative_path=".." 
if async_mode else ".", + operation=True, + ) + ) + file_import.add_submodule_import( + "._configuration", + f"{self.name}Configuration", + ImportType.LOCAL, + ) + file_import.add_msrest_import( + relative_path=".." if async_mode else ".", + msrest_import_type=MsrestImportType.SerializerDeserializer, + typing_section=TypingSection.REGULAR, + ) + file_import.add_submodule_import( + "pipeline" if self.code_model.is_azure_flavor else "runtime", + "policies", + ImportType.SDKCORE, + ) + if self.code_model.options["azure_arm"]: + async_prefix = "Async" if async_mode else "" + file_import.add_submodule_import( + "azure.mgmt.core.policies", + f"{async_prefix}ARMAutoResourceProviderRegistrationPolicy", + ImportType.SDKCORE, + ) + + # import for "Self" + file_import.add_submodule_import( + "typing_extensions", + "Self", + ImportType.STDLIB, + ) + return file_import + + @property + def has_mixin(self) -> bool: + """Do we want a mixin ABC class for typing purposes?""" + return any(og for og in self.operation_groups if og.is_mixin) + + @property + def lro_operations(self) -> List["OperationType"]: + """all LRO operations in this SDK?""" + return [operation for operation_group in self.operation_groups for operation in operation_group.lro_operations] + + @property + def has_public_lro_operations(self) -> bool: + """Are there any public LRO operations in this SDK?""" + return any(not operation.internal for operation in self.lro_operations) + + @property + def has_operations(self) -> bool: + return any(operation_group.has_operations for operation_group in self.operation_groups) + + def link_lro_initial_operations(self) -> None: + """Link each LRO operation to its initial operation""" + for operation_group in self.operation_groups: + for operation in operation_group.operations: + if isinstance(operation, (LROOperation, LROPagingOperation)): + operation.initial_operation = self.lookup_operation(id(operation.yaml_data["initialOperation"])) + + @property + def has_abstract_operations(self) -> bool: + """Whether there is abstract operation in any operation group.""" + return any(og.has_abstract_operations for og in self.operation_groups) + + @property + def has_non_abstract_operations(self) -> bool: + """Whether there is non-abstract operation in any operation group.""" + return any(og.has_non_abstract_operations for og in self.operation_groups) + + def imports(self, async_mode: bool) -> FileImport: + file_import = self._imports_shared(async_mode) + if async_mode: + file_import.add_submodule_import("typing", "Awaitable", ImportType.STDLIB) + file_import.add_submodule_import( + "rest", + "AsyncHttpResponse", + ImportType.SDKCORE, + TypingSection.CONDITIONAL, + ) + else: + file_import.add_submodule_import( + "rest", + "HttpResponse", + ImportType.SDKCORE, + TypingSection.CONDITIONAL, + ) + file_import.add_submodule_import( + "rest", + "HttpRequest", + ImportType.SDKCORE, + TypingSection.CONDITIONAL, + ) + for og in self.operation_groups: + file_import.add_submodule_import( + f".{self.code_model.operations_folder_name}", + og.class_name, + ImportType.LOCAL, + ) + + if self.code_model.model_types and self.code_model.options["models_mode"] == "msrest": + path_to_models = ".." if async_mode else "." 
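+            # async mode reaches back one package ("..") for models, sync imports them locally (".");
+            # in both cases the module is imported under the "_models" alias.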
+ file_import.add_submodule_import(path_to_models, "models", ImportType.LOCAL, alias="_models") + elif self.code_model.options["models_mode"] == "msrest": + # in this case, we have client_models = {} in the service client, which needs a type annotation + # this import will always be commented, so will always add it to the typing section + file_import.add_submodule_import("typing", "Dict", ImportType.STDLIB) + file_import.add_submodule_import("copy", "deepcopy", ImportType.STDLIB) + return file_import + + def imports_for_multiapi(self, async_mode: bool) -> FileImport: + file_import = self._imports_shared(async_mode) + file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB, TypingSection.CONDITIONAL) + try: + mixin_operation = next(og for og in self.operation_groups if og.is_mixin) + file_import.add_submodule_import("._operations_mixin", mixin_operation.class_name, ImportType.LOCAL) + except StopIteration: + pass + file_import.add_submodule_import("azure.profiles", "KnownProfiles", import_type=ImportType.SDKCORE) + file_import.add_submodule_import("azure.profiles", "ProfileDefinition", import_type=ImportType.SDKCORE) + file_import.add_submodule_import( + "azure.profiles.multiapiclient", + "MultiApiClientMixin", + import_type=ImportType.SDKCORE, + ) + return file_import + + @classmethod + def from_yaml( + cls, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + *, + is_subclient: bool = False, + ) -> "Client": + return cls( + yaml_data=yaml_data, + code_model=code_model, + parameters=ClientGlobalParameterList.from_yaml(yaml_data, code_model), + is_subclient=is_subclient, + ) + + +class Config(_ClientConfigBase[ConfigGlobalParameterList]): + """Model representing our Config type.""" + + def pylint_disable(self) -> str: + retval = add_to_pylint_disable("", "too-many-instance-attributes") + if len(self.name) > NAME_LENGTH_LIMIT: + retval = add_to_pylint_disable(retval, "name-too-long") + return retval + + @property + def description(self) -> str: + return ( + f"Configuration for {self.yaml_data['name']}.\n\n." + "Note that all parameters used to create this instance are saved as instance attributes." 
+ ) + + @property + def sdk_moniker(self) -> str: + package_name = self.code_model.options["package_name"] + if package_name and package_name.startswith("azure-"): + package_name = package_name[len("azure-") :] + return package_name if package_name else self.yaml_data["name"].lower() + + @property + def name(self) -> str: + return f"{super().name}Configuration" + + def _imports_shared(self, async_mode: bool) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_submodule_import( + "pipeline" if self.code_model.is_azure_flavor else "runtime", + "policies", + ImportType.SDKCORE, + ) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB, TypingSection.CONDITIONAL) + if self.code_model.options["package_version"]: + file_import.add_submodule_import(".._version" if async_mode else "._version", "VERSION", ImportType.LOCAL) + if self.code_model.options["azure_arm"]: + policy = "AsyncARMChallengeAuthenticationPolicy" if async_mode else "ARMChallengeAuthenticationPolicy" + file_import.add_submodule_import("azure.mgmt.core.policies", "ARMHttpLoggingPolicy", ImportType.SDKCORE) + file_import.add_submodule_import("azure.mgmt.core.policies", policy, ImportType.SDKCORE) + + return file_import + + def imports(self, async_mode: bool) -> FileImport: + file_import = self._imports_shared(async_mode) + for gp in self.parameters: + if gp.method_location == ParameterMethodLocation.KWARG and gp not in self.parameters.kwargs_to_pop: + continue + file_import.merge( + gp.imports( + async_mode=async_mode, + relative_path=".." if async_mode else ".", + operation=True, + ) + ) + return file_import + + def imports_for_multiapi(self, async_mode: bool) -> FileImport: + file_import = self._imports_shared(async_mode) + for gp in self.parameters: + if ( + gp.method_location == ParameterMethodLocation.KWARG + and gp not in self.parameters.kwargs_to_pop + and gp.client_name == "api_version" + ): + continue + file_import.merge( + gp.imports_for_multiapi( + async_mode=async_mode, + relative_path=".." if async_mode else ".", + operation=True, + ) + ) + return file_import + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "Config": + return cls( + yaml_data=yaml_data, + code_model=code_model, + parameters=ConfigGlobalParameterList.from_yaml(yaml_data, code_model), + ) diff --git a/packages/http-client-python/generator/pygen/codegen/models/code_model.py b/packages/http-client-python/generator/pygen/codegen/models/code_model.py new file mode 100644 index 0000000000..7ddb90fe37 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/code_model.py @@ -0,0 +1,237 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import List, Dict, Any, Set, Union, Literal + +from .base import BaseType +from .enum_type import EnumType +from .model_type import ModelType, UsageFlags +from .combined_type import CombinedType +from .client import Client +from .request_builder import RequestBuilder, OverloadedRequestBuilder + + +def _is_legacy(options) -> bool: + return not (options.get("version_tolerant") or options.get("low_level_client")) + + +class CodeModel: # pylint: disable=too-many-public-methods, disable=too-many-instance-attributes + """Top level code model + + :param options: Options of the code model. I.e., whether this is for management generation + :type options: dict[str, bool] + :param str module_name: The module name for the client. Is in snake case. + :param str class_name: The class name for the client. Is in pascal case. + :param str description: The description of the client + :param str namespace: The namespace of our module + :param schemas: The list of schemas we are going to serialize in the models files. Maps their yaml + id to our created ModelType. + :type schemas: dict[int, ~autorest.models.ModelType] + :param sorted_schemas: Our schemas in order by inheritance and alphabet + :type sorted_schemas: list[~autorest.models.ModelType] + :param enums: The enums, if any, we are going to serialize. Maps their yaml id to our created EnumType. + :type enums: Dict[int, ~autorest.models.EnumType] + :param primitives: List of schemas we've created that are not EnumSchemas or ObjectSchemas. Maps their + yaml id to our created schemas. + :type primitives: Dict[int, ~autorest.models.BaseType] + :param package_dependency: All the dependencies needed in setup.py + :type package_dependency: Dict[str, str] + """ + + def __init__( + self, + yaml_data: Dict[str, Any], + options: Dict[str, Any], + *, + is_subnamespace: bool = False, + ) -> None: + self.yaml_data = yaml_data + self.options = options + self.namespace = self.yaml_data["namespace"] + self.types_map: Dict[int, BaseType] = {} # map yaml id to schema + self._model_types: List[ModelType] = [] + from . 
import build_type + + for type_yaml in yaml_data.get("types", []): + build_type(yaml_data=type_yaml, code_model=self) + self.clients: List[Client] = [ + Client.from_yaml(client_yaml_data, self) for client_yaml_data in yaml_data["clients"] + ] + self.subnamespace_to_clients: Dict[str, List[Client]] = { + subnamespace: [Client.from_yaml(client_yaml, self, is_subclient=True) for client_yaml in client_yamls] + for subnamespace, client_yamls in yaml_data.get("subnamespaceToClients", {}).items() + } + if self.options["models_mode"] and self.model_types: + self.sort_model_types() + self.is_subnamespace = is_subnamespace + self.named_unions: List[CombinedType] = [ + t for t in self.types_map.values() if isinstance(t, CombinedType) and t.name + ] + self.cross_language_package_id = self.yaml_data.get("crossLanguagePackageId") + self.for_test: bool = False + + @property + def has_form_data(self) -> bool: + return any(og.has_form_data_body for client in self.clients for og in client.operation_groups) + + @property + def has_etag(self) -> bool: + return any(client.has_etag for client in self.clients) + + @property + def has_operations(self) -> bool: + if any(c for c in self.clients if c.has_operations): + return True + return any(c for clients in self.subnamespace_to_clients.values() for c in clients if c.has_operations) + + @property + def has_non_abstract_operations(self) -> bool: + return any(c for c in self.clients if c.has_non_abstract_operations) or any( + c for cs in self.subnamespace_to_clients.values() for c in cs if c.has_non_abstract_operations + ) + + def lookup_request_builder(self, request_builder_id: int) -> Union[RequestBuilder, OverloadedRequestBuilder]: + """Find the request builder based off of id""" + for client in self.clients: + try: + return client.lookup_request_builder(request_builder_id) + except KeyError: + pass + raise KeyError(f"No request builder with id {request_builder_id} found.") + + @property + def is_azure_flavor(self) -> bool: + return self.options["flavor"] == "azure" + + @property + def rest_layer_name(self) -> str: + """If we have a separate rest layer, what is its name?""" + return "rest" if self.options["builders_visibility"] == "public" else "_rest" + + @property + def client_filename(self) -> str: + return self.clients[0].filename + + def need_vendored_code(self, async_mode: bool) -> bool: + """Whether we need to vendor code in the _vendor.py file for this SDK""" + if self.has_abstract_operations: + return True + if async_mode: + return self.need_mixin_abc + return self.need_mixin_abc or self.has_etag or self.has_form_data + + @property + def need_mixin_abc(self) -> bool: + return any(c for c in self.clients if c.has_mixin) + + @property + def has_abstract_operations(self) -> bool: + return any(c for c in self.clients if c.has_abstract_operations) + + @property + def operations_folder_name(self) -> str: + """Get the name of the operations folder that holds operations.""" + name = "operations" + if self.options["version_tolerant"] and not any( + og for client in self.clients for og in client.operation_groups if not og.is_mixin + ): + name = f"_{name}" + return name + + @property + def description(self) -> str: + return self.clients[0].description + + def lookup_type(self, schema_id: int) -> BaseType: + """Looks to see if the schema has already been created. + + :param int schema_id: The yaml id of the schema + :return: If created, we return the created schema, otherwise, we throw. 
+ :rtype: ~autorest.models.BaseType + :raises: KeyError if schema is not found + """ + try: + return next(type for id, type in self.types_map.items() if id == schema_id) + except StopIteration as exc: + raise KeyError(f"Couldn't find schema with id {schema_id}") from exc + + @property + def model_types(self) -> List[ModelType]: + """All of the model types in this class""" + if not self._model_types: + self._model_types = [ + t for t in self.types_map.values() if isinstance(t, ModelType) and t.usage != UsageFlags.Default.value + ] + return self._model_types + + @model_types.setter + def model_types(self, val: List[ModelType]) -> None: + self._model_types = val + + @property + def public_model_types(self) -> List[ModelType]: + return [m for m in self.model_types if not m.internal and not m.base == "json"] + + @property + def enums(self) -> List[EnumType]: + """All of the enums""" + return [t for t in self.types_map.values() if isinstance(t, EnumType)] + + @property + def core_library(self) -> Literal["azure.core", "corehttp"]: + return "azure.core" if self.is_azure_flavor else "corehttp" + + def _sort_model_types_helper( + self, + current: ModelType, + seen_schema_names: Set[str], + seen_schema_yaml_ids: Set[int], + ): + if current.id in seen_schema_yaml_ids: + return [] + if current.name in seen_schema_names: + raise ValueError(f"We have already generated a schema with name {current.name}") + ancestors = [current] + if current.parents: + for parent in current.parents: + if parent.id in seen_schema_yaml_ids: + continue + seen_schema_names.add(current.name) + seen_schema_yaml_ids.add(current.id) + ancestors = self._sort_model_types_helper(parent, seen_schema_names, seen_schema_yaml_ids) + ancestors + seen_schema_names.add(current.name) + seen_schema_yaml_ids.add(current.id) + return ancestors + + def sort_model_types(self) -> None: + """Sorts the final object schemas by inheritance and by alphabetical order. + + :return: None + :rtype: None + """ + seen_schema_names: Set[str] = set() + seen_schema_yaml_ids: Set[int] = set() + sorted_object_schemas: List[ModelType] = [] + for schema in sorted(self.model_types, key=lambda x: x.name.lower()): + sorted_object_schemas.extend(self._sort_model_types_helper(schema, seen_schema_names, seen_schema_yaml_ids)) + self.model_types = sorted_object_schemas + + @property + def models_filename(self) -> str: + """Get the names of the model file(s)""" + if self.is_legacy: + return "_models_py3" + return "_models" + + @property + def enums_filename(self) -> str: + """The name of the enums file""" + if self.is_legacy: + return f"_{self.clients[0].legacy_filename}_enums" + return "_enums" + + @property + def is_legacy(self) -> bool: + return _is_legacy(self.options) diff --git a/packages/http-client-python/generator/pygen/codegen/models/combined_type.py b/packages/http-client-python/generator/pygen/codegen/models/combined_type.py new file mode 100644 index 0000000000..6afbe08ac9 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/combined_type.py @@ -0,0 +1,149 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Type, Tuple, Union +import re +from .imports import FileImport, ImportType, TypingSection +from .base import BaseType +from .model_type import ModelType + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class CombinedType(BaseType): + """A type that consists of multiple different types. + + Used by body parameters that have multiple types, i.e. one that can be + a stream body or a JSON body. + """ + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + types: List[BaseType], + ) -> None: + super().__init__(yaml_data, code_model) + self.types = types # the types that this type is combining + self.name = yaml_data.get("name") + self._is_union_of_literals = all(i.type == "constant" for i in self.types) + + @property + def serialization_type(self) -> str: + """The tag recognized by 'msrest' as a serialization/deserialization. + + 'str', 'int', 'float', 'bool' or + https://github.com/Azure/msrest-for-python/blob/b505e3627b547bd8fdc38327e86c70bdb16df061/msrest/serialization.py#L407-L416 + + or the object schema name (e.g. DotSalmon). + + If list: '[str]' + If dict: '{str}' + """ + if not all(t for t in self.types if t.type == "constant"): + raise ValueError("Shouldn't get serialization type of a combinedtype") + return self.types[0].serialization_type + + @property + def client_default_value(self) -> Any: + return self.yaml_data.get("clientDefaultValue") + + def description(self, *, is_operation_file: bool) -> str: + if len(self.types) == 2: + return f"Is either a {self.types[0].type_description} type or a {self.types[1].type_description} type." + return f"Is one of the following types: {', '.join([t.type_description for t in self.types])}" + + def docstring_text(self, **kwargs: Any) -> str: + return " or ".join(t.docstring_text(**kwargs) for t in self.types) + + def docstring_type(self, **kwargs: Any) -> str: + return " or ".join(t.docstring_type(**kwargs) for t in self.types) + + def type_annotation(self, **kwargs: Any) -> str: + if self.name: + return f'"_types.{self.name}"' + return self.type_definition(**kwargs) + + def type_definition(self, **kwargs: Any) -> str: + """The python type used for type annotation + + Special case for enum, for instance: Union[str, "EnumName"] + """ + # remove duplicates + inside_types = list(dict.fromkeys([type.type_annotation(**kwargs) for type in self.types])) + if len(inside_types) == 1: + return inside_types[0] + if self._is_union_of_literals: + parsed_values = [] + for entry in inside_types: + match = re.search(r"Literal\[(.*)\]", entry) + if match is not None: + parsed_values.append(match.group(1)) + join_string = ", ".join(parsed_values) + return f"Literal[{join_string}]" + + # If the inside types has been a Union, peel first and then re-union + pattern = re.compile(r"Union\[.*\]") + return f'Union[{", ".join(map(lambda x: x[6: -1] if pattern.match(x) else x, inside_types))}]' + + @property + def is_form_data(self) -> bool: + return any(t.is_form_data for t in self.types) + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + return self.types[0].get_json_template_representation( + client_default_value_declaration=client_default_value_declaration, + ) + + def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None: + raise ValueError("You shouldn't get polymorphic subtypes of 
multiple types") + + @property + def instance_check_template(self) -> str: + """Template of what an instance check of a variable for this type would look like""" + raise ValueError("You shouldn't do instance checks on a multiple type") + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + if self.name and not kwargs.get("is_types_file"): + file_import.add_submodule_import( + kwargs.pop("relative_path"), + "_types", + ImportType.LOCAL, + TypingSection.TYPING, + ) + return file_import + for type in self.types: + file_import.merge(type.imports(**kwargs)) + if not self._is_union_of_literals: + file_import.add_submodule_import("typing", "Union", ImportType.STDLIB) + return file_import + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "BaseType": + from . import build_type + + return cls( + yaml_data, + code_model, + [build_type(t, code_model) for t in yaml_data["types"]], + ) + + def target_model_subtype( + self, + target_types: Union[ + Tuple[Type[ModelType]], + Tuple[Type[ModelType], Type[ModelType]], + ], + ) -> Optional[ModelType]: + for sub_t in self.types: + if isinstance(sub_t, target_types): + return sub_t # type: ignore + return None diff --git a/packages/http-client-python/generator/pygen/codegen/models/constant_type.py b/packages/http-client-python/generator/pygen/codegen/models/constant_type.py new file mode 100644 index 0000000000..02e1bfaca2 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/constant_type.py @@ -0,0 +1,129 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +from typing import Dict, Any, Optional, TYPE_CHECKING, Union +from .base import BaseType +from .imports import FileImport, ImportType, TypingSection +from .primitive_types import IntegerType, BinaryType, StringType, BooleanType +from .utils import add_to_description + +if TYPE_CHECKING: + from .code_model import CodeModel + +_LOGGER = logging.getLogger(__name__) + + +class ConstantType(BaseType): + """Schema for constants that will be serialized. + + :param yaml_data: the yaml data for this schema + :type yaml_data: dict[str, Any] + :param str value: The actual value of this constant. + :param schema: The schema for the value of this constant. + :type schema: ~autorest.models.PrimitiveType + """ + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + value_type: BaseType, + value: Optional[Union[str, int, float]], + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.value_type = value_type + self.value = value + + def get_declaration(self, value=None): + if value and value != self.value: + _LOGGER.warning( + "Passed in value of %s differs from constant value of %s. Choosing constant value", + str(value), + str(self.value), + ) + if self.value is None: + return "None" + return self.value_type.get_declaration(self.value) + + def description(self, *, is_operation_file: bool) -> str: + if is_operation_file: + return "" + return add_to_description( + self.yaml_data.get("description", ""), + f"Default value is {self.get_declaration()}.", + ) + + @property + def serialization_type(self) -> str: + """Returns the serialization value for msrest. 
+ + :return: The serialization value for msrest + :rtype: str + """ + return self.value_type.serialization_type + + def docstring_text(self, **kwargs: Any) -> str: + return "constant" + + def docstring_type(self, **kwargs: Any) -> str: + """The python type used for RST syntax input and type annotation. + + :param str namespace: Optional. The namespace for the models. + """ + return self.value_type.docstring_type(**kwargs) + + def type_annotation(self, **kwargs: Any) -> str: + return f"Literal[{self.get_declaration()}]" if self._is_literal else self.value_type.type_annotation(**kwargs) + + @property + def _is_literal(self) -> bool: + return isinstance(self.value_type, (IntegerType, BinaryType, StringType, BooleanType)) + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "ConstantType": + """Constructs a ConstantType from yaml data. + + :param yaml_data: the yaml data from which we will construct this schema + :type yaml_data: dict[str, Any] + + :return: A created ConstantType + :rtype: ~autorest.models.ConstantType + """ + from . import build_type + + return cls( + yaml_data=yaml_data, + code_model=code_model, + value_type=build_type(yaml_data["valueType"], code_model), + value=yaml_data["value"], + ) + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + return self.value_type.get_json_template_representation( + client_default_value_declaration=self.get_declaration(), + ) + + def _imports_shared(self, **kwargs: Any): + file_import = super().imports(**kwargs) + file_import.merge(self.value_type.imports(**kwargs)) + return file_import + + def imports_for_multiapi(self, **kwargs: Any) -> FileImport: + return self._imports_shared(**kwargs) + + def imports(self, **kwargs: Any) -> FileImport: + file_import = self._imports_shared(**kwargs) + if self._is_literal: + file_import.add_submodule_import("typing", "Literal", ImportType.STDLIB, TypingSection.REGULAR) + return file_import + + @property + def instance_check_template(self) -> str: + return self.value_type.instance_check_template diff --git a/packages/http-client-python/generator/pygen/codegen/models/credential_types.py b/packages/http-client-python/generator/pygen/codegen/models/credential_types.py new file mode 100644 index 0000000000..82dd74302a --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/credential_types.py @@ -0,0 +1,214 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from abc import abstractmethod +from typing import ( + Optional, + Any, + Dict, + TYPE_CHECKING, + List, + Generic, + TypeVar, + Union, + cast, +) + +from .imports import FileImport, ImportType, TypingSection +from .base import BaseType + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class _CredentialPolicyBaseType: + """Base class for our different credential policy types. + + Inherited by our BearerTokenCredentialPolicy and KeyCredentialPolicy types. + """ + + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + self.yaml_data = yaml_data + self.code_model = code_model + + @abstractmethod + def call(self, async_mode: bool) -> str: + """ + How to call this credential policy. 
Used to initialize the credential policy in the config file. + """ + + +class BearerTokenCredentialPolicyType(_CredentialPolicyBaseType): + """Credential policy type representing BearerTokenCredentialPolicy""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + credential_scopes: List[str], + ) -> None: + super().__init__(yaml_data, code_model) + self.credential_scopes = credential_scopes + + def call(self, async_mode: bool) -> str: + policy_name = f"{'Async' if async_mode else ''}BearerTokenCredentialPolicy" + return f"policies.{policy_name}(self.credential, *self.credential_scopes, **kwargs)" + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "BearerTokenCredentialPolicyType": + return cls(yaml_data, code_model, yaml_data["credentialScopes"]) + + +class ARMChallengeAuthenticationPolicyType(BearerTokenCredentialPolicyType): + """Credential policy type representing ARMChallengeAuthenticationPolicy""" + + def call(self, async_mode: bool) -> str: + policy_name = f"{'Async' if async_mode else ''}ARMChallengeAuthenticationPolicy" + return f"{policy_name}(self.credential, *self.credential_scopes, **kwargs)" + + +class KeyCredentialPolicyType(_CredentialPolicyBaseType): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + key: str, + scheme: Optional[str] = None, + ) -> None: + super().__init__(yaml_data, code_model) + self.key = key + self.scheme = scheme + + @property + def credential_name(self) -> str: + return "AzureKeyCredential" if self.code_model.is_azure_flavor else "ServiceKeyCredential" + + def call(self, async_mode: bool) -> str: + params = f'"{self.key}", ' + if self.scheme: + params += f'prefix="{self.scheme}", ' + return f"policies.{self.credential_name}Policy(self.credential, {params}**kwargs)" + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "KeyCredentialPolicyType": + return cls(yaml_data, code_model, yaml_data["key"], yaml_data.get("scheme", None)) + + +CredentialPolicyType = TypeVar( + "CredentialPolicyType", + bound=Union[ + BearerTokenCredentialPolicyType, + ARMChallengeAuthenticationPolicyType, + KeyCredentialPolicyType, + ], +) + + +class CredentialType(Generic[CredentialPolicyType], BaseType): + """Store info about the type of the credential. Can be either an KeyCredential or a TokenCredential""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + policy: CredentialPolicyType, + ) -> None: + super().__init__(yaml_data, code_model) + self.policy = policy + + def description(self, *, is_operation_file: bool) -> str: + return "" + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + raise TypeError("You should not try to get a JSON template representation of a CredentialSchema") + + def docstring_text(self, **kwargs: Any) -> str: + return "credential" + + @property + def serialization_type(self) -> str: + return self.docstring_type() + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "CredentialType": + from . import build_type + + return cls( + yaml_data, + code_model, + policy=cast(CredentialPolicyType, build_type(yaml_data["policy"], code_model)), + ) + + +class TokenCredentialType(CredentialType[Union[BearerTokenCredentialPolicyType, ARMChallengeAuthenticationPolicyType]]): + """Type of a token credential. 
Used by BearerAuth and ARMChallenge policies""" + + def type_annotation(self, **kwargs: Any) -> str: + if kwargs.get("async_mode"): + return '"AsyncTokenCredential"' + return '"TokenCredential"' + + @property + def type_description(self) -> str: + return "TokenCredential" + + @property + def credentials_subfolder(self) -> str: + return "credentials_async" if self.code_model.is_azure_flavor else "credentials" + + def docstring_type(self, **kwargs: Any) -> str: + if kwargs.get("async_mode"): + return f"~{self.code_model.core_library}.{self.credentials_subfolder}.AsyncTokenCredential" + return f"~{self.code_model.core_library}.credentials.TokenCredential" + + def imports(self, **kwargs: Any) -> FileImport: + file_import = super().imports(**kwargs) + if kwargs.get("async_mode"): + file_import.add_submodule_import( + self.credentials_subfolder, + "AsyncTokenCredential", + ImportType.SDKCORE, + typing_section=TypingSection.TYPING, + ) + else: + file_import.add_submodule_import( + "credentials", + "TokenCredential", + ImportType.SDKCORE, + typing_section=TypingSection.TYPING, + ) + return file_import + + @property + def instance_check_template(self) -> str: + return "hasattr({}, 'get_token')" + + +class KeyCredentialType(CredentialType[KeyCredentialPolicyType]): + """Type for an KeyCredential""" + + def docstring_type(self, **kwargs: Any) -> str: + return f"~{self.code_model.core_library}.credentials.{self.policy.credential_name}" + + def type_annotation(self, **kwargs: Any) -> str: + return self.policy.credential_name + + @property + def instance_check_template(self) -> str: + return "isinstance({}, " + f"{self.policy.credential_name})" + + def imports(self, **kwargs: Any) -> FileImport: + file_import = super().imports(**kwargs) + file_import.add_submodule_import( + "credentials", + self.policy.credential_name, + ImportType.SDKCORE, + typing_section=TypingSection.CONDITIONAL, + ) + return file_import diff --git a/packages/http-client-python/generator/pygen/codegen/models/dictionary_type.py b/packages/http-client-python/generator/pygen/codegen/models/dictionary_type.py new file mode 100644 index 0000000000..38b83dcbed --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/dictionary_type.py @@ -0,0 +1,127 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Any, Dict, Optional, TYPE_CHECKING, List +from .base import BaseType +from .imports import FileImport, ImportType, TypingSection + +if TYPE_CHECKING: + from .code_model import CodeModel + from .model_type import ModelType + + +class DictionaryType(BaseType): + """Schema for dictionaries that will be serialized. 
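+ For example, a dictionary with string values is annotated as "Dict[str, str]" and has the msrest serialization type "{str}".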
+ + :param yaml_data: the yaml data for this schema + :type yaml_data: dict[str, Any] + :param element_type: The type of the value for the dictionary + :type element_type: ~autorest.models.BaseType + """ + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + element_type: BaseType, + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.element_type = element_type + + @property + def encode(self) -> Optional[str]: + return self.element_type.encode if hasattr(self.element_type, "encode") else None # type: ignore + + @property + def serialization_type(self) -> str: + """Returns the serialization value for msrest. + + :return: The serialization value for msrest + :rtype: str + """ + return f"{{{self.element_type.serialization_type}}}" + + def type_annotation(self, **kwargs: Any) -> str: + """The python type used for type annotation + + :return: The type annotation for this schema + :rtype: str + """ + return f"Dict[str, {self.element_type.type_annotation(**kwargs)}]" + + def description(self, *, is_operation_file: bool) -> str: + return "" if is_operation_file else self.yaml_data.get("description", "") + + def docstring_text(self, **kwargs: Any) -> str: + return f"dict mapping str to {self.element_type.docstring_text(**kwargs)}" + + @property + def xml_serialization_ctxt(self) -> Optional[str]: + """No serialization ctxt for dictionaries""" + return None + + def docstring_type(self, **kwargs: Any) -> str: + """The python type used for RST syntax input and type annotation. + + :param str namespace: Optional. The namespace for the models. + """ + return f"dict[str, {self.element_type.docstring_type(**kwargs)}]" + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + return { + '"str"': self.element_type.get_json_template_representation( + client_default_value_declaration=client_default_value_declaration, + ) + } + + def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None: + from .model_type import ModelType + + if isinstance(self.element_type, ModelType): + is_polymorphic_subtype = ( + self.element_type.discriminator_value and not self.element_type.discriminated_subtypes + ) + if self.element_type.name not in (m.name for m in polymorphic_subtypes) and is_polymorphic_subtype: + polymorphic_subtypes.append(self.element_type) + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "DictionaryType": + """Constructs a DictionaryType from yaml data. + + :param yaml_data: the yaml data from which we will construct this schema + :type yaml_data: dict[str, Any] + + :return: A created DictionaryType + :rtype: ~autorest.models.DictionaryType + """ + element_schema: Dict[str, Any] = yaml_data["elementType"] + + from . 
import build_type # pylint: disable=import-outside-toplevel + + element_type = build_type(yaml_data=element_schema, code_model=code_model) + + return cls( + yaml_data=yaml_data, + code_model=code_model, + element_type=element_type, + ) + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_submodule_import("typing", "Dict", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.merge(self.element_type.imports(**kwargs)) + return file_import + + @property + def instance_check_template(self) -> str: + return "isinstance({}, dict)" + + @property + def type_description(self) -> str: + return f"{{str: {self.element_type.type_description}}}" diff --git a/packages/http-client-python/generator/pygen/codegen/models/enum_type.py b/packages/http-client-python/generator/pygen/codegen/models/enum_type.py new file mode 100644 index 0000000000..56e712654f --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/enum_type.py @@ -0,0 +1,238 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Any, Dict, List, TYPE_CHECKING, Optional, cast + +from .base import BaseType +from .imports import FileImport, ImportType, TypingSection + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class EnumValue(BaseType): + """Model containing necessary information for a single value of an enum. + + :param str name: The name of this enum value + :param str value: The value of this enum value + :param str description: Optional. The description for this enum value + """ + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + enum_type: "EnumType", + value_type: BaseType, + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.name: str = self.yaml_data["name"] + self.value: str = self.yaml_data["value"] + self.enum_type = enum_type + self.value_type = value_type + + def description(self, *, is_operation_file: bool) -> str: + return self.yaml_data.get("description", "") + + def type_annotation(self, **kwargs: Any) -> str: + """The python type used for type annotation""" + return f"Literal[{self.enum_type.name}.{self.name}]" + + def get_declaration(self, value=None): + return self.enum_type.name + "." + self.name + + def docstring_text(self, **kwargs: Any) -> str: + return self.enum_type.name + "." 
+ self.name + + def docstring_type(self, **kwargs: Any) -> str: + """The python type used for RST syntax input and type annotation.""" + + type_annotation = self.value_type.type_annotation(**kwargs) + enum_type_annotation = f"{self.code_model.namespace}.models.{self.name}" + return f"{type_annotation} or ~{enum_type_annotation}" + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + # for better display effect, use the only value instead of var type + return self.value_type.get_json_template_representation( + client_default_value_declaration=client_default_value_declaration, + ) + + @property + def serialization_type(self) -> str: + return self.value_type.serialization_type + + @property + def instance_check_template(self) -> str: + return self.value_type.instance_check_template + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.merge(self.value_type.imports(**kwargs)) + file_import.add_submodule_import("typing", "Literal", ImportType.STDLIB, TypingSection.REGULAR) + file_import.add_submodule_import("._enums", self.enum_type.name, ImportType.LOCAL, TypingSection.REGULAR) + + return file_import + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "EnumValue": + """Constructs an EnumValue from yaml data. + + :param yaml_data: the yaml data from which we will construct this object + :type yaml_data: dict[str, Any] + + :return: A created EnumValue + :rtype: ~autorest.models.EnumValue + """ + from . import build_type + + return cls( + yaml_data=yaml_data, + code_model=code_model, + enum_type=cast(EnumType, build_type(yaml_data["enumType"], code_model)), + value_type=build_type(yaml_data["valueType"], code_model), + ) + + +class EnumType(BaseType): + """Schema for enums that will be serialized. + + :param yaml_data: the yaml data for this schema + :type yaml_data: dict[str, Any] + :param str description: The description of this enum + :param str name: The name of the enum. + :type element_type: ~autorest.models.PrimitiveType + :param values: List of the values for this enum + :type values: list[~autorest.models.EnumValue] + """ + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + values: List["EnumValue"], + value_type: BaseType, + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.name: str = yaml_data["name"][0].upper() + yaml_data["name"][1:] + self.values = values + self.value_type = value_type + self.internal: bool = self.yaml_data.get("internal", False) + self.cross_language_definition_id: Optional[str] = self.yaml_data.get("crossLanguageDefinitionId") + + def __lt__(self, other): + return self.name.lower() < other.name.lower() + + @property + def serialization_type(self) -> str: + """Returns the serialization value for msrest. + + :return: The serialization value for msrest + :rtype: str + """ + return self.value_type.serialization_type + + def description(self, *, is_operation_file: bool) -> str: + possible_values = [self.get_declaration(v.value) for v in self.values] + if not possible_values: + return "" + if len(possible_values) == 1: + return possible_values[0] + if len(possible_values) == 2: + possible_values_str = " and ".join(possible_values) + else: + possible_values_str = ( + ", ".join(possible_values[: len(possible_values) - 1]) + f", and {possible_values[-1]}" + ) + + enum_description = f"Known values are: {possible_values_str}." 
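+ # For illustration: a hypothetical string enum with values "cat", "dog", and "bird"
+ # would yield 'Known values are: "cat", "dog", and "bird".'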
+ return enum_description + + def type_annotation(self, **kwargs: Any) -> str: + """The python type used for type annotation + + :return: The type annotation for this schema + :rtype: str + """ + if self.code_model.options["models_mode"]: + module_name = "_models." if kwargs.get("need_module_name", True) else "" + file_name = f"{self.code_model.enums_filename}." if self.internal else "" + model_name = module_name + file_name + self.name + # we don't need quoted annotation in operation files, and need it in model folder files. + if not kwargs.get("is_operation_file", False): + model_name = f'"{model_name}"' + + return f"Union[{self.value_type.type_annotation(**kwargs)}, {model_name}]" + return self.value_type.type_annotation(**kwargs) + + def get_declaration(self, value: Any) -> str: + return self.value_type.get_declaration(value) + + def docstring_text(self, **kwargs: Any) -> str: + if self.code_model.options["models_mode"]: + return self.name + return self.value_type.type_annotation(**kwargs) + + def docstring_type(self, **kwargs: Any) -> str: + """The python type used for RST syntax input and type annotation.""" + if self.code_model.options["models_mode"]: + type_annotation = self.value_type.type_annotation(**kwargs) + enum_type_annotation = f"{self.code_model.namespace}.models.{self.name}" + return f"{type_annotation} or ~{enum_type_annotation}" + return self.value_type.type_annotation(**kwargs) + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + # for better display effect, use the only value instead of var type + return self.value_type.get_json_template_representation( + client_default_value_declaration=client_default_value_declaration, + ) + + @property + def instance_check_template(self) -> str: + return self.value_type.instance_check_template + + def fill_instance_from_yaml(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + for value in yaml_data["values"]: + self.values.append(EnumValue.from_yaml(value, code_model)) + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "EnumType": + raise ValueError( + "You shouldn't call from_yaml for EnumType to avoid recursion. " + "Please initialize a blank EnumType, then call .fill_instance_from_yaml on the created type."
+ ) + + def imports(self, **kwargs: Any) -> FileImport: + operation = kwargs.pop("operation", False) + file_import = FileImport(self.code_model) + if self.code_model.options["models_mode"]: + file_import.add_submodule_import("typing", "Union", ImportType.STDLIB, TypingSection.CONDITIONAL) + if not operation: + file_import.add_submodule_import( + "..", + "models", + ImportType.LOCAL, + TypingSection.TYPING, + alias="_models", + ) + file_import.merge(self.value_type.imports(operation=operation, **kwargs)) + relative_path = kwargs.pop("relative_path", None) + if self.code_model.options["models_mode"] and relative_path: + # add import for enums in operations file + file_import.add_submodule_import( + relative_path, + "models", + ImportType.LOCAL, + alias="_models", + typing_section=(TypingSection.TYPING if kwargs.get("model_typing") else TypingSection.REGULAR), + ) + return file_import diff --git a/packages/http-client-python/generator/pygen/codegen/models/imports.py b/packages/http-client-python/generator/pygen/codegen/models/imports.py new file mode 100644 index 0000000000..c4d2b8f4cc --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/imports.py @@ -0,0 +1,291 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from enum import Enum, auto +from typing import Dict, List, Optional, Tuple, Union, Set, TYPE_CHECKING + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class ImportType(str, Enum): + """ + Ordering of these enum matters. We order import groupings in a file based off of this ordering. + """ + + STDLIB = "stdlib" + THIRDPARTY = "thirdparty" + SDKCORE = "sdkcore" + LOCAL = "local" + BY_VERSION = "by_version" + + +class TypingSection(str, Enum): + REGULAR = "regular" # this import is always a typing import + CONDITIONAL = "conditional" # is a typing import when we're dealing with files that py2 will use, else regular + TYPING = "typing" # never a typing import + + +class MsrestImportType(Enum): + Module = auto() # import _serialization.py or msrest.serialization as Module + Serializer = auto() # from _serialization.py or msrest.serialization import Serializer + SerializerDeserializer = auto() # from _serialization.py or msrest.serialization import Serializer and Deserializer + + +class ImportModel: + def __init__( + self, + typing_section: TypingSection, + import_type: ImportType, + module_name: str, + *, + submodule_name: Optional[str] = None, + alias: Optional[str] = None, + version_modules: Optional[Tuple[Tuple[Tuple[int, int], str, Optional[str]]]] = None, + ): + self.typing_section = typing_section + self.import_type = import_type + self.module_name = module_name + self.submodule_name = submodule_name + self.alias = alias + # version_modules: this field is for imports submodule from specified module by python version. + # It's a list of "python version, module_name, comments". + # The python version is in form of (major, minor), for instance (3, 9) stands for py3.9. 
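+ # For illustration, version_modules=(((3, 9), "collections.abc", None),) on a "typing"
+ # import of "MutableMapping" means: from Python 3.9 on, import it from "collections.abc"
+ # instead. This is the exact tuple add_mutable_mapping_import below passes in.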
+ self.version_modules = version_modules + + def __eq__(self, other): + try: + return ( + self.typing_section == other.typing_section + and self.import_type == other.import_type + and self.module_name == other.module_name + and self.submodule_name == other.submodule_name + and self.alias == other.alias + ) + except AttributeError: + return False + + def __hash__(self) -> int: + retval: int = 0 + for attr in dir(self): + if attr[0] != "_": + retval += hash(getattr(self, attr)) + return retval + + +class TypeDefinition: + def __init__( + self, + sync_definition: str, + async_definition: str, + ): + self.sync_definition = sync_definition + self.async_definition = async_definition + + +class FileImport: + def __init__(self, code_model: "CodeModel") -> None: + self.imports: List[ImportModel] = [] + self.code_model = code_model + # has sync and async type definitions + self.type_definitions: Dict[str, TypeDefinition] = {} + self.core_library = self.code_model.core_library + + def _append_import(self, import_model: ImportModel) -> None: + if import_model.import_type == ImportType.SDKCORE: + mod_name = import_model.module_name + core_libraries = [ + self.code_model.core_library, + "azure", + "msrest", + ] + if all(l not in mod_name for l in core_libraries): + # this is to make sure we don't tack on core libraries when we don't need to + import_model.module_name = f"{self.code_model.core_library}{'.' if mod_name else ''}{mod_name}" + if not any( + i + for i in self.imports + if all(getattr(i, attr) == getattr(import_model, attr) for attr in dir(i) if attr[0] != "_") + ): + self.imports.append(import_model) + + def get_imports_from_section(self, typing_section: TypingSection) -> List[ImportModel]: + return [i for i in self.imports if i.typing_section == typing_section] + + def add_submodule_import( + self, + module_name: str, + submodule_name: str, + import_type: ImportType, + typing_section: TypingSection = TypingSection.REGULAR, + alias: Optional[str] = None, + version_modules: Optional[Tuple[Tuple[Tuple[int, int], str, Optional[str]]]] = None, + ) -> None: + """Add an import to this import block.""" + self._append_import( + ImportModel( + typing_section=typing_section, + import_type=import_type, + module_name=module_name, + submodule_name=submodule_name, + alias=alias, + version_modules=version_modules, + ) + ) + + def add_import( + self, + module_name: str, + import_type: ImportType, + typing_section: TypingSection = TypingSection.REGULAR, + alias: Optional[str] = None, + ) -> None: + # Implementation detail: a regular import is just a "from" with no from + self._append_import( + ImportModel( + typing_section=typing_section, + import_type=import_type, + module_name=module_name, + alias=alias, + ) + ) + + def define_mypy_type( + self, + type_name: str, + type_value: str, + async_type_value: Optional[str] = None, + ): + self.type_definitions[type_name] = TypeDefinition(type_value, async_type_value or type_value) + + def merge(self, file_import: "FileImport") -> None: + """Merge the given file import format.""" + for i in file_import.imports: + self._append_import(i) + self.type_definitions.update(file_import.type_definitions) + + def add_mutable_mapping_import(self) -> None: + self.add_import("sys", ImportType.STDLIB) + self.add_submodule_import( + "typing", + "MutableMapping", + ImportType.BY_VERSION, + TypingSection.REGULAR, + None, + (((3, 9), "collections.abc", None),), + ) + + def define_mutable_mapping_type(self) -> None: + """Helper function for defining the mutable mapping type""" + 
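# For illustration, code generated through this helper typically ends up containing roughly:
+ #   if sys.version_info >= (3, 9):
+ #       from collections.abc import MutableMapping
+ #   else:
+ #       from typing import MutableMapping  # type: ignore
+ #   JSON = MutableMapping[str, Any]
+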
self.add_mutable_mapping_import() + self.define_mypy_type( + "JSON", + "MutableMapping[str, Any]", + ) + self.add_submodule_import("typing", "Any", ImportType.STDLIB) + + def to_dict( + self, + ) -> Dict[ + TypingSection, + Dict[ + ImportType, + Dict[ + str, + Set[ + Optional[ + Union[ + str, + Tuple[str, str], + Tuple[ + str, + Optional[str], + Tuple[Tuple[Tuple[int, int], str, Optional[str]]], + ], + ] + ] + ], + ], + ], + ]: + retval: Dict[ + TypingSection, + Dict[ + ImportType, + Dict[ + str, + Set[ + Optional[ + Union[ + str, + Tuple[str, str], + Tuple[ + str, + Optional[str], + Tuple[Tuple[Tuple[int, int], str, Optional[str]]], + ], + ] + ] + ], + ], + ], + ] = {} + for i in self.imports: + name_import: Optional[ + Union[ + str, + Tuple[str, str], + Tuple[ + str, + Optional[str], + Tuple[Tuple[Tuple[int, int], str, Optional[str]]], + ], + ] + ] = None + if i.submodule_name: + if i.version_modules: + name_import = (i.submodule_name, i.alias, i.version_modules) + elif i.alias: + name_import = (i.submodule_name, i.alias) + else: + name_import = i.submodule_name + retval.setdefault(i.typing_section, {}).setdefault(i.import_type, {}).setdefault(i.module_name, set()).add( + name_import + ) + return retval + + def add_msrest_import( + self, + *, + relative_path: str, + msrest_import_type: MsrestImportType, + typing_section: TypingSection, + ): + if self.code_model.options["client_side_validation"]: + if msrest_import_type == MsrestImportType.Module: + self.add_import("msrest.serialization", ImportType.SDKCORE, typing_section) + else: + self.add_submodule_import("msrest", "Serializer", ImportType.THIRDPARTY, typing_section) + if msrest_import_type == MsrestImportType.SerializerDeserializer: + self.add_submodule_import("msrest", "Deserializer", ImportType.THIRDPARTY, typing_section) + else: + if self.code_model.options["multiapi"]: + relative_path += "." + if msrest_import_type == MsrestImportType.Module: + self.add_submodule_import(relative_path, "_serialization", ImportType.LOCAL, typing_section) + else: + self.add_submodule_import( + f"{relative_path}_serialization", + "Serializer", + ImportType.LOCAL, + typing_section, + ) + if msrest_import_type == MsrestImportType.SerializerDeserializer: + self.add_submodule_import( + f"{relative_path}_serialization", + "Deserializer", + ImportType.LOCAL, + typing_section, + ) diff --git a/packages/http-client-python/generator/pygen/codegen/models/list_type.py b/packages/http-client-python/generator/pygen/codegen/models/list_type.py new file mode 100644 index 0000000000..8c29ed6561 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/list_type.py @@ -0,0 +1,143 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Any, Dict, Optional, Union, TYPE_CHECKING, List +from .base import BaseType +from .imports import FileImport, ImportType, TypingSection + +if TYPE_CHECKING: + from .code_model import CodeModel + from .model_type import ModelType + + +class ListType(BaseType): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + element_type: BaseType, + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.element_type = element_type + self.max_items: Optional[int] = yaml_data.get("maxItems") + self.min_items: Optional[int] = yaml_data.get("minItems") + self.unique_items: bool = yaml_data.get("uniqueItems", False) + + @property + def encode(self) -> Optional[str]: + return self.element_type.encode if hasattr(self.element_type, "encode") else None # type: ignore + + @property + def serialization_type(self) -> str: + return f"[{self.element_type.serialization_type}]" + + def type_annotation(self, **kwargs: Any) -> str: + if ( + self.code_model.options["version_tolerant"] + and self.element_type.is_xml + and not self.code_model.options["models_mode"] + ): + # this means we're version tolerant XML, we just return the XML element + return self.element_type.type_annotation(**kwargs) + return f"List[{self.element_type.type_annotation(**kwargs)}]" + + def description(self, *, is_operation_file: bool) -> str: + return "" if is_operation_file else self.yaml_data.get("description", "") + + @property + def xml_serialization_ctxt(self) -> Optional[str]: + attrs_list = [] + base_xml_map = super().xml_serialization_ctxt + if base_xml_map: + attrs_list.append(base_xml_map) + + # Attribute at the list level + if self.xml_metadata.get("wrapped", False): + attrs_list.append("'wrapped': True") + + # Attributes of the items + item_xml_metadata = self.element_type.xml_metadata + if item_xml_metadata.get("name"): + attrs_list.append(f"'itemsName': '{item_xml_metadata['name']}'") + if item_xml_metadata.get("prefix", False): + attrs_list.append(f"'itemsPrefix': '{item_xml_metadata['prefix']}'") + if item_xml_metadata.get("namespace", False): + attrs_list.append(f"'itemsNs': '{item_xml_metadata['namespace']}'") + + return ", ".join(attrs_list) + + def docstring_type(self, **kwargs: Any) -> str: + if self.code_model.options["version_tolerant"] and self.element_type.xml_metadata: + # this means we're version tolerant XML, we just return the XML element + return self.element_type.docstring_type(**kwargs) + return f"list[{self.element_type.docstring_type(**kwargs)}]" + + def docstring_text(self, **kwargs: Any) -> str: + if self.code_model.options["version_tolerant"] and self.element_type.xml_metadata: + # this means we're version tolerant XML, we just return the XML element + return self.element_type.docstring_text(**kwargs) + return f"list of {self.element_type.docstring_text(**kwargs)}" + + @property + def validation(self) -> Optional[Dict[str, Union[bool, int, str]]]: + validation: Dict[str, Union[bool, int, str]] = {} + if self.max_items: + validation["max_items"] = self.max_items + validation["min_items"] = self.min_items or 0 + if self.min_items: + validation["min_items"] = self.min_items + if self.unique_items: + validation["unique"] = True + return validation or None + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + return [ + self.element_type.get_json_template_representation( + 
client_default_value_declaration=client_default_value_declaration, + ) + ] + + def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None: + from .model_type import ModelType + + if isinstance(self.element_type, ModelType): + is_polymorphic_subtype = ( + self.element_type.discriminator_value and not self.element_type.discriminated_subtypes + ) + if self.element_type.name not in (m.name for m in polymorphic_subtypes) and is_polymorphic_subtype: + polymorphic_subtypes.append(self.element_type) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, list)" + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "ListType": + from . import build_type + + return cls( + yaml_data=yaml_data, + code_model=code_model, + element_type=build_type(yaml_data=yaml_data["elementType"], code_model=code_model), + ) + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + if not ( + self.code_model.options["version_tolerant"] + and self.element_type.is_xml + and not self.code_model.options["models_mode"] + ): + file_import.add_submodule_import("typing", "List", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.merge(self.element_type.imports(**kwargs)) + return file_import + + @property + def type_description(self) -> str: + return f"[{self.element_type.type_description}]" diff --git a/packages/http-client-python/generator/pygen/codegen/models/lro_operation.py b/packages/http-client-python/generator/pygen/codegen/models/lro_operation.py new file mode 100644 index 0000000000..84647a037c --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/lro_operation.py @@ -0,0 +1,142 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Any, Dict, Optional, List, TYPE_CHECKING, TypeVar, Union +from .imports import FileImport +from .operation import OperationBase, Operation +from .response import LROPagingResponse, LROResponse, Response +from .imports import ImportType, TypingSection +from .request_builder import RequestBuilder +from .parameter_list import ParameterList + +if TYPE_CHECKING: + from .code_model import CodeModel + from .client import Client + from . 
import OperationType + +LROResponseType = TypeVar("LROResponseType", bound=Union[LROResponse, LROPagingResponse]) + + +class LROOperationBase(OperationBase[LROResponseType]): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + name: str, + request_builder: RequestBuilder, + parameters: ParameterList, + responses: List[LROResponseType], + exceptions: List[Response], + *, + overloads: Optional[List[Operation]] = None, + ) -> None: + super().__init__( + code_model=code_model, + client=client, + yaml_data=yaml_data, + name=name, + request_builder=request_builder, + parameters=parameters, + responses=responses, + exceptions=exceptions, + overloads=overloads, + ) + if not self.name.lstrip("_").startswith("begin"): + self.name = ("_begin" if self.internal else "begin_") + self.name + self.lro_options: Dict[str, Any] = self.yaml_data.get("lroOptions", {}) + self._initial_operation: Optional["OperationType"] = None + + @property + def initial_operation(self) -> "OperationType": + if not self._initial_operation: + raise ValueError("You need to first call client.link_lro_initial_operations before accessing") + return self._initial_operation + + @initial_operation.setter + def initial_operation(self, val: "OperationType") -> None: + self._initial_operation = val + + @property + def operation_type(self) -> str: + return "lro" + + @property + def has_optional_return_type(self) -> bool: + return False + + @property + def lro_response(self) -> Optional[LROResponseType]: + responses_with_bodies = [r for r in self.responses if r.type] + num_response_schemas = {id(r.type.yaml_data) for r in responses_with_bodies if r.type} + response = None + if len(num_response_schemas) > 1: + # choose the response that has a status code of 200 + try: + response = next(r for r in responses_with_bodies if 200 in r.status_codes) + except StopIteration as exc: + raise ValueError( + "Your swagger is invalid because you have multiple response schemas for LRO" + + f" method {self.name} and none of them have a 200 status code." 
+ ) from exc + + elif num_response_schemas: + response = responses_with_bodies[0] + return response + + def response_type_annotation(self, **kwargs) -> str: + lro_response = self.lro_response or next(iter(self.responses), None) + if lro_response: + return lro_response.type_annotation(**kwargs) + return "None" + + def cls_type_annotation(self, *, async_mode: bool) -> str: + """We don't want the poller to show up in ClsType, so we call super() on response type annotation""" + return f"ClsType[{Response.type_annotation(self.responses[0], async_mode=async_mode)}]" + + def get_poller_with_response_type(self, async_mode: bool) -> str: + return self.response_type_annotation(async_mode=async_mode) + + def get_poller(self, async_mode: bool) -> str: + return self.responses[0].get_poller(async_mode) + + def get_polling_method(self, async_mode: bool) -> str: + return self.responses[0].get_polling_method(async_mode) + + def get_base_polling_method(self, async_mode: bool) -> str: + return self.responses[0].get_base_polling_method(async_mode) + + def get_base_polling_method_path(self, async_mode: bool) -> str: + return self.responses[0].get_base_polling_method_path(async_mode) + + def get_no_polling_method(self, async_mode: bool) -> str: + return self.responses[0].get_no_polling_method(async_mode) + + def imports(self, async_mode: bool, **kwargs: Any) -> FileImport: + file_import = super().imports(async_mode, **kwargs) + if self.abstract: + return file_import + if async_mode and self.code_model.options["tracing"] and self.want_tracing: + file_import.add_submodule_import( + "azure.core.tracing.decorator_async", + "distributed_trace_async", + ImportType.SDKCORE, + ) + if ( + self.code_model.options["models_mode"] == "dpg" + and self.lro_response + and self.lro_response.type + and self.lro_response.type.type == "model" + ): + # used in the case if initial operation returns none + # but final call returns a model + relative_path = "..." if async_mode else ".." + file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize", ImportType.LOCAL) + file_import.add_submodule_import("typing", "Union", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.add_submodule_import("typing", "cast", ImportType.STDLIB) + return file_import + + +class LROOperation(LROOperationBase[LROResponse]): ... diff --git a/packages/http-client-python/generator/pygen/codegen/models/lro_paging_operation.py b/packages/http-client-python/generator/pygen/codegen/models/lro_paging_operation.py new file mode 100644 index 0000000000..326ed2aac9 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/lro_paging_operation.py @@ -0,0 +1,32 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Any +from .imports import FileImport +from .lro_operation import LROOperationBase +from .paging_operation import PagingOperationBase +from .response import LROPagingResponse, Response + + +class LROPagingOperation(LROOperationBase[LROPagingResponse], PagingOperationBase[LROPagingResponse]): + @property + def success_status_codes(self): + """The list of all successfull status code.""" + return [200] + + @property + def operation_type(self) -> str: + return "lropaging" + + def cls_type_annotation(self, *, async_mode: bool) -> str: + return f"ClsType[{Response.type_annotation(self.responses[0], async_mode=async_mode)}]" + + def imports(self, async_mode: bool, **kwargs: Any) -> FileImport: + lro_imports = LROOperationBase.imports(self, async_mode, **kwargs) + paging_imports = PagingOperationBase.imports(self, async_mode, **kwargs) + + file_import = lro_imports + file_import.merge(paging_imports) + return file_import diff --git a/packages/http-client-python/generator/pygen/codegen/models/model_type.py b/packages/http-client-python/generator/pygen/codegen/models/model_type.py new file mode 100644 index 0000000000..8dd3a94dfa --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/model_type.py @@ -0,0 +1,359 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from enum import Enum +from collections import OrderedDict +from typing import Any, Dict, List, Optional, TYPE_CHECKING, cast +import sys +from .utils import add_to_pylint_disable +from .base import BaseType +from .constant_type import ConstantType +from .property import Property +from .imports import FileImport, ImportType, TypingSection +from ...utils import NAME_LENGTH_LIMIT + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal # type: ignore + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class UsageFlags(Enum): + Default = 0 + Input = 2 + Output = 4 + ApiVersionEnum = 8 + JsonMergePatch = 16 + MultipartFormData = 32 + Spread = 64 + Error = 128 + Json = 256 + Xml = 512 + + +def _get_properties(type: "ModelType", properties: List[Property]) -> List[Property]: + for parent in type.parents: + # here we're adding the properties from our parents + + # need to make sure that the properties we choose from our parent also don't contain + # any of our own properties + property_names = set([p.client_name for p in properties] + [p.client_name for p in type.properties]) + chosen_parent_properties = [p for p in parent.properties if p.client_name not in property_names] + properties = _get_properties(parent, chosen_parent_properties) + properties + return properties + + +class ModelType(BaseType): # pylint: disable=too-many-instance-attributes, too-many-public-methods + """Represents a class ready to be serialized in Python. + + :param str name: The name of the class. + :param str description: The description of the class. + :param properties: the optional properties of the class. 
+ :type properties: dict(str, str) + """ + + base: Literal["msrest", "dpg", "json"] + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + *, + properties: Optional[List[Property]] = None, + parents: Optional[List["ModelType"]] = None, + discriminated_subtypes: Optional[Dict[str, "ModelType"]] = None, + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.name: str = self.yaml_data["name"] + self.max_properties: Optional[int] = self.yaml_data.get("maxProperties") + self.min_properties: Optional[int] = self.yaml_data.get("minProperties") + self.properties = properties or [] + self.parents = parents or [] + self.discriminated_subtypes = discriminated_subtypes or {} + self.discriminator_value: Optional[str] = self.yaml_data.get("discriminatorValue") + self._created_json_template_representation = False + self._got_polymorphic_subtypes = False + self.internal: bool = self.yaml_data.get("internal", False) + self.snake_case_name: str = self.yaml_data["snakeCaseName"] + self.cross_language_definition_id: Optional[str] = self.yaml_data.get("crossLanguageDefinitionId") + self.usage: int = self.yaml_data.get("usage", UsageFlags.Input.value | UsageFlags.Output.value) + + @property + def is_usage_output(self) -> bool: + return bool(self.usage & UsageFlags.Output.value) + + @property + def flattened_property(self) -> Optional[Property]: + try: + return next(p for p in self.properties if p.flatten) + except StopIteration: + return None + + @property + def flattened_items(self) -> List[str]: + return [ + item.client_name + for prop in self.properties + if isinstance(prop.type, ModelType) and prop.flatten + for item in prop.type.properties + ] + + @property + def is_form_data(self) -> bool: + return any(p.is_multipart_file_input for p in self.properties) + + @property + def is_xml(self) -> bool: + return self.yaml_data.get("isXml", False) + + @property + def msrest_deserialization_key(self) -> str: + return self.name + + @property + def is_polymorphic(self) -> bool: + return any(p.is_polymorphic for p in self.properties) + + def description(self, *, is_operation_file: bool = False) -> str: + return "" if is_operation_file else self.yaml_data.get("description", self.name) + + def get_declaration(self, value: Any) -> str: + return f"{self.name}()" + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + @property + def xml_serialization_ctxt(self) -> Optional[str]: + # object schema contains _xml_map, they don't need serialization context + return "" + + @property + def xml_map_content(self) -> Optional[str]: + # This is NOT an error on the super call, we use the serialization context for "xml_map", + # but we don't want to write a serialization context for an object. + return super().xml_serialization_ctxt + + @property + def discriminated_subtypes_name_mapping(self) -> Dict[str, str]: + return {k: v.name for k, v in self.discriminated_subtypes.items()} + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + if self._created_json_template_representation: + return "..." 
# do this to avoid an infinite loop + self._created_json_template_representation = True + if self.discriminated_subtypes: + # we will instead print the discriminated subtypes + self._created_json_template_representation = False + return f'"{self.snake_case_name}"' if self.code_model.for_test else self.snake_case_name + + # don't add additional properties, because there's not really a concept of + # additional properties in the template + representation = { + f'"{prop.wire_name}"': prop.get_json_template_representation( + client_default_value_declaration=client_default_value_declaration, + ) + for prop in [ + p for p in self.properties if not (p.is_discriminator or p.client_name == "additional_properties") + ] + } + if self.discriminator and self.discriminator_value: + representation[f'"{self.discriminator.wire_name}"'] = f'"{self.discriminator_value}"' + + # once we've finished, we want to reset created_json_template_representation to false + # so we can call it again + self._created_json_template_representation = False + optional_keys = [f'"{p.wire_name}"' for p in self.properties if getattr(p, "optional", False)] + return OrderedDict( + sorted( + representation.items(), + key=lambda item: f"{1 if item[0] in optional_keys else 0}{item[0]}", + ) + ) + + def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None: + is_polymorphic_subtype = self.discriminator_value and not self.discriminated_subtypes + if self._got_polymorphic_subtypes: + return + self._got_polymorphic_subtypes = True + if self.name not in (m.name for m in polymorphic_subtypes) and is_polymorphic_subtype: + polymorphic_subtypes.append(self) + for discriminated_subtype in self.discriminated_subtypes.values(): + discriminated_subtype.get_polymorphic_subtypes(polymorphic_subtypes) + for property in self.properties: + property.get_polymorphic_subtypes(polymorphic_subtypes) + self._got_polymorphic_subtypes = False + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "ModelType": + raise ValueError( + "You shouldn't call from_yaml for ModelType to avoid recursion. " + "Please initialize a blank ModelType, then call .fill_instance_from_yaml on the created type." + ) + + def fill_instance_from_yaml(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + from .
import build_type + + self.parents = [cast(ModelType, build_type(bm, code_model)) for bm in yaml_data.get("parents", [])] + properties = [Property.from_yaml(p, code_model) for p in yaml_data["properties"]] + self.properties = _get_properties(self, properties) + # checking to see if this is a polymorphic class + self.discriminated_subtypes = { + k: cast(ModelType, build_type(v, code_model)) + for k, v in self.yaml_data.get("discriminatedSubtypes", {}).items() + } + + @property + def has_readonly_or_constant_property(self) -> bool: + return any(x.readonly or x.constant or x.visibility == ["read"] for x in self.properties) + + @property + def discriminator(self) -> Optional[Property]: + try: + return next(p for p in self.properties if p.is_discriminator) + except StopIteration: + return None + + @property + def discriminator_property(self) -> Optional[Property]: + try: + return next( + p + for p in self.properties + if p.is_discriminator and isinstance(p.type, ConstantType) and p.type.value == self.discriminator_value + ) + except StopIteration: + return None + + def pylint_disable(self) -> str: + retval: str = "" + if len(self.properties) > 10: + retval = add_to_pylint_disable(retval, "too-many-instance-attributes") + if len(self.name) > NAME_LENGTH_LIMIT: + retval = add_to_pylint_disable(retval, "name-too-long") + return retval + + @property + def init_pylint_disable(self) -> str: + retval: str = "" + if len(self.properties) > 23: + retval = add_to_pylint_disable(retval, "too-many-locals") + return retval + + +class JSONModelType(ModelType): + base = "json" + + def type_annotation(self, **kwargs: Any) -> str: + return "ET.Element" if self.is_xml else "JSON" + + @property + def serialization_type(self) -> str: + return "object" + + def docstring_type(self, **kwargs: Any) -> str: + return "ET.Element" if self.is_xml else "JSON" + + def docstring_text(self, **kwargs: Any) -> str: + return "XML Element" if self.is_xml else "JSON object" + + @property + def instance_check_template(self) -> str: + return "isinstance({}, MutableMapping)" + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.define_mutable_mapping_type() + if self.is_xml: + file_import.add_submodule_import("xml.etree", "ElementTree", ImportType.STDLIB, alias="ET") + return file_import + + +class GeneratedModelType(ModelType): + def type_annotation(self, **kwargs: Any) -> str: + is_operation_file = kwargs.pop("is_operation_file", False) + skip_quote = kwargs.get("skip_quote", False) + module_name = "_models." if kwargs.get("need_module_name", True) else "" + file_name = f"{self.code_model.models_filename}." 
if self.internal else "" + retval = module_name + file_name + self.name + return retval if is_operation_file or skip_quote else f'"{retval}"' + + def docstring_type(self, **kwargs: Any) -> str: + return f"~{self.code_model.namespace}.models.{self.type_annotation(need_module_name=False, skip_quote=True)}" + + def docstring_text(self, **kwargs: Any) -> str: + return self.name + + @property + def type_description(self) -> str: + return self.name + + def imports(self, **kwargs: Any) -> FileImport: + file_import = super().imports(**kwargs) + relative_path = kwargs.pop("relative_path", None) + if relative_path: + # add import for models in operations or _types file + file_import.add_submodule_import( + relative_path, + "models", + ImportType.LOCAL, + alias="_models", + typing_section=(TypingSection.TYPING if kwargs.get("model_typing") else TypingSection.REGULAR), + ) + if self.is_form_data: + file_import.add_submodule_import( + relative_path, + "_model_base", + ImportType.LOCAL, + typing_section=(TypingSection.TYPING if kwargs.get("model_typing") else TypingSection.REGULAR), + ) + return file_import + + +class MsrestModelType(GeneratedModelType): + base = "msrest" + + @property + def serialization_type(self) -> str: + return self.type_annotation(skip_quote=True) if self.internal else self.name + + @property + def instance_check_template(self) -> str: + return "isinstance({}, msrest.Model)" + + def imports(self, **kwargs: Any) -> FileImport: + file_import = super().imports(**kwargs) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB, TypingSection.CONDITIONAL) + return file_import + + +class DPGModelType(GeneratedModelType): + base = "dpg" + + @property + def serialization_type(self) -> str: + return ( + self.type_annotation(skip_quote=True) + if self.internal + else self.type_annotation(need_module_name=False, skip_quote=True) + ) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, _model_base.Model)" + + def imports(self, **kwargs: Any) -> FileImport: + file_import = super().imports(**kwargs) + if self.flattened_property: + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB) + return file_import diff --git a/packages/http-client-python/generator/pygen/codegen/models/operation.py b/packages/http-client-python/generator/pygen/codegen/models/operation.py new file mode 100644 index 0000000000..1937a41678 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/operation.py @@ -0,0 +1,525 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from itertools import chain +from typing import ( + Dict, + List, + Any, + Optional, + Union, + TYPE_CHECKING, + Generic, + TypeVar, + cast, + Sequence, +) + +from .request_builder_parameter import RequestBuilderParameter + +from .utils import OrderedSet, add_to_pylint_disable +from .base_builder import BaseBuilder +from .imports import FileImport, ImportType, TypingSection +from .response import ( + Response, + PagingResponse, + LROResponse, + LROPagingResponse, + get_response, +) +from .parameter import ( + BodyParameter, + Parameter, + ParameterLocation, +) +from .parameter_list import ParameterList +from .model_type import ModelType +from .base import BaseType +from .request_builder import OverloadedRequestBuilder, RequestBuilder +from ...utils import xml_serializable, json_serializable, NAME_LENGTH_LIMIT + +if TYPE_CHECKING: + from .code_model import CodeModel + from .client import Client + from . import OperationType + +ResponseType = TypeVar( + "ResponseType", + bound=Union[Response, PagingResponse, LROResponse, LROPagingResponse], +) + + +def is_internal(target: Optional[BaseType]) -> bool: + return isinstance(target, ModelType) and target.base == "dpg" and target.internal + + +class OperationBase( # pylint: disable=too-many-public-methods,too-many-instance-attributes + Generic[ResponseType], BaseBuilder[ParameterList, List["Operation"]] +): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + name: str, + request_builder: Union[RequestBuilder, OverloadedRequestBuilder], + parameters: ParameterList, + responses: List[ResponseType], + exceptions: List[Response], + *, + overloads: Optional[List["Operation"]] = None, + ) -> None: + super().__init__( + code_model=code_model, + client=client, + yaml_data=yaml_data, + name=name, + parameters=parameters, + overloads=overloads, + ) + self.overloads: List["Operation"] = overloads or [] + self.responses = responses + self.request_builder = request_builder + self.deprecated = False + self.exceptions = exceptions + self.is_lro_initial_operation: bool = self.yaml_data.get("isLroInitialOperation", False) + self.include_documentation: bool = not self.is_lro_initial_operation + self.internal: bool = self.yaml_data.get("internal", False) + if self.internal: + self.name = "_" + self.name + self.has_etag: bool = self.yaml_data.get("hasEtag", False) + self.cross_language_definition_id: Optional[str] = self.yaml_data.get("crossLanguageDefinitionId") + + @property + def stream_value(self) -> Union[str, bool]: + return ( + f'kwargs.pop("stream", {self.has_stream_response})' + if self.expose_stream_keyword and self.has_response_body + else self.has_stream_response + ) + + @property + def has_form_data_body(self): + return self.parameters.has_form_data_body + + @property + def expose_stream_keyword(self) -> bool: + return self.yaml_data.get("exposeStreamKeyword", False) + + @property + def operation_type(self) -> str: + return "operation" + + @property + def has_optional_return_type(self) -> bool: + """Has optional return type if there are multiple successful response types where some have + bodies and some are None + """ + # means if we have at least one successful response with a body and one without + successful_response_with_body = any(r for r in self.responses if r.type) + successful_response_without_body = any(r for r in self.responses if not r.type) + return successful_response_with_body and successful_response_without_body 
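The optional-return-type rule above, together with the Union/Optional wrapping that response_type_annotation applies next, reduces to two any() scans over the operation's responses. A minimal standalone sketch of that decision, using plain (status code, body type) tuples as a stand-in for pygen's Response objects (all names here are illustrative, not part of the generator):

    from typing import List, Optional, Tuple

    def return_annotation(responses: List[Tuple[int, Optional[str]]]) -> str:
        # Hypothetical stand-in: each response is (status code, body type name or None).
        bodies = [body for _, body in responses if body]
        distinct = sorted(set(bodies))
        has_empty = any(body is None for _, body in responses)
        if len(distinct) > 1:
            return f"Union[{', '.join(distinct)}]"
        if distinct and has_empty:  # some responses carry a body, some (e.g. 204) do not
            return f"Optional[{distinct[0]}]"
        return distinct[0] if distinct else "None"

    print(return_annotation([(200, "JSON"), (204, None)]))  # Optional[JSON]

The sketch only illustrates the branching; the generator itself de-duplicates annotations with an ordered set and otherwise falls back to the first response's annotation.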
+ + def response_type_annotation(self, **kwargs) -> str: + if self.code_model.options["head_as_boolean"] and self.request_builder.method.lower() == "head": + return "bool" + response_type_annotations: OrderedSet[str] = { + response.type_annotation(**kwargs): None for response in self.responses if response.type + } + response_str = ", ".join(response_type_annotations.keys()) + if len(response_type_annotations) > 1: + return f"Union[{response_str}]" + if self.has_optional_return_type: + return f"Optional[{response_str}]" + if self.responses: + return self.responses[0].type_annotation(**kwargs) + return "None" + + def pylint_disable(self, async_mode: bool) -> str: + retval: str = "" + if not async_mode and not self.is_overload and self.response_type_annotation(async_mode=False) == "None": + # doesn't matter if it's async or not + retval = add_to_pylint_disable(retval, "inconsistent-return-statements") + if len(self.name) > NAME_LENGTH_LIMIT: + retval = add_to_pylint_disable(retval, "name-too-long") + return retval + + def cls_type_annotation(self, *, async_mode: bool) -> str: + if self.request_builder.method.lower() == "head" and self.code_model.options["head_as_boolean"]: + return "ClsType[None]" + return f"ClsType[{self.response_type_annotation(async_mode=async_mode)}]" + + def _response_docstring_helper(self, attr_name: str, **kwargs: Any) -> str: + responses_with_body = [r for r in self.responses if r.type] + if self.request_builder.method.lower() == "head" and self.code_model.options["head_as_boolean"]: + return "bool" + if responses_with_body: + response_docstring_values: OrderedSet[str] = { + getattr(response, attr_name)(**kwargs): None for response in responses_with_body + } + retval = " or ".join(response_docstring_values.keys()) + if self.has_optional_return_type: + retval += " or None" + return retval + if self.responses: + return getattr(self.responses[0], attr_name)(**kwargs) + return "None" + + def response_docstring_text(self, **kwargs) -> str: + retval = self._response_docstring_helper("docstring_text", **kwargs) + if not self.code_model.options["version_tolerant"]: + retval += " or the result of cls(response)" + if self.code_model.options["models_mode"] == "dpg" and any( + isinstance(r.type, ModelType) for r in self.responses + ): + r = next(r for r in self.responses if isinstance(r.type, ModelType)) + item_type = getattr(r, "item_type", getattr(r, "type")) + if item_type: + type_name = item_type.docstring_text(**kwargs) + retval += f". 
The {type_name} is compatible with MutableMapping" + return retval + + def response_docstring_type(self, **kwargs) -> str: + return self._response_docstring_helper("docstring_type", **kwargs) + + @property + def has_response_body(self) -> bool: + """Tell if at least one response has a body.""" + return any(response.type for response in self.responses) + + @property + def any_response_has_headers(self) -> bool: + return any(response.headers for response in self.responses) + + @property + def default_error_deserialization(self) -> Optional[str]: + default_exceptions = [e for e in self.exceptions if "default" in e.status_codes and e.type] + if not default_exceptions: + return None + exception_schema = default_exceptions[0].type + if isinstance(exception_schema, ModelType): + return exception_schema.type_annotation(skip_quote=True) + # in this case, it's just an AnyType + return "'object'" + + @property + def non_default_errors(self) -> List[Response]: + return [e for e in self.exceptions if "default" not in e.status_codes] + + @property + def non_default_error_status_codes(self) -> List[Union[str, int]]: + """Actually returns all of the status codes from exceptions (besides default)""" + return list(chain.from_iterable([error.status_codes for error in self.non_default_errors])) + + def _imports_shared(self, async_mode: bool, **kwargs: Any) -> FileImport: # pylint: disable=unused-argument + file_import = FileImport(self.code_model) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB, TypingSection.CONDITIONAL) + + response_types = [r.type_annotation(async_mode=async_mode, operation=self) for r in self.responses if r.type] + if len(set(response_types)) > 1: + file_import.add_submodule_import("typing", "Union", ImportType.STDLIB, TypingSection.CONDITIONAL) + if self.added_on: + file_import.add_submodule_import( + f"{'.' if async_mode else ''}.._validation", + "api_version_validation", + ImportType.LOCAL, + ) + return file_import + + def imports_for_multiapi(self, async_mode: bool, **kwargs: Any) -> FileImport: + if self.abstract: + return FileImport(self.code_model) + file_import = self._imports_shared(async_mode, **kwargs) + for param in self.parameters.method: + file_import.merge( + param.imports_for_multiapi( + async_mode, + operation=self, + **kwargs, + ) + ) + for response in self.responses: + file_import.merge(response.imports_for_multiapi(async_mode=async_mode, operation=self, **kwargs)) + if self.code_model.options["models_mode"]: + for exception in self.exceptions: + file_import.merge(exception.imports_for_multiapi(async_mode=async_mode, operation=self, **kwargs)) + return file_import + + @staticmethod + def has_kwargs_to_pop_with_default( + kwargs_to_pop: List[ + Union[ + Parameter, + RequestBuilderParameter, + BodyParameter, + ] + ], + location: ParameterLocation, + ) -> bool: + return any( + (kwarg.client_default_value or kwarg.optional) and kwarg.location == location for kwarg in kwargs_to_pop + ) + + @property + def need_validation(self) -> bool: + """Whether we need parameter / operation validation. 
For API version.""" + return bool(self.added_on) or any(p for p in self.parameters if p.added_on) + + def get_request_builder_import( + self, + request_builder: Union[RequestBuilder, OverloadedRequestBuilder], + async_mode: bool, + ) -> FileImport: + """Helper method to get a request builder import.""" + file_import = FileImport(self.code_model) + if self.code_model.options["builders_visibility"] != "embedded": + group_name = request_builder.group_name + rest_import_path = "..." if async_mode else ".." + if group_name: + file_import.add_submodule_import( + f"{rest_import_path}{self.code_model.rest_layer_name}", + group_name, + import_type=ImportType.LOCAL, + alias=f"rest_{group_name}", + ) + else: + file_import.add_submodule_import( + rest_import_path, + self.code_model.rest_layer_name, + import_type=ImportType.LOCAL, + alias="rest", + ) + if self.code_model.options["builders_visibility"] == "embedded" and async_mode: + file_import.add_submodule_import( + f"...{self.code_model.operations_folder_name}.{self.filename}", + request_builder.name, + import_type=ImportType.LOCAL, + ) + return file_import + + def imports( # pylint: disable=too-many-branches, disable=too-many-statements + self, async_mode: bool, **kwargs: Any + ) -> FileImport: + if self.abstract: + return FileImport(self.code_model) + file_import = self._imports_shared(async_mode, **kwargs) + + for param in self.parameters.method: + file_import.merge( + param.imports( + async_mode, + operation=self, + **kwargs, + ) + ) + for response in self.responses: + file_import.merge(response.imports(async_mode=async_mode, operation=self, **kwargs)) + if self.code_model.options["models_mode"]: + for exception in self.exceptions: + file_import.merge(exception.imports(async_mode=async_mode, **kwargs)) + + if self.parameters.has_body and self.parameters.body_parameter.flattened: + file_import.merge(self.parameters.body_parameter.type.imports(operation=self, **kwargs)) + if not async_mode: + for param in self.parameters.headers: + if param.wire_name.lower() == "repeatability-request-id": + file_import.add_import("uuid", ImportType.STDLIB) + elif param.wire_name.lower() == "repeatability-first-sent": + file_import.add_import("datetime", ImportType.STDLIB) + + # Exceptions + errors = [ + "map_error", + "HttpResponseError", + "ClientAuthenticationError", + "ResourceNotFoundError", + "ResourceExistsError", + "ResourceNotModifiedError", + ] + if self.stream_value: + errors.extend(["StreamConsumedError", "StreamClosedError"]) + for error in errors: + file_import.add_submodule_import("exceptions", error, ImportType.SDKCORE) + if self.code_model.options["azure_arm"]: + file_import.add_submodule_import("azure.mgmt.core.exceptions", "ARMErrorFormat", ImportType.SDKCORE) + if self.non_default_errors: + file_import.add_submodule_import( + "typing", + "Type", + ImportType.STDLIB, + ) + file_import.add_mutable_mapping_import() + if self.non_default_error_status_codes: + file_import.add_submodule_import( + "typing", + "cast", + ImportType.STDLIB, + ) + + if self.has_kwargs_to_pop_with_default( + self.parameters.kwargs_to_pop, ParameterLocation.HEADER # type: ignore + ) or self.has_kwargs_to_pop_with_default( + self.parameters.kwargs_to_pop, ParameterLocation.QUERY # type: ignore + ): + file_import.add_submodule_import( + "utils", + "case_insensitive_dict", + ImportType.SDKCORE, + ) + if self.deprecated: + file_import.add_import("warnings", ImportType.STDLIB) + + relative_path = "..." if async_mode else ".." 
+ if self.has_etag: + file_import.add_submodule_import( + "exceptions", + "ResourceModifiedError", + ImportType.SDKCORE, + ) + if not async_mode: + file_import.add_submodule_import(f"{relative_path}_vendor", "prep_if_match", ImportType.LOCAL) + file_import.add_submodule_import(f"{relative_path}_vendor", "prep_if_none_match", ImportType.LOCAL) + if async_mode: + file_import.add_submodule_import( + "rest", + "AsyncHttpResponse", + ImportType.SDKCORE, + ) + else: + file_import.add_submodule_import( + "rest", + "HttpResponse", + ImportType.SDKCORE, + ) + if self.code_model.options["builders_visibility"] == "embedded" and not async_mode: + file_import.merge(self.request_builder.imports()) + file_import.add_submodule_import( + f"{'' if self.code_model.is_azure_flavor else 'runtime.'}pipeline", + "PipelineResponse", + ImportType.SDKCORE, + ) + file_import.add_submodule_import("rest", "HttpRequest", ImportType.SDKCORE) + file_import.add_submodule_import("typing", "Callable", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.add_submodule_import("typing", "Dict", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.add_submodule_import("typing", "TypeVar", ImportType.STDLIB, TypingSection.CONDITIONAL) + if self.code_model.options["tracing"] and self.want_tracing and not async_mode: + file_import.add_submodule_import( + "azure.core.tracing.decorator", + "distributed_trace", + ImportType.SDKCORE, + ) + file_import.merge(self.get_request_builder_import(self.request_builder, async_mode)) + if self.overloads: + file_import.add_submodule_import("typing", "overload", ImportType.STDLIB) + if self.code_model.options["models_mode"] == "dpg": + if self.parameters.has_body: + if self.has_form_data_body: + file_import.add_submodule_import(relative_path, "_model_base", ImportType.LOCAL) + elif xml_serializable(self.parameters.body_parameter.default_content_type): + file_import.add_submodule_import( + f"{relative_path}_model_base", + "_get_element", + ImportType.LOCAL, + ) + elif json_serializable(self.parameters.body_parameter.default_content_type): + file_import.add_submodule_import( + f"{relative_path}_model_base", + "SdkJSONEncoder", + ImportType.LOCAL, + ) + file_import.add_import("json", ImportType.STDLIB) + if any(xml_serializable(str(r.default_content_type)) for r in self.responses): + file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize_xml", ImportType.LOCAL) + elif any(r.type for r in self.responses): + file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize", ImportType.LOCAL) + if self.default_error_deserialization or self.non_default_errors: + file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize", ImportType.LOCAL) + return file_import + + def get_response_from_status(self, status_code: Optional[Union[str, int]]) -> ResponseType: + try: + return next(r for r in self.responses if status_code in r.status_codes) + except StopIteration as exc: + raise ValueError(f"Incorrect status code {status_code}, operation {self.name}") from exc + + @property + def success_status_codes(self) -> Sequence[Union[str, int]]: + """The list of all successfull status code.""" + return sorted([code for response in self.responses for code in response.status_codes]) + + @property + def filename(self) -> str: + basename = self.group_name + if basename == "": + # in a mixin + basename = self.code_model.clients[0].legacy_filename + + if 
basename == "operations" or self.code_model.options["combine_operation_files"]: + return "_operations" + return f"_{basename}_operations" + + @property + def has_stream_response(self) -> bool: + return any(r.is_stream_response for r in self.responses) + + @classmethod + def get_request_builder(cls, yaml_data: Dict[str, Any], client: "Client"): + return client.lookup_request_builder(id(yaml_data)) + + @classmethod + def from_yaml( + cls, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + ): + name = yaml_data["name"] + request_builder = cls.get_request_builder(yaml_data, client) + responses = [cast(ResponseType, get_response(r, code_model)) for r in yaml_data["responses"]] + exceptions = [Response.from_yaml(e, code_model) for e in yaml_data["exceptions"]] + parameter_list = ParameterList.from_yaml(yaml_data, code_model) + overloads = [cls.from_yaml(overload, code_model, client) for overload in yaml_data.get("overloads", [])] + + return cls( + yaml_data=yaml_data, + code_model=code_model, + client=client, + request_builder=request_builder, + name=name, + parameters=parameter_list, + overloads=overloads, + responses=responses, + exceptions=exceptions, + ) + + +class Operation(OperationBase[Response]): + def imports(self, async_mode: bool, **kwargs: Any) -> FileImport: + file_import = super().imports(async_mode, **kwargs) + if self.abstract: + return file_import + if async_mode and self.code_model.options["tracing"] and self.want_tracing: + file_import.add_submodule_import( + "azure.core.tracing.decorator_async", + "distributed_trace_async", + ImportType.SDKCORE, + ) + if self.has_response_body and not self.has_optional_return_type and not self.code_model.options["models_mode"]: + file_import.add_submodule_import("typing", "cast", ImportType.STDLIB) + + return file_import + + +def get_operation(yaml_data: Dict[str, Any], code_model: "CodeModel", client: "Client") -> "OperationType": + if yaml_data["discriminator"] == "lropaging": + from .lro_paging_operation import LROPagingOperation as OperationCls + elif yaml_data["discriminator"] == "lro": + from .lro_operation import LROOperation as OperationCls # type: ignore + elif yaml_data["discriminator"] == "paging": + from .paging_operation import PagingOperation as OperationCls # type: ignore + else: + from . import Operation as OperationCls # type: ignore + return OperationCls.from_yaml(yaml_data, code_model, client) diff --git a/packages/http-client-python/generator/pygen/codegen/models/operation_group.py b/packages/http-client-python/generator/pygen/codegen/models/operation_group.py new file mode 100644 index 0000000000..a6339a86bf --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/operation_group.py @@ -0,0 +1,184 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Dict, List, Any, TYPE_CHECKING + +from .utils import OrderedSet + +from .base import BaseModel +from .operation import get_operation +from .imports import FileImport, ImportType, TypingSection +from .utils import add_to_pylint_disable +from .lro_operation import LROOperation +from .lro_paging_operation import LROPagingOperation +from ...utils import NAME_LENGTH_LIMIT + +if TYPE_CHECKING: + from .code_model import CodeModel + from .client import Client + from . import OperationType + + +class OperationGroup(BaseModel): + """Represent an operation group.""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + operations: List["OperationType"], + api_versions: List[str], + ) -> None: + super().__init__(yaml_data, code_model) + self.client = client + self.class_name: str = yaml_data["className"] + self.identify_name: str = yaml_data["identifyName"] + self.property_name: str = yaml_data["propertyName"] + self.operations = operations + self.api_versions = api_versions + self.operation_groups: List[OperationGroup] = [] + if self.code_model.options["show_operations"]: + self.operation_groups = [ + OperationGroup.from_yaml(op_group, code_model, client) + for op_group in self.yaml_data.get("operationGroups", []) + ] + self.link_lro_initial_operations() + + @property + def has_abstract_operations(self) -> bool: + return any(o for o in self.operations if o.abstract) or any( + operation_group.has_abstract_operations for operation_group in self.operation_groups + ) + + @property + def has_non_abstract_operations(self) -> bool: + return any(o for o in self.operations if not o.abstract) or any( + operation_group.has_non_abstract_operations for operation_group in self.operation_groups + ) + + @property + def base_class(self) -> str: + base_classes: List[str] = [] + if self.is_mixin: + base_classes.append(f"{self.client.name}MixinABC") + return ", ".join(base_classes) + + def imports_for_multiapi(self, async_mode: bool) -> FileImport: + file_import = FileImport(self.code_model) + relative_path = ".." if async_mode else "." + for operation in self.operations: + file_import.merge(operation.imports_for_multiapi(async_mode, relative_path=relative_path)) + if (self.code_model.model_types or self.code_model.enums) and self.code_model.options[ + "models_mode" + ] == "msrest": + file_import.add_submodule_import(relative_path, "models", ImportType.LOCAL, alias="_models") + return file_import + + def pylint_disable(self) -> str: + retval: str = "" + if self.has_abstract_operations: + retval = add_to_pylint_disable(retval, "abstract-class-instantiated") + if len(self.operations) > 20: + retval = add_to_pylint_disable(retval, "too-many-public-methods") + if len(self.class_name) > NAME_LENGTH_LIMIT: + retval = add_to_pylint_disable(retval, "name-too-long") + if len(self.operation_groups) > 6: + retval = add_to_pylint_disable(retval, "too-many-instance-attributes") + return retval + + @property + def need_validation(self) -> bool: + """Whether any of its operations need validation""" + return any(o for o in self.operations if o.need_validation) + + def imports(self, async_mode: bool) -> FileImport: + file_import = FileImport(self.code_model) + + relative_path = ("..." if async_mode else "..") + ("." 
if self.client.is_subclient else "") + for operation in self.operations: + file_import.merge(operation.imports(async_mode, relative_path=relative_path)) + if not self.code_model.options["combine_operation_files"]: + for og in self.operation_groups: + file_import.add_submodule_import( + ".", + og.class_name, + ImportType.LOCAL, + ) + # for multiapi + if ( + (self.code_model.public_model_types) + and self.code_model.options["models_mode"] == "msrest" + and not self.is_mixin + ): + file_import.add_submodule_import(relative_path, "models", ImportType.LOCAL, alias="_models") + if self.is_mixin: + file_import.add_submodule_import(".._vendor", f"{self.client.name}MixinABC", ImportType.LOCAL) + if self.has_abstract_operations: + file_import.add_submodule_import(".._vendor", "raise_if_not_implemented", ImportType.LOCAL) + if all(o.abstract for o in self.operations): + return file_import + file_import.add_submodule_import("typing", "TypeVar", ImportType.STDLIB, TypingSection.CONDITIONAL) + file_import.define_mypy_type("T", "TypeVar('T')") + type_value = "Optional[Callable[[PipelineResponse[HttpRequest, {}HttpResponse], T, Dict[str, Any]], Any]]" + file_import.define_mypy_type("ClsType", type_value.format(""), type_value.format("Async")) + return file_import + + @property + def filename(self) -> str: + return self.operations[0].filename + + @property + def is_mixin(self) -> bool: + """The operation group with no name is the direct client methods.""" + return self.identify_name == "" + + def link_lro_initial_operations(self) -> None: + """Link each LRO operation to its initial operation""" + for operation_group in self.operation_groups: + for operation in operation_group.operations: + if isinstance(operation, (LROOperation, LROPagingOperation)): + operation.initial_operation = self.lookup_operation(id(operation.yaml_data["initialOperation"])) + + def lookup_operation(self, operation_id: int) -> "OperationType": + try: + return next(o for og in self.operation_groups for o in og.operations if id(o.yaml_data) == operation_id) + except StopIteration as exc: + raise KeyError(f"No operation with id {operation_id} found.") from exc + + @property + def lro_operations(self) -> List["OperationType"]: + return [operation for operation in self.operations if operation.operation_type in ("lro", "lropaging")] + [ + operation for operation_group in self.operation_groups for operation in operation_group.lro_operations + ] + + @property + def has_operations(self) -> bool: + return any(operation_group.has_operations for operation_group in self.operation_groups) or bool(self.operations) + + @property + def has_form_data_body(self) -> bool: + operations = self.operations + [o for og in self.operation_groups for o in og.operations] + return any(operation.has_form_data_body for operation in operations) + + @classmethod + def from_yaml( + cls, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + ) -> "OperationGroup": + operations = [get_operation(o, code_model, client) for o in yaml_data["operations"]] + api_versions: OrderedSet[str] = {} + for operation in operations: + for api_version in operation.api_versions: + api_versions[api_version] = None + return cls( + yaml_data=yaml_data, + code_model=code_model, + client=client, + operations=operations, + api_versions=list(api_versions.keys()), + ) diff --git a/packages/http-client-python/generator/pygen/codegen/models/paging_operation.py b/packages/http-client-python/generator/pygen/codegen/models/paging_operation.py new file mode 100644 index 
0000000000..dd5ee82640 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/paging_operation.py @@ -0,0 +1,155 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Dict, List, Any, Optional, Union, TYPE_CHECKING, cast, TypeVar + +from .operation import Operation, OperationBase +from .response import PagingResponse, LROPagingResponse, Response +from .request_builder import ( + OverloadedRequestBuilder, + RequestBuilder, + get_request_builder, +) +from .imports import ImportType, FileImport, TypingSection +from .parameter_list import ParameterList +from .model_type import ModelType +from .list_type import ListType + +if TYPE_CHECKING: + from .code_model import CodeModel + from .client import Client + +PagingResponseType = TypeVar("PagingResponseType", bound=Union[PagingResponse, LROPagingResponse]) + + +class PagingOperationBase(OperationBase[PagingResponseType]): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + name: str, + request_builder: RequestBuilder, + parameters: ParameterList, + responses: List[PagingResponseType], + exceptions: List[Response], + *, + overloads: Optional[List[Operation]] = None, + override_success_response_to_200: bool = False, + ) -> None: + super().__init__( + code_model=code_model, + client=client, + yaml_data=yaml_data, + name=name, + request_builder=request_builder, + parameters=parameters, + responses=responses, + exceptions=exceptions, + overloads=overloads, + ) + self.next_request_builder: Optional[Union[RequestBuilder, OverloadedRequestBuilder]] = ( + get_request_builder(self.yaml_data["nextOperation"], code_model, client) + if self.yaml_data.get("nextOperation") + else None + ) + self.override_success_response_to_200 = override_success_response_to_200 + self.pager_sync: str = yaml_data.get("pagerSync") or f"{self.code_model.core_library}.paging.ItemPaged" + self.pager_async: str = yaml_data.get("pagerAsync") or f"{self.code_model.core_library}.paging.AsyncItemPaged" + + def _get_attr_name(self, wire_name: str) -> str: + response_type = self.responses[0].type + if not response_type: + raise ValueError(f"Can't find a matching property in response for {wire_name}") + if response_type.type == "list": + response_type = cast(ListType, response_type).element_type + try: + return next(p.client_name for p in cast(ModelType, response_type).properties if p.wire_name == wire_name) + except StopIteration as exc: + raise ValueError(f"Can't find a matching property in response for {wire_name}") from exc + + def get_pager(self, async_mode: bool) -> str: + return self.responses[0].get_pager(async_mode) + + @property + def continuation_token_name(self) -> Optional[str]: + wire_name = self.yaml_data.get("continuationTokenName") + if not wire_name: + # That's an ok scenario, it just means no next page possible + return None + if self.code_model.options["models_mode"] == "msrest": + return self._get_attr_name(wire_name) + return wire_name + + @property + def item_name(self) -> str: + wire_name = self.yaml_data["itemName"] + if self.code_model.options["models_mode"] == "msrest": + # we don't use the paging model for dpg + return self._get_attr_name(wire_name) + return wire_name + + @property + def item_type(self) -> 
ModelType: + try: + item_type_yaml = self.yaml_data["itemType"] + except KeyError as e: + raise ValueError("Only call this for DPG paging model deserialization") from e + return cast(ModelType, self.code_model.types_map[id(item_type_yaml)]) + + @property + def operation_type(self) -> str: + return "paging" + + def cls_type_annotation(self, *, async_mode: bool) -> str: + return f"ClsType[{Response.type_annotation(self.responses[0], async_mode=async_mode)}]" + + def _imports_shared(self, async_mode: bool, **kwargs: Any) -> FileImport: + file_import = super()._imports_shared(async_mode, **kwargs) + if async_mode: + file_import.add_submodule_import("typing", "AsyncIterable", ImportType.STDLIB, TypingSection.CONDITIONAL) + else: + file_import.add_submodule_import("typing", "Iterable", ImportType.STDLIB, TypingSection.CONDITIONAL) + if ( + self.next_request_builder + and self.code_model.options["builders_visibility"] == "embedded" + and not async_mode + ): + file_import.merge(self.next_request_builder.imports()) + return file_import + + @property + def has_optional_return_type(self) -> bool: + return False + + def imports(self, async_mode: bool, **kwargs: Any) -> FileImport: + if self.abstract: + return FileImport(self.code_model) + file_import = self._imports_shared(async_mode, **kwargs) + file_import.merge(super().imports(async_mode, **kwargs)) + if self.code_model.options["tracing"] and self.want_tracing: + file_import.add_submodule_import( + "azure.core.tracing.decorator", + "distributed_trace", + ImportType.SDKCORE, + ) + if self.next_request_builder: + file_import.merge(self.get_request_builder_import(self.next_request_builder, async_mode)) + elif any(p.is_api_version for p in self.client.parameters): + file_import.add_import("urllib.parse", ImportType.STDLIB) + file_import.add_submodule_import( + "utils", + "case_insensitive_dict", + ImportType.SDKCORE, + ) + if self.code_model.options["models_mode"] == "dpg": + relative_path = "..." if async_mode else ".." + file_import.merge(self.item_type.imports(**kwargs)) + if self.default_error_deserialization or any(r.type for r in self.responses): + file_import.add_submodule_import(f"{relative_path}_model_base", "_deserialize", ImportType.LOCAL) + return file_import + + +class PagingOperation(PagingOperationBase[PagingResponse]): ... diff --git a/packages/http-client-python/generator/pygen/codegen/models/parameter.py b/packages/http-client-python/generator/pygen/codegen/models/parameter.py new file mode 100644 index 0000000000..7703c9f2e7 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/parameter.py @@ -0,0 +1,412 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import abc +from enum import Enum + +from typing import ( + Dict, + Any, + TYPE_CHECKING, + List, + Optional, + TypeVar, + Union, +) + +from .imports import FileImport, ImportType, TypingSection +from .base import BaseModel +from .base import BaseType +from .constant_type import ConstantType +from .utils import add_to_description +from .combined_type import CombinedType +from .model_type import JSONModelType + +if TYPE_CHECKING: + from .code_model import CodeModel + from .request_builder_parameter import RequestBuilderBodyParameter + + +class ParameterLocation(str, Enum): + HEADER = "header" + PATH = "path" + ENDPOINT_PATH = "endpointPath" + QUERY = "query" + BODY = "body" + OTHER = "other" + + +class ParameterMethodLocation(str, Enum): + POSITIONAL = "positional" + KEYWORD_ONLY = "keywordOnly" + KWARG = "kwarg" + + +class ParameterDelimeter(str, Enum): + SPACE = "space" + PIPE = "pipe" + TAB = "tab" + COMMA = "comma" + + +class _ParameterBase(BaseModel, abc.ABC): # pylint: disable=too-many-instance-attributes + """Base class for all parameters""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + type: BaseType, + ) -> None: + super().__init__(yaml_data, code_model) + self.wire_name: str = yaml_data.get("wireName", "") + self.client_name: str = self.yaml_data["clientName"] + self.optional: bool = self.yaml_data["optional"] + self.implementation: str = yaml_data.get("implementation", None) + self.location: ParameterLocation = self.yaml_data["location"] + self.client_default_value = self.yaml_data.get("clientDefaultValue", None) + self.in_docstring = self.yaml_data.get("inDocstring", True) + self.type = type + if self.client_default_value is None: + self.client_default_value = self.type.client_default_value + # name of grouper if it is grouped by another parameter + self.grouped_by: Optional[str] = self.yaml_data.get("groupedBy") + # property matching property name to parameter name for grouping params + # and flattened body params + self.property_to_parameter_name: Optional[Dict[str, str]] = self.yaml_data.get("propertyToParameterName") + self.flattened: bool = self.yaml_data.get("flattened", False) + self.in_flattened_body: bool = self.yaml_data.get("inFlattenedBody", False) + self.grouper: bool = self.yaml_data.get("grouper", False) + self.check_client_input: bool = self.yaml_data.get("checkClientInput", False) + self.added_on: Optional[str] = self.yaml_data.get("addedOn") + self.is_api_version: bool = self.yaml_data.get("isApiVersion", False) + self.in_overload: bool = self.yaml_data.get("inOverload", False) + self.default_to_unset_sentinel: bool = self.yaml_data.get("defaultToUnsetSentinel", False) + self.hide_in_method: bool = self.yaml_data.get("hideInMethod", False) + + def get_declaration(self, value: Any = None) -> Any: + return self.type.get_declaration(value) + + @property + def hide_in_operation_signature(self) -> bool: + return False + + @property + def constant(self) -> bool: + """Returns whether a parameter is a constant or not. + Checking to see if it's required, because if not, we don't consider it + a constant because it can have a value of None. 
+ """ + return (not self.optional or self.is_api_version) and isinstance(self.type, ConstantType) + + @property + def description(self) -> str: + base_description = self.yaml_data["description"] + type_description = self.type.description(is_operation_file=True) + if type_description: + base_description = add_to_description(base_description, type_description) + if self.optional and isinstance(self.type, ConstantType): + base_description = add_to_description( + base_description, + f"Known values are {self.get_declaration()} and None.", + ) + if not (self.optional or self.client_default_value): + base_description = add_to_description(base_description, "Required.") + if self.client_default_value is not None: + base_description = add_to_description( + base_description, + f"Default value is {self.client_default_value_declaration}.", + ) + if self.optional and self.client_default_value is None: + base_description = add_to_description( + base_description, + f"Default value is {self.client_default_value_declaration}.", + ) + if self.constant: + base_description = add_to_description( + base_description, + "Note that overriding this default value may result in unsupported behavior.", + ) + return base_description + + @property + def client_default_value_declaration(self): + """Declaration of parameter's client default value""" + if self.client_default_value is None: + return None + return self.get_declaration(self.client_default_value) + + def type_annotation(self, **kwargs: Any) -> str: + kwargs["is_operation_file"] = True + # special logic for api-version parameter + if self.is_api_version: + type_annotation = "str" + else: + type_annotation = self.type.type_annotation(**kwargs) + if self.optional and self.client_default_value is None: + return f"Optional[{type_annotation}]" + return type_annotation + + def docstring_text(self, **kwargs: Any) -> str: + return self.type.docstring_text(**kwargs) + + def docstring_type(self, **kwargs: Any) -> str: + return self.type.docstring_type(**kwargs) + + @property + def serialization_type(self) -> str: + return self.type.serialization_type + + def _imports_shared(self, async_mode: bool, **_: Any) -> FileImport: + file_import = FileImport(self.code_model) + if self.optional and self.client_default_value is None: + file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB) + if self.added_on and self.implementation != "Client": + file_import.add_submodule_import( + f"{'.' if async_mode else ''}.._validation", + "api_version_validation", + ImportType.LOCAL, + ) + if isinstance(self.type, CombinedType) and self.type.name: + file_import.add_submodule_import( + "..." 
if async_mode else "..", + "_types", + ImportType.LOCAL, + TypingSection.TYPING, + ) + return file_import + + def imports(self, async_mode: bool, **kwargs: Any) -> FileImport: + file_import = self._imports_shared(async_mode, **kwargs) + # special logic for api-version parameter + if not self.is_api_version: + file_import.merge(self.type.imports(async_mode=async_mode, **kwargs)) + if self.default_to_unset_sentinel: + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB) + file_import.define_mypy_type( + "_Unset: Any", + "object()", + ) + return file_import + + def imports_for_multiapi(self, async_mode: bool, **kwargs: Any) -> FileImport: + file_import = self._imports_shared(async_mode, **kwargs) + file_import.merge(self.type.imports_for_multiapi(async_mode=async_mode, **kwargs)) + return file_import + + @property + def method_location(self) -> ParameterMethodLocation: + raise NotImplementedError("Please implement in children") + + @property + def description_keyword(self) -> str: + return "param" if self.method_location == ParameterMethodLocation.POSITIONAL else "keyword" + + @property + def docstring_type_keyword(self) -> str: + return "type" if self.method_location == ParameterMethodLocation.POSITIONAL else "paramtype" + + @property + @abc.abstractmethod + def in_method_signature(self) -> bool: ... + + def method_signature(self, async_mode: bool) -> str: + type_annotation = self.type_annotation(async_mode=async_mode) + if self.client_default_value is not None or self.optional: + return f"{self.client_name}: {type_annotation} = {self.client_default_value_declaration}," + if self.default_to_unset_sentinel: + return f"{self.client_name}: {type_annotation} = _Unset," + return f"{self.client_name}: {type_annotation}," + + +class BodyParameter(_ParameterBase): + """Body parameter.""" + + @property + def entries(self) -> List["BodyParameter"]: + return [BodyParameter.from_yaml(e, self.code_model) for e in self.yaml_data.get("entries", [])] + + @property + def is_form_data(self) -> bool: + # hacky, but rn in legacy, there is no formdata model type, it's just a dict + # with all of the entries splatted out + return ( + self.type.is_form_data + or bool(self.entries) + or ("multipart/form-data" in self.content_types and self.code_model.options["from_typespec"]) + ) + + @property + def is_partial_body(self) -> bool: + """Whether it's part of a bigger body parameter, i.e. a MultipartBodyParameter""" + return self.yaml_data.get("isPartialBody", False) + + @property + def method_location(self) -> ParameterMethodLocation: + return ParameterMethodLocation.KWARG if self.constant else ParameterMethodLocation.POSITIONAL + + @property + def in_method_signature(self) -> bool: + if self.yaml_data.get("entries"): + # Right now, only legacy generates with multipart bodies and entries + # and legacy generates with the multipart body arguments splatted out + return False + return not (self.flattened or self.grouped_by) + + @property + def content_types(self) -> List[str]: + return self.yaml_data["contentTypes"] + + @property + def default_content_type(self) -> str: + return self.yaml_data["defaultContentType"] + + @property + def has_json_model_type(self) -> bool: + if isinstance(self.type, CombinedType): + return self.type.target_model_subtype((JSONModelType,)) is not None + return isinstance(self.type, JSONModelType) + + def imports(self, async_mode: bool, **kwargs: Any) -> FileImport: + file_import = super().imports(async_mode, **kwargs) + if self.is_form_data: + relative_path = "..." 
if async_mode else ".." + file_import.add_submodule_import( + f"{relative_path}_vendor", + "prepare_multipart_form_data", + ImportType.LOCAL, + ) + file_import.add_submodule_import("typing", "List", ImportType.STDLIB) + return file_import + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "BodyParameter": + return cls( + yaml_data=yaml_data, + code_model=code_model, + type=code_model.lookup_type(id(yaml_data["type"])), + ) + + +EntryBodyParameterType = TypeVar("EntryBodyParameterType", bound=Union[BodyParameter, "RequestBuilderBodyParameter"]) + + +class Parameter(_ParameterBase): + """Basic Parameter class""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + type: BaseType, + ) -> None: + super().__init__(yaml_data, code_model, type=type) + + self.skip_url_encoding: bool = self.yaml_data.get("skipUrlEncoding", False) + self.explode: bool = self.yaml_data.get("explode", False) + self.in_overridden: bool = self.yaml_data.get("inOverridden", False) + self.delimiter: Optional[ParameterDelimeter] = self.yaml_data.get("delimiter") + self._default_to_unset_sentinel: bool = False + + @property + def hide_in_operation_signature(self) -> bool: + if self.code_model.options["version_tolerant"] and self.client_name == "maxpagesize": + return True + return False + + @property + def in_method_signature(self) -> bool: + return not (self.wire_name == "Accept" or self.grouped_by or self.flattened) + + @property + def full_client_name(self) -> str: + if self.implementation == "Client": + return f"self._config.{self.client_name}" + return self.client_name + + @property + def xml_serialization_ctxt(self) -> str: + return self.type.xml_serialization_ctxt or "" + + @property + def is_content_type(self) -> bool: + return bool(self.wire_name) and self.wire_name.lower() == "content-type" + + @property + def method_location( # pylint: disable=too-many-return-statements + self, + ) -> ParameterMethodLocation: + if not self.in_method_signature: + raise ValueError(f"Parameter '{self.client_name}' is not in the method.") + if self.code_model.options["models_mode"] == "dpg" and self.in_flattened_body: + return ParameterMethodLocation.KEYWORD_ONLY + if self.grouper: + return ParameterMethodLocation.POSITIONAL + if self.constant and self.wire_name != "Content-Type": + return ParameterMethodLocation.KWARG + if self.is_content_type: + if self.in_overload: + return ParameterMethodLocation.KEYWORD_ONLY + return ParameterMethodLocation.KWARG + query_or_header = self.location in ( + ParameterLocation.HEADER, + ParameterLocation.QUERY, + ) + if self.code_model.options["only_path_and_body_params_positional"] and query_or_header: + return ParameterMethodLocation.KEYWORD_ONLY + return ParameterMethodLocation.POSITIONAL + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel"): + return cls( + yaml_data=yaml_data, + code_model=code_model, + type=code_model.lookup_type(id(yaml_data["type"])), + ) + + +class ClientParameter(Parameter): + """Client parameter""" + + @property + def is_host(self) -> bool: + return self.wire_name == "$host" + + @property + def method_location(self) -> ParameterMethodLocation: + if self.constant: + return ParameterMethodLocation.KWARG + if ( + self.is_host + and (self.code_model.options["version_tolerant"] or self.code_model.options["low_level_client"]) + and not self.code_model.options["azure_arm"] + ): + # this means i am the base url + return ParameterMethodLocation.KEYWORD_ONLY + if ( + 
self.client_default_value is not None + and self.code_model.options["from_typespec"] + and not self.code_model.options["azure_arm"] + ): + return ParameterMethodLocation.KEYWORD_ONLY + return ParameterMethodLocation.POSITIONAL + + +class ConfigParameter(Parameter): + """Config Parameter""" + + @property + def in_method_signature(self) -> bool: + return not self.is_host + + @property + def is_host(self) -> bool: + return self.wire_name == "$host" + + @property + def method_location(self) -> ParameterMethodLocation: + if self.constant: + return ParameterMethodLocation.KWARG + return ParameterMethodLocation.POSITIONAL diff --git a/packages/http-client-python/generator/pygen/codegen/models/parameter_list.py b/packages/http-client-python/generator/pygen/codegen/models/parameter_list.py new file mode 100644 index 0000000000..fe0210c301 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/parameter_list.py @@ -0,0 +1,387 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +from typing import ( + Any, + Callable, + Dict, + List, + Optional, + TYPE_CHECKING, + Union, + Generic, + TypeVar, + cast, +) +from abc import abstractmethod +from collections.abc import MutableSequence +from enum import Enum + +from .request_builder_parameter import ( + RequestBuilderBodyParameter, + RequestBuilderParameter, +) +from .parameter import ( + ParameterLocation, + BodyParameter, + Parameter, + ParameterMethodLocation, + ClientParameter, + ConfigParameter, +) + +ParameterType = TypeVar("ParameterType", bound=Union[Parameter, RequestBuilderParameter]) +BodyParameterType = TypeVar("BodyParameterType", bound=Union[BodyParameter, RequestBuilderBodyParameter]) + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class ParameterImplementation(Enum): + METHOD = "method" + CLIENT = "client" + + +_LOGGER = logging.getLogger(__name__) + + +def method_signature_helper(positional: List[str], keyword_only: Optional[List[str]], kwarg_params: List[str]): + keyword_only = keyword_only or [] + return positional + keyword_only + kwarg_params + + +def _sort(params): + return sorted(params, key=lambda x: not (x.client_default_value or x.optional), reverse=True) + + +class _ParameterListBase( + MutableSequence, Generic[ParameterType, BodyParameterType] +): # pylint: disable=too-many-public-methods + """Base class for all of our different ParameterList classes""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + parameters: List[ParameterType], + body_parameter: Optional[BodyParameterType] = None, + ) -> None: + self.yaml_data = yaml_data + self.code_model = code_model + self.parameters = parameters or [] + self._body_parameter = body_parameter + + # MutableSequence + + def __getitem__(self, index): + if isinstance(index, str): + raise TypeError(f"{index} is invalid type") + return self.parameters[index] + + def __len__(self) -> int: + return len(self.parameters) + + def __setitem__(self, index, parameter): + self.parameters[index] = parameter + + def __delitem__(self, index): + del self.parameters[index] + + def insert(self, index: int, value: ParameterType) -> None: + self.parameters.insert(index, value) + + # Parameter helpers + + @staticmethod + @abstractmethod + def parameter_creator() 
-> Callable[[Dict[str, Any], "CodeModel"], ParameterType]: + """Callable for creating parameters""" + + @staticmethod + @abstractmethod + def body_parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], BodyParameterType]: + """Callable for creating body parameters""" + + @property + def grouped(self) -> List[Union[ParameterType, BodyParameterType]]: + """All parameters that are inside a parameter group""" + params: List[Union[ParameterType, BodyParameterType]] = [p for p in self.parameters if p.grouped_by] + if self.has_body and self.body_parameter.grouped_by: + params.append(self.body_parameter) + return params + + @property + def has_form_data_body(self): + return self.has_body and self.body_parameter.is_form_data + + @property + def has_body(self) -> bool: + """Whether there is a body parameter in the parameter list""" + return bool(self._body_parameter) + + @property + def path(self) -> List[ParameterType]: + """All path parameters""" + return [p for p in self.parameters if p.location in (ParameterLocation.PATH, ParameterLocation.ENDPOINT_PATH)] + + @property + def query(self) -> List[ParameterType]: + """All query parameters""" + return [p for p in self.parameters if p.location == ParameterLocation.QUERY] + + @property + def headers(self) -> List[ParameterType]: + """All header parameters""" + return [p for p in self.parameters if p.location == ParameterLocation.HEADER] + + @property + def constant(self) -> List[Union[ParameterType, BodyParameterType]]: + """All constant parameters""" + return [p for p in self.parameters if p.constant] + + @property + def positional(self) -> List[Union[ParameterType, BodyParameterType]]: + """All positional parameters""" + return _sort( + [p for p in self.unsorted_method_params if p.method_location == ParameterMethodLocation.POSITIONAL] + ) + + @property + def keyword_only(self) -> List[Union[ParameterType, BodyParameterType]]: + """All keyword only parameters""" + return _sort( + [p for p in self.unsorted_method_params if p.method_location == ParameterMethodLocation.KEYWORD_ONLY] + ) + + @property + def kwarg(self) -> List[Union[ParameterType, BodyParameterType]]: + """All kwargs""" + return _sort([p for p in self.unsorted_method_params if p.method_location == ParameterMethodLocation.KWARG]) + + @property + def body_parameter(self) -> BodyParameterType: + """The body parameter of the parameter list. Will only ever be at most one.""" + if not self._body_parameter: + raise ValueError("There is no body parameter") + return self._body_parameter + + @property + @abstractmethod + def implementation(self) -> str: + """Whether this is a client or a method parameter""" + + @property + def unsorted_method_params(self) -> List[Union[ParameterType, BodyParameterType]]: + """Method params before sorting""" + method_params: List[Union[ParameterType, BodyParameterType]] = [ + p + for p in self.parameters + if p.in_method_signature + and p.implementation == self.implementation + and (self.code_model.is_legacy or not p.hide_in_method) + ] + if self._body_parameter: + if self._body_parameter.in_method_signature: + method_params.append(self._body_parameter) + try: + # i am a multipart body parameter + # Only legacy generates operations with me, so I will follow the legacy rules + # I will splat out my entries as individual entries + method_params.extend(self._body_parameter.entries) # type: ignore + except AttributeError: + pass + return method_params + + @property + def method(self) -> List[Union[ParameterType, BodyParameterType]]: + """Sorted method params. 
First positional, then keyword only, then kwarg""" + return self.positional + self.keyword_only + self.kwarg + + def method_signature(self, async_mode: bool) -> List[str]: + """Method signature for this parameter list.""" + return method_signature_helper( + positional=self.method_signature_positional(async_mode), + keyword_only=self.method_signature_keyword_only(async_mode), + kwarg_params=self.method_signature_kwargs, + ) + + def method_signature_positional(self, async_mode: bool) -> List[str]: + """Signature for positional parameters""" + return [parameter.method_signature(async_mode) for parameter in self.positional] + + def method_signature_keyword_only(self, async_mode: bool) -> List[str]: + """Signature for keyword only parameters""" + result = [ + parameter.method_signature(async_mode) + for parameter in self.keyword_only + if not parameter.hide_in_operation_signature + ] + return ["*,"] + result if result else [] + + @property + def method_signature_kwargs(self) -> List[str]: + """Signature for kwargs""" + return ["**kwargs: Any"] + + @property + def kwargs_to_pop(self) -> List[Union[ParameterType, BodyParameterType]]: + """Method kwargs we want to pop""" + # don't want to pop bodies unless it's a constant + kwargs_to_pop = self.kwarg + return [k for k in kwargs_to_pop if k.location != ParameterLocation.BODY or k.constant] + + @property + def call(self) -> List[str]: + """How to pass in parameters to call the operation""" + retval = [p.client_name for p in self.method if p.method_location == ParameterMethodLocation.POSITIONAL] + retval.extend( + [ + f"{p.client_name}={p.client_name}" + for p in self.method + if p.method_location == ParameterMethodLocation.KEYWORD_ONLY + ] + ) + retval.append("**kwargs") + return retval + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel"): + parameters = [cls.parameter_creator()(parameter, code_model) for parameter in yaml_data["parameters"]] + body_parameter = None + if yaml_data.get("bodyParameter"): + body_parameter = cls.body_parameter_creator()(yaml_data["bodyParameter"], code_model) + return cls( + yaml_data, + code_model, + parameters=parameters, + body_parameter=body_parameter, + ) + + +class _ParameterList(_ParameterListBase[Parameter, BodyParameter]): + + @staticmethod + def parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], Parameter]: + return Parameter.from_yaml + + @staticmethod + def body_parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], BodyParameter]: + return BodyParameter.from_yaml + + @property + def implementation(self) -> str: + return "Method" + + @property + def path(self) -> List[Parameter]: + return [k for k in super().path if k.location == ParameterLocation.ENDPOINT_PATH] + + +class ParameterList(_ParameterList): + """ParameterList is the parameter list for Operation classes""" + + +class _RequestBuilderParameterList(_ParameterListBase[RequestBuilderParameter, RequestBuilderBodyParameter]): + """_RequestBuilderParameterList is base parameter list for RequestBuilder classes""" + + @staticmethod + def parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], RequestBuilderParameter]: + return RequestBuilderParameter.from_yaml + + @staticmethod + def body_parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], RequestBuilderBodyParameter]: + return RequestBuilderBodyParameter.from_yaml + + @property + def implementation(self) -> str: + return "Method" + + @property + def unsorted_method_params( + self, + ) -> List[Union[RequestBuilderParameter, 
RequestBuilderBodyParameter]]: + # don't have access to client params in request builder + retval = [ + p + for p in super().unsorted_method_params + if not (p.location == ParameterLocation.BODY and cast(RequestBuilderBodyParameter, p).is_partial_body) + ] + retval.extend([p for p in self.parameters if p.implementation == "Client" and p.in_method_signature]) + return retval + + @property + def path(self) -> List[RequestBuilderParameter]: + return [p for p in super().path if p.location != ParameterLocation.ENDPOINT_PATH] + + @property + def constant( + self, + ) -> List[Union[RequestBuilderParameter, RequestBuilderBodyParameter]]: + """All constant parameters""" + return [p for p in super().constant if p.location != ParameterLocation.ENDPOINT_PATH] + + +class RequestBuilderParameterList(_RequestBuilderParameterList): + """Parameter list for Request Builder""" + + +class OverloadedRequestBuilderParameterList(_RequestBuilderParameterList): + """Parameter list for OverloadedRequestBuilder""" + + +class _ClientGlobalParameterList(_ParameterListBase[ParameterType, BodyParameter]): # pylint: disable=abstract-method + """Base parameter list for client and config classes""" + + @staticmethod + def body_parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], BodyParameter]: + return BodyParameter.from_yaml + + @property + def implementation(self) -> str: + return "Client" + + @property + def credential(self) -> Optional[ParameterType]: + try: + return next(p for p in self.parameters if p.client_name == "credential") + except StopIteration: + return None + + @property + def path(self) -> List[ParameterType]: + return [p for p in super().path if p.location == ParameterLocation.ENDPOINT_PATH] + + +class ClientGlobalParameterList(_ClientGlobalParameterList[ClientParameter]): + """Parameter list for Client class""" + + @staticmethod + def parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], ClientParameter]: + return ClientParameter.from_yaml + + @property + def path(self) -> List[ClientParameter]: + return [p for p in super().path if not p.is_host] + + @property + def host(self) -> Optional[ClientParameter]: + """Get the host parameter""" + try: + return next(p for p in self.parameters if p.is_host) + except StopIteration: + return None + + +class ConfigGlobalParameterList(_ClientGlobalParameterList[ConfigParameter]): + """Parameter list for config""" + + @staticmethod + def parameter_creator() -> Callable[[Dict[str, Any], "CodeModel"], ConfigParameter]: + return ConfigParameter.from_yaml + + @property + def implementation(self) -> str: + return "Client" diff --git a/packages/http-client-python/generator/pygen/codegen/models/primitive_types.py b/packages/http-client-python/generator/pygen/codegen/models/primitive_types.py new file mode 100644 index 0000000000..bbb2bcf2af --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/primitive_types.py @@ -0,0 +1,659 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import datetime +import decimal +from typing import Any, Dict, List, Optional, Union, TYPE_CHECKING + +from .base import BaseType +from .imports import FileImport, ImportType, TypingSection + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class RawString(object): + def __init__(self, string: str) -> None: + self.string = string + + def __repr__(self) -> str: + return "r'{}'".format(self.string.replace("'", "\\'")) + + +class PrimitiveType(BaseType): + def description(self, *, is_operation_file: bool) -> str: + return "" + + def type_annotation(self, **kwargs: Any) -> str: + return self.docstring_type(**kwargs) + + def docstring_text(self, **kwargs: Any) -> str: + return self.docstring_type(**kwargs) + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + if self.client_default_value is not None: + client_default_value_declaration = client_default_value_declaration or self.get_declaration( + self.client_default_value + ) + return client_default_value_declaration or self.default_template_representation_declaration + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(self.docstring_type()) + + +class BooleanType(PrimitiveType): + @property + def serialization_type(self) -> str: + return "bool" + + def docstring_type(self, **kwargs: Any) -> str: + return "bool" + + @property + def instance_check_template(self) -> str: + return "isinstance({}, bool)" + + +class BinaryType(PrimitiveType): + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.type = "IO" + + @property + def serialization_type(self) -> str: + return self.type + + def docstring_type(self, **kwargs: Any) -> str: + return f"{self.type}[bytes]" + + def type_annotation(self, **kwargs: Any) -> str: + return f"{self.type}[bytes]" + + def docstring_text(self, **kwargs: Any) -> str: + return f"{self.type}[bytes]" + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(b"bytes") + + def imports(self, **kwargs: Any) -> FileImport: + from .combined_type import CombinedType + from .operation import OperationBase + + file_import = FileImport(self.code_model) + file_import.add_submodule_import("typing", "IO", ImportType.STDLIB) + operation = kwargs.get("operation") + if ( + isinstance(operation, OperationBase) + and operation.parameters.has_body + and isinstance(operation.parameters.body_parameter.type, CombinedType) + ): + file_import.add_submodule_import("io", "IOBase", ImportType.STDLIB) + return file_import + + @property + def instance_check_template(self) -> str: + return "isinstance({}, (IOBase, bytes))" + + +class BinaryIteratorType(PrimitiveType): + def _iterator_name(self, **kwargs: Any) -> str: + return "AsyncIterator" if kwargs.pop("async_mode") else "Iterator" + + @property + def serialization_type(self) -> str: + return "IO" + + def docstring_type(self, **kwargs: Any) -> str: + return f"{self._iterator_name(**kwargs)}[bytes]" + + def type_annotation(self, **kwargs: Any) -> str: + return f"{self._iterator_name(**kwargs)}[bytes]" + + def docstring_text(self, **kwargs: Any) -> str: + return f"{self._iterator_name(**kwargs)}[bytes]" + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(b"bytes") + + def imports(self, 
**kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_submodule_import("typing", self._iterator_name(**kwargs), ImportType.STDLIB) + return file_import + + @property + def instance_check_template(self) -> str: + return "getattr({}, '__aiter__', None) is not None or getattr({}, '__iter__', None) is not None" + + +class AnyType(PrimitiveType): + @property + def serialization_type(self) -> str: + return "object" + + def docstring_type(self, **kwargs: Any) -> str: + return "any" + + def type_annotation(self, **kwargs: Any) -> str: + return "Any" + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration({}) + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB, TypingSection.CONDITIONAL) + return file_import + + @property + def instance_check_template(self) -> str: + raise ValueError("Shouldn't do instance check on an anytype, it can be anything") + + +class AnyObjectType(PrimitiveType): + @property + def serialization_type(self) -> str: + return "object" + + def docstring_type(self, **kwargs: Any) -> str: + return "JSON" + + def type_annotation(self, **kwargs: Any) -> str: + return "JSON" + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration({}) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, MutableMapping)" + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.define_mutable_mapping_type() + return file_import + + @property + def type_description(self) -> str: + return "JSON" + + +class NumberType(PrimitiveType): + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.precision: Optional[int] = yaml_data.get("precision") + self.multiple: Optional[int] = yaml_data.get("multipleOf") + self.maximum: Optional[int] = yaml_data.get("maximum") + self.minimum: Optional[int] = yaml_data.get("minimum") + self.exclusive_maximum: Optional[int] = yaml_data.get("exclusiveMaximum") + self.exclusive_minimum: Optional[int] = yaml_data.get("exclusiveMinimum") + + @property + def serialization_constraints(self) -> List[str]: + validation_constraints = [ + (f"maximum_ex={self.maximum}" if self.maximum is not None and self.exclusive_maximum else None), + (f"maximum={self.maximum}" if self.maximum is not None and not self.exclusive_maximum else None), + (f"minimum_ex={self.minimum}" if self.minimum is not None and self.exclusive_minimum else None), + (f"minimum={self.minimum}" if self.minimum is not None and not self.exclusive_minimum else None), + f"multiple={self.multiple}" if self.multiple else None, + ] + return [x for x in validation_constraints if x is not None] + + @property + def validation(self) -> Optional[Dict[str, Union[bool, int, str]]]: + validation: Dict[str, Union[bool, int, str]] = {} + if self.maximum is not None: + if self.exclusive_maximum: + validation["maximum_ex"] = self.maximum + else: + validation["maximum"] = self.maximum + if self.minimum is not None: + if self.exclusive_minimum: + validation["minimum_ex"] = self.minimum + else: + validation["minimum"] = self.minimum + if self.multiple: + validation["multiple"] = self.multiple + return validation or None + + @property + def default_template_representation_declaration(self) -> str: + default_value = 0 if 
self.docstring_type() == "int" else 0.0 + return self.get_declaration(default_value) + + +class IntegerType(NumberType): + + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + if yaml_data.get("encode") == "string": + self.encode = "str" + + @property + def serialization_type(self) -> str: + return "int" + + def docstring_type(self, **kwargs: Any) -> str: + return "int" + + def type_annotation(self, **kwargs: Any) -> str: + return "int" + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(0) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, int)" + + +class FloatType(NumberType): + @property + def serialization_type(self) -> str: + return "float" + + def docstring_type(self, **kwargs: Any) -> str: + return "float" + + def type_annotation(self, **kwargs: Any) -> str: + return "float" + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(0.0) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, float)" + + +class DecimalType(NumberType): + @property + def serialization_type(self) -> str: + return "decimal" + + def docstring_type(self, **kwargs: Any) -> str: + return "~" + self.type_annotation() + + def type_annotation(self, **kwargs: Any) -> str: + return "decimal.Decimal" + + def docstring_text(self, **kwargs: Any) -> str: + return self.type_annotation() + + def get_declaration(self, value: decimal.Decimal) -> str: + return str(value) + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_import("decimal", ImportType.STDLIB) + return file_import + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(decimal.Decimal("0.0")) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, decimal.Decimal)" + + +class StringType(PrimitiveType): + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.max_length: Optional[int] = yaml_data.get("maxLength") + self.min_length: Optional[int] = ( + yaml_data.get("minLength", 0) if yaml_data.get("maxLength") else yaml_data.get("minLength") + ) + self.pattern: Optional[str] = yaml_data.get("pattern") + + @property + def serialization_constraints(self) -> List[str]: + validation_constraints = [ + f"max_length={self.max_length}" if self.max_length is not None else None, + f"min_length={self.min_length}" if self.min_length is not None else None, + f"pattern={RawString(self.pattern)}" if self.pattern else None, + ] + return [x for x in validation_constraints if x is not None] + + @property + def validation(self) -> Optional[Dict[str, Union[bool, int, str]]]: + validation: Dict[str, Union[bool, int, str]] = {} + if self.max_length is not None: + validation["max_length"] = self.max_length + if self.min_length is not None: + validation["min_length"] = self.min_length + if self.pattern: + # https://github.com/Azure/autorest.python/issues/407 + validation["pattern"] = RawString(self.pattern) # type: ignore + return validation or None + + def get_declaration(self, value) -> str: + return f'"{value}"' + + @property + def serialization_type(self) -> str: + return "str" + + def docstring_type(self, **kwargs: Any) -> str: + return "str" + + @property + def 
instance_check_template(self) -> str: + return "isinstance({}, str)" + + +class DatetimeType(PrimitiveType): + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.encode = ( + "rfc3339" + if yaml_data.get("encode", "date-time") == "date-time" or yaml_data.get("encode", "date-time") == "rfc3339" + else "rfc7231" + ) + + @property + def serialization_type(self) -> str: + formats_to_attribute_type = { + "rfc3339": "iso-8601", + "rfc7231": "rfc-1123", + } + return formats_to_attribute_type[self.encode] + + def docstring_type(self, **kwargs: Any) -> str: + return "~" + self.type_annotation() + + def type_annotation(self, **kwargs: Any) -> str: + return "datetime.datetime" + + def docstring_text(self, **kwargs: Any) -> str: + return "datetime" + + def get_declaration(self, value: datetime.datetime) -> str: + """Could be discussed, since technically I should return a datetime object, + but msrest will do fine. + """ + return f'"{value}"' + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_import("datetime", ImportType.STDLIB) + return file_import + + @property + def default_template_representation_declaration(self): + return self.get_declaration(datetime.datetime(2020, 2, 20)) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, datetime.datetime)" + + def imports_for_sample(self) -> FileImport: + file_import = super().imports_for_sample() + file_import.add_import("isodate", ImportType.STDLIB) + return file_import + + @staticmethod + def serialize_sample_value(value: Any) -> str: + return f"isodate.parse_datetime({repr(value)})" + + +class TimeType(PrimitiveType): + @property + def serialization_type(self) -> str: + return "time" + + def docstring_type(self, **kwargs: Any) -> str: + return "~" + self.type_annotation() + + def type_annotation(self, **kwargs: Any) -> str: + return "datetime.time" + + def docstring_text(self, **kwargs: Any) -> str: + return "time" + + def get_declaration(self, value: datetime.time) -> str: + """Could be discussed, since technically I should return a time object, + but msrest will do fine. + """ + return f'"{value}"' + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_import("datetime", ImportType.STDLIB) + return file_import + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(datetime.time(12, 30, 0)) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, datetime.time)" + + def imports_for_sample(self) -> FileImport: + file_import = super().imports_for_sample() + file_import.add_import("isodate", ImportType.STDLIB) + return file_import + + @staticmethod + def serialize_sample_value(value: Any) -> str: + return f"isodate.parse_time({repr(value)})" + + +class UnixTimeType(PrimitiveType): + @property + def encode(self) -> str: + return "unix-timestamp" + + @property + def serialization_type(self) -> str: + return "unix-time" + + def docstring_type(self, **kwargs: Any) -> str: + return "~" + self.type_annotation() + + def type_annotation(self, **kwargs: Any) -> str: + return "datetime.datetime" + + def docstring_text(self, **kwargs: Any) -> str: + return "datetime" + + def get_declaration(self, value: datetime.datetime) -> str: + """Could be discussed, since technically I should return a datetime object, + but msrest will do fine. 
+ """ + return f'"{value}"' + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_import("datetime", ImportType.STDLIB) + return file_import + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(datetime.datetime(2020, 2, 20)) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, datetime.time)" + + def imports_for_sample(self) -> FileImport: + file_import = super().imports_for_sample() + file_import.add_import("datetime", ImportType.STDLIB) + return file_import + + @staticmethod + def serialize_sample_value(value: Any) -> str: + return f"datetime.datetime.fromtimestamp({repr(value)}, datetime.timezone.utc)" + + +class DateType(PrimitiveType): + @property + def serialization_type(self) -> str: + return "date" + + def docstring_type(self, **kwargs: Any) -> str: + return "~" + self.type_annotation() + + def type_annotation(self, **kwargs: Any) -> str: + return "datetime.date" + + def docstring_text(self, **kwargs: Any) -> str: + return "date" + + def get_declaration(self, value: datetime.date) -> str: + """Could be discussed, since technically I should return a datetime object, + but msrest will do fine. + """ + return f'"{value}"' + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_import("datetime", ImportType.STDLIB) + return file_import + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(datetime.date(2020, 2, 20)) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, datetime.date)" + + def imports_for_sample(self) -> FileImport: + file_import = super().imports_for_sample() + file_import.add_import("isodate", ImportType.STDLIB) + return file_import + + @staticmethod + def serialize_sample_value(value: Any) -> str: + return f"isodate.parse_date({repr(value)})" + + +class DurationType(PrimitiveType): + @property + def serialization_type(self) -> str: + return "duration" + + def docstring_type(self, **kwargs: Any) -> str: + return "~" + self.type_annotation() + + def type_annotation(self, **kwargs: Any) -> str: + return "datetime.timedelta" + + def docstring_text(self, **kwargs: Any) -> str: + return "timedelta" + + def get_declaration(self, value: datetime.timedelta) -> str: + """Could be discussed, since technically I should return a datetime object, + but msrest will do fine. 
+ """ + return f'"{value}"' + + def imports(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_import("datetime", ImportType.STDLIB) + return file_import + + @property + def default_template_representation_declaration(self) -> str: + return self.get_declaration(datetime.timedelta(1)) + + @property + def instance_check_template(self) -> str: + return "isinstance({}, datetime.timedelta)" + + def imports_for_sample(self) -> FileImport: + file_import = super().imports_for_sample() + file_import.add_import("isodate", ImportType.STDLIB) + return file_import + + @staticmethod + def serialize_sample_value(value: Any) -> str: + return f"isodate.parse_duration({repr(value)})" + + +class ByteArraySchema(PrimitiveType): + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.encode = yaml_data.get("encode", "base64") + + @property + def serialization_type(self) -> str: + if self.encode == "base64url": + return "base64" + return "bytearray" + + def docstring_type(self, **kwargs: Any) -> str: + return "bytes" + + def get_declaration(self, value: str) -> str: + return f'bytes("{value}", encoding="utf-8")' + + @property + def instance_check_template(self) -> str: + return "isinstance({}, bytes)" + + +class SdkCoreType(PrimitiveType): + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.name = yaml_data.get("name", "") + self.submodule = yaml_data.get("submodule", "") + + def docstring_type(self, **kwargs: Any) -> str: + return f"~{self.code_model.core_library}.{self.type_annotation(**kwargs)}" + + def type_annotation(self, **kwargs: Any) -> str: + return self.name + + def imports(self, **kwargs: Any) -> FileImport: + file_import = super().imports(**kwargs) + file_import.add_submodule_import(self.submodule, self.name, ImportType.SDKCORE) + return file_import + + @property + def instance_check_template(self) -> str: + return f"isinstance({{}}, {self.name})" + + @property + def serialization_type(self) -> str: + return self.name + + +class MultiPartFileType(PrimitiveType): + def __init__(self, yaml_data: Dict[str, Any], code_model: "CodeModel") -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.name = "FileType" + + def type_annotation(self, **kwargs: Any) -> str: + return self.name + + def docstring_type(self, **kwargs: Any) -> str: + return f"~{self.code_model.namespace}._vendor.{self.name}" + + def imports(self, **kwargs: Any) -> FileImport: + file_import = super().imports(**kwargs) + relative_path = "..." if kwargs.get("async_mode") else ".." + file_import.add_submodule_import(f"{relative_path}_vendor", self.name, ImportType.LOCAL) + return file_import + + @property + def default_template_representation_declaration(self) -> str: + return '"filetype"' if self.code_model.for_test else "filetype" + + @property + def instance_check_template(self) -> str: + return f"isinstance({{}}, {self.name})" diff --git a/packages/http-client-python/generator/pygen/codegen/models/property.py b/packages/http-client-python/generator/pygen/codegen/models/property.py new file mode 100644 index 0000000000..963437d94d --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/property.py @@ -0,0 +1,170 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Any, Dict, Optional, TYPE_CHECKING, List, cast, Union + +from .base import BaseModel +from .constant_type import ConstantType +from .enum_type import EnumType +from .base import BaseType +from .imports import FileImport, ImportType +from .utils import add_to_description, add_to_pylint_disable + +if TYPE_CHECKING: + from .code_model import CodeModel + from .model_type import ModelType + + +class Property(BaseModel): # pylint: disable=too-many-instance-attributes + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + type: BaseType, + ) -> None: + super().__init__(yaml_data, code_model) + self.wire_name: str = self.yaml_data["wireName"] + self.client_name: str = self.yaml_data["clientName"] + self.type = type + self.optional: bool = self.yaml_data["optional"] + self.readonly: bool = self.yaml_data.get("readonly", False) + self.visibility: List[str] = self.yaml_data.get("visibility", []) + self.is_polymorphic: bool = self.yaml_data.get("isPolymorphic", False) + self.is_discriminator: bool = yaml_data.get("isDiscriminator", False) + self.client_default_value = yaml_data.get("clientDefaultValue", None) + if self.client_default_value is None: + self.client_default_value = self.type.client_default_value + self.flattened_names: List[str] = yaml_data.get("flattenedNames", []) + self.is_multipart_file_input: bool = yaml_data.get("isMultipartFileInput", False) + self.flatten = self.yaml_data.get("flatten", False) and not getattr(self.type, "flattened_property", False) + + def pylint_disable(self) -> str: + retval: str = "" + if self.yaml_data.get("pylintDisable"): + retval = add_to_pylint_disable(retval, self.yaml_data["pylintDisable"]) + return retval + + def description(self, *, is_operation_file: bool) -> str: + from .model_type import ModelType + + description = self.yaml_data.get("description", "") + if not (self.optional or self.client_default_value): + description = add_to_description(description, "Required.") + # don't want model type documentation as part of property doc + type_description = ( + "" if isinstance(self.type, ModelType) else self.type.description(is_operation_file=is_operation_file) + ) + return add_to_description(description, type_description) + + @property + def client_default_value_declaration(self) -> str: + if self.client_default_value is not None: + return self.get_declaration(self.client_default_value) + if self.type.client_default_value is not None: + return self.get_declaration(self.type.client_default_value) + return "None" + + @property + def constant(self) -> bool: + # this bool doesn't consider you to be constant if you are a discriminator + # you also have to be required to be considered a constant + return isinstance(self.type, ConstantType) and not self.optional and not self.is_discriminator + + @property + def is_input(self): + return not (self.constant or self.readonly or self.is_discriminator) + + @property + def serialization_type(self) -> str: + return self.type.serialization_type + + @property + def msrest_deserialization_key(self) -> str: + return self.type.msrest_deserialization_key + + @property + def is_enum_discriminator(self) -> bool: + return self.is_discriminator and self.type.type == "enum" + + @property + def is_base_discriminator(self) -> bool: + """If this discriminator is on the base model for polymorphic inheritance""" + 
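# A base discriminator carries no concrete value of its own: each subclass pins the discriminator to a constant (or, for enum discriminators, a default), so a missing value identifies the polymorphic base model. +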
if self.is_enum_discriminator: + return self.is_polymorphic and self.client_default_value is None + return self.is_discriminator and self.is_polymorphic and cast(ConstantType, self.type).value is None + + @property + def xml_metadata(self) -> Optional[Dict[str, Union[str, bool]]]: + return self.yaml_data.get("xmlMetadata") + + def type_annotation(self, *, is_operation_file: bool = False) -> str: + if self.is_base_discriminator: + return "str" + types_type_annotation = self.type.type_annotation(is_operation_file=is_operation_file) + if self.optional and self.client_default_value is None: + return f"Optional[{types_type_annotation}]" + return types_type_annotation + + def get_declaration(self, value: Any = None) -> Any: + return self.type.get_declaration(value) + + def get_json_template_representation( + self, + *, + client_default_value_declaration: Optional[str] = None, + ) -> Any: + if self.client_default_value: + client_default_value_declaration = self.get_declaration(self.client_default_value) + # make sure there is no \n otherwise the json template will be invalid + return self.type.get_json_template_representation( + client_default_value_declaration=client_default_value_declaration, + ) + + def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None: + from .model_type import ModelType + + if isinstance(self.type, ModelType): + self.type.get_polymorphic_subtypes(polymorphic_subtypes) + + @property + def validation(self) -> Optional[Dict[str, Any]]: + retval: Dict[str, Any] = {} + if not self.optional: + retval["required"] = True + if self.readonly: + retval["readonly"] = True + if self.constant: + retval["constant"] = True + retval.update(self.type.validation or {}) + return retval or None + + def imports(self, **kwargs) -> FileImport: + file_import = FileImport(self.code_model) + if self.is_discriminator and isinstance(self.type, EnumType): + return file_import + file_import.merge(self.type.imports(**kwargs, relative_path="..", model_typing=True)) + if self.optional and self.client_default_value is None: + file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB) + if self.code_model.options["models_mode"] == "dpg": + file_import.add_submodule_import( + ".._model_base", + "rest_discriminator" if self.is_discriminator else "rest_field", + ImportType.LOCAL, + ) + return file_import + + @classmethod + def from_yaml( + cls, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + ) -> "Property": + from . import build_type # pylint: disable=import-outside-toplevel + + return cls( + yaml_data=yaml_data, + code_model=code_model, + type=build_type(yaml_data["type"], code_model), + ) diff --git a/packages/http-client-python/generator/pygen/codegen/models/request_builder.py b/packages/http-client-python/generator/pygen/codegen/models/request_builder.py new file mode 100644 index 0000000000..81e00d18b4 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/request_builder.py @@ -0,0 +1,189 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import ( + Any, + Callable, + Dict, + List, + TypeVar, + TYPE_CHECKING, + Union, + Optional, +) +from abc import abstractmethod + +from .base_builder import BaseBuilder +from .utils import add_to_pylint_disable +from .parameter_list import ( + RequestBuilderParameterList, + OverloadedRequestBuilderParameterList, +) +from .imports import FileImport, ImportType, TypingSection, MsrestImportType +from ...utils import NAME_LENGTH_LIMIT + +if TYPE_CHECKING: + from .code_model import CodeModel + from .client import Client + +ParameterListType = TypeVar( + "ParameterListType", + bound=Union[RequestBuilderParameterList, OverloadedRequestBuilderParameterList], +) + + +class RequestBuilderBase(BaseBuilder[ParameterListType, List["RequestBuilder"]]): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + name: str, + parameters: ParameterListType, + *, + overloads: Optional[List["RequestBuilder"]] = None, + ) -> None: + super().__init__( + code_model=code_model, + client=client, + yaml_data=yaml_data, + name=name, + parameters=parameters, + overloads=overloads, + ) + self.overloads: List["RequestBuilder"] = overloads or [] + self.url: str = yaml_data["url"] + self.method: str = yaml_data["method"] + self.want_tracing = False + + @property + def has_form_data_body(self): + return self.parameters.has_form_data_body + + @property + def is_lro(self) -> bool: + return self.yaml_data.get("discriminator") in ("lro", "lropaging") + + def pylint_disable(self, async_mode: bool) -> str: + if len(self.name) > NAME_LENGTH_LIMIT: + return add_to_pylint_disable("", "name-too-long") + return "" + + def response_type_annotation(self, **kwargs) -> str: + return "HttpRequest" + + def response_docstring_text(self, **kwargs) -> str: + return ( + f"Returns an :class:`~{self.response_docstring_type()}` that you will pass to the client's " + + "`send_request` method. See https://aka.ms/azsdk/dpcodegen/python/send_request for how to " + + "incorporate this response into your code flow." + ) + + def response_docstring_type(self, **kwargs) -> str: + return f"~{self.code_model.core_library}.rest.HttpRequest" + + def imports(self) -> FileImport: + file_import = FileImport(self.code_model) + relative_path = ".." + if not self.code_model.options["builders_visibility"] == "embedded" and self.group_name: + relative_path = "..." if self.group_name else ".." + if self.abstract: + return file_import + for parameter in self.parameters.method: + file_import.merge(parameter.imports(async_mode=False, relative_path=relative_path, operation=self)) + + file_import.add_submodule_import( + "rest", + "HttpRequest", + ImportType.SDKCORE, + ) + + if self.parameters.headers or self.parameters.query: + file_import.add_submodule_import( + "utils", + "case_insensitive_dict", + ImportType.SDKCORE, + ) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB, typing_section=TypingSection.CONDITIONAL) + file_import.add_msrest_import( + relative_path=( + "..." + if (not self.code_model.options["builders_visibility"] == "embedded" and self.group_name) + else ".." 
+ ), + msrest_import_type=MsrestImportType.Serializer, + typing_section=TypingSection.REGULAR, + ) + if self.overloads and self.code_model.options["builders_visibility"] != "embedded": + file_import.add_submodule_import("typing", "overload", ImportType.STDLIB) + return file_import + + @staticmethod + @abstractmethod + def parameter_list_type() -> Callable[[Dict[str, Any], "CodeModel"], ParameterListType]: ... + + @classmethod + def get_name( + cls, + name: str, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + ) -> str: + additional_mark = "" + if code_model.options["combine_operation_files"] and code_model.options["builders_visibility"] == "embedded": + additional_mark = yaml_data["groupName"] or client.yaml_data["builderPadName"] + names = [ + "build", + additional_mark, + name, + "request", + ] + return "_".join([n for n in names if n]) + + @classmethod + def from_yaml( + cls, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + client: "Client", + ): + # When combining embedded builders into one operation file, we need to avoid duplicated build function names. + # Adding the operation group name is an effective way to do that. + + overloads = [ + RequestBuilder.from_yaml(rb_yaml_data, code_model, client) + for rb_yaml_data in yaml_data.get("overloads", []) + ] + parameter_list = cls.parameter_list_type()(yaml_data, code_model) + + return cls( + yaml_data=yaml_data, + code_model=code_model, + client=client, + name=cls.get_name(yaml_data["name"], yaml_data, code_model, client), + parameters=parameter_list, + overloads=overloads, + ) + + +class RequestBuilder(RequestBuilderBase[RequestBuilderParameterList]): + @staticmethod + def parameter_list_type() -> Callable[[Dict[str, Any], "CodeModel"], RequestBuilderParameterList]: + return RequestBuilderParameterList.from_yaml + + +class OverloadedRequestBuilder(RequestBuilderBase[OverloadedRequestBuilderParameterList]): + @staticmethod + def parameter_list_type() -> Callable[[Dict[str, Any], "CodeModel"], OverloadedRequestBuilderParameterList]: + return OverloadedRequestBuilderParameterList.from_yaml + + +def get_request_builder( + yaml_data: Dict[str, Any], code_model: "CodeModel", client: "Client" +) -> Union[RequestBuilder, OverloadedRequestBuilder]: + if yaml_data.get("overloads"): + return OverloadedRequestBuilder.from_yaml(yaml_data, code_model, client) + return RequestBuilder.from_yaml(yaml_data, code_model, client) diff --git a/packages/http-client-python/generator/pygen/codegen/models/request_builder_parameter.py b/packages/http-client-python/generator/pygen/codegen/models/request_builder_parameter.py new file mode 100644 index 0000000000..c471ff029d --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/request_builder_parameter.py @@ -0,0 +1,115 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information.
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING, Any, Dict +from .parameter import ( + ParameterLocation, + ParameterMethodLocation, + Parameter, + BodyParameter, +) +from .base import BaseType +from .primitive_types import BinaryType, StringType +from .combined_type import CombinedType + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class RequestBuilderBodyParameter(BodyParameter): + """Body parameter for RequestBuilders""" + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + if ( + isinstance(self.type, (BinaryType, StringType)) + or any("xml" in ct for ct in self.content_types) + or self.code_model.options["models_mode"] == "dpg" + ): + self.client_name = "content" + else: + self.client_name = "json" + + def type_annotation(self, **kwargs: Any) -> str: + if self.type.is_xml: + return "Any" # xml type technically not in type signature for HttpRequest content param + return super().type_annotation(**kwargs) + + @property + def in_method_signature(self) -> bool: + return ( + super().in_method_signature and not self.is_partial_body and self.code_model.options["models_mode"] != "dpg" + ) + + @property + def method_location(self) -> ParameterMethodLocation: + return ( + ParameterMethodLocation.KWARG + if (self.constant or isinstance(self.type, CombinedType)) + else ParameterMethodLocation.KEYWORD_ONLY + ) + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "RequestBuilderBodyParameter": + return super().from_yaml(yaml_data, code_model) # type: ignore + + @property + def name_in_high_level_operation(self) -> str: + if self.client_name == "json": + return "_json" + return "_content" + + +class RequestBuilderParameter(Parameter): + """Basic RequestBuilder Parameter.""" + + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + type: BaseType, + ) -> None: + super().__init__(yaml_data, code_model, type) + # we don't want any default content type behavior in request builder + if self.is_content_type: + self.client_default_value = None + if self.grouped_by and self.client_name[0] == "_": + # we don't want hidden parameters for grouped by in request builders + self.client_name = self.client_name[1:] + + @property + def hide_in_operation_signature(self) -> bool: + return False + + @property + def in_method_signature(self) -> bool: + if self.grouped_by and not self.in_flattened_body: + return True + return super().in_method_signature and not ( + self.location == ParameterLocation.ENDPOINT_PATH or self.in_flattened_body or self.grouper + ) + + @property + def full_client_name(self) -> str: + return self.client_name + + @property + def method_location(self) -> ParameterMethodLocation: + super_method_location = super().method_location + if super_method_location == ParameterMethodLocation.KWARG: + return super_method_location + if self.in_overridden and super_method_location == ParameterMethodLocation.KEYWORD_ONLY: + return ParameterMethodLocation.KWARG + if self.location != ParameterLocation.PATH: + return ParameterMethodLocation.KEYWORD_ONLY + return super_method_location + + @property + def name_in_high_level_operation(self) -> str: + if self.grouped_by: + return f"_{self.client_name}" + if self.implementation == "Client": + return f"self._config.{self.client_name}" + return self.client_name diff --git a/packages/http-client-python/generator/pygen/codegen/models/response.py
b/packages/http-client-python/generator/pygen/codegen/models/response.py new file mode 100644 index 0000000000..19a7d62e94 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/response.py @@ -0,0 +1,348 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Dict, Optional, List, Any, TYPE_CHECKING, Union + +from .base import BaseModel +from .base import BaseType +from .imports import FileImport, ImportType, TypingSection +from .primitive_types import BinaryType, BinaryIteratorType, ByteArraySchema +from .dictionary_type import DictionaryType +from .list_type import ListType +from .model_type import ModelType +from .combined_type import CombinedType + +if TYPE_CHECKING: + from .code_model import CodeModel + + +class ResponseHeader(BaseModel): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + type: BaseType, + ) -> None: + super().__init__(yaml_data, code_model) + self.wire_name: str = yaml_data["wireName"] + self.type = type + + @property + def serialization_type(self) -> str: + return self.type.serialization_type + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "ResponseHeader": + from . import build_type + + return cls( + yaml_data=yaml_data, + code_model=code_model, + type=build_type(yaml_data["type"], code_model), + ) + + +class Response(BaseModel): + def __init__( + self, + yaml_data: Dict[str, Any], + code_model: "CodeModel", + *, + headers: Optional[List[ResponseHeader]] = None, + type: Optional[BaseType] = None, + ) -> None: + super().__init__(yaml_data=yaml_data, code_model=code_model) + self.status_codes: List[Union[int, str]] = yaml_data["statusCodes"] + self.headers = headers or [] + self.type = type + self.nullable = yaml_data.get("nullable") + self.default_content_type = yaml_data.get("defaultContentType") + + @property + def result_property(self) -> str: + field = self.yaml_data.get("resultProperty") + if field: + return f'.get("{field}")' + return "" + + def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None: + if self.type: + self.type.get_polymorphic_subtypes(polymorphic_subtypes) + + def get_json_template_representation(self) -> Any: + if not self.type: + return None + if not isinstance(self.type, (DictionaryType, ListType, ModelType)): + return None + return self.type.get_json_template_representation() + + @property + def is_stream_response(self) -> bool: + """Is the response expected to be streamable, like a download.""" + retval = isinstance(self.type, BinaryIteratorType) or ( + isinstance(self.type, ByteArraySchema) + and bool(self.default_content_type) + and self.default_content_type != "application/json" + ) + return retval + + @property + def serialization_type(self) -> str: + if self.type: + return self.type.serialization_type + return "None" + + def type_annotation(self, **kwargs: Any) -> str: + if self.type: + kwargs["is_operation_file"] = True + type_annotation = self.type.type_annotation(**kwargs) + if self.nullable: + return f"Optional[{type_annotation}]" + return type_annotation + return "None" + + def docstring_text(self, **kwargs: Any) -> str: + if self.nullable and self.type: + return f"{self.type.docstring_text(**kwargs)} or None" + return 
self.type.docstring_text(**kwargs) if self.type else "None" + + def docstring_type(self, **kwargs: Any) -> str: + if self.nullable and self.type: + return f"{self.type.docstring_type(**kwargs)} or None" + return self.type.docstring_type(**kwargs) if self.type else "None" + + def _imports_shared(self, **kwargs: Any) -> FileImport: + file_import = FileImport(self.code_model) + if self.type: + file_import.merge(self.type.imports(**kwargs)) + if self.nullable: + file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB) + if isinstance(self.type, CombinedType) and self.type.name: + async_mode = kwargs.get("async_mode", False) + file_import.add_submodule_import( + "..." if async_mode else "..", + "_types", + ImportType.LOCAL, + TypingSection.TYPING, + ) + return file_import + + def imports(self, **kwargs: Any) -> FileImport: + return self._imports_shared(**kwargs) + + def imports_for_multiapi(self, **kwargs: Any) -> FileImport: + return self._imports_shared(**kwargs) + + def _get_import_type(self, input_path: str) -> ImportType: + # helper function to return imports for responses based off + # of whether we're importing from the core library, or users + # are customizing responses + return ImportType.SDKCORE if self.code_model.core_library.split(".")[0] in input_path else ImportType.THIRDPARTY + + @classmethod + def from_yaml(cls, yaml_data: Dict[str, Any], code_model: "CodeModel") -> "Response": + type = code_model.lookup_type(id(yaml_data["type"])) if yaml_data.get("type") else None + # use ByteIteratorType if we are returning a binary type + default_content_type = yaml_data.get("defaultContentType", "application/json") + if isinstance(type, BinaryType) or ( + isinstance(type, ByteArraySchema) and default_content_type != "application/json" + ): + type = BinaryIteratorType(type.yaml_data, type.code_model) + return cls( + yaml_data=yaml_data, + code_model=code_model, + headers=[ResponseHeader.from_yaml(header, code_model) for header in yaml_data["headers"]], + type=type, + ) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.status_codes}>" + + +class PagingResponse(Response): + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.item_type = self.code_model.lookup_type(id(self.yaml_data["itemType"])) + self.pager_sync: str = self.yaml_data.get("pagerSync") or f"{self.code_model.core_library}.paging.ItemPaged" + default_paging_submodule = f"{'async_' if self.code_model.is_azure_flavor else ''}paging" + self.pager_async: str = ( + self.yaml_data.get("pagerAsync") + or f"{self.code_model.core_library}.{default_paging_submodule}.AsyncItemPaged" + ) + + def get_polymorphic_subtypes(self, polymorphic_subtypes: List["ModelType"]) -> None: + return self.item_type.get_polymorphic_subtypes(polymorphic_subtypes) + + def get_json_template_representation(self) -> Any: + return self.item_type.get_json_template_representation() + + def get_pager_import_path(self, async_mode: bool) -> str: + return ".".join(self.get_pager_path(async_mode).split(".")[:-1]) + + def get_pager_path(self, async_mode: bool) -> str: + return self.pager_async if async_mode else self.pager_sync + + def get_pager(self, async_mode: bool) -> str: + return self.get_pager_path(async_mode).split(".")[-1] + + def type_annotation(self, **kwargs: Any) -> str: + iterable = "AsyncIterable" if kwargs["async_mode"] else "Iterable" + return f"{iterable}[{self.item_type.type_annotation(**kwargs)}]" + + def docstring_text(self, **kwargs: Any) -> str: + base_description = "An 
iterator like instance of " + if not self.code_model.options["version_tolerant"]: + base_description += "either " + return base_description + self.item_type.docstring_text(**kwargs) + + def docstring_type(self, **kwargs: Any) -> str: + return f"~{self.get_pager_path(kwargs['async_mode'])}[{self.item_type.docstring_type(**kwargs)}]" + + def _imports_shared(self, **kwargs: Any) -> FileImport: + file_import = super()._imports_shared(**kwargs) + async_mode = kwargs.get("async_mode", False) + pager = self.get_pager(async_mode) + pager_path = self.get_pager_import_path(async_mode) + + file_import.add_submodule_import(pager_path, pager, self._get_import_type(pager_path)) + return file_import + + def imports(self, **kwargs: Any) -> FileImport: + file_import = self._imports_shared(**kwargs) + async_mode = kwargs.get("async_mode") + if async_mode: + file_import.add_submodule_import( + f"{'async_' if self.code_model.is_azure_flavor else ''}paging", + "AsyncList", + ImportType.SDKCORE, + ) + + return file_import + + def imports_for_multiapi(self, **kwargs: Any) -> FileImport: + return self._imports_shared(**kwargs) + + +class LROResponse(Response): + def get_poller_path(self, async_mode: bool) -> str: + return self.yaml_data["pollerAsync"] if async_mode else self.yaml_data["pollerSync"] + + def get_poller(self, async_mode: bool) -> str: + """Get the name of the poller. Default is LROPoller / AsyncLROPoller""" + return self.get_poller_path(async_mode).split(".")[-1] + + def get_polling_method_path(self, async_mode: bool) -> str: + """Get the full path of the polling method. Defaults are the azure-core polling methods""" + return self.yaml_data["pollingMethodAsync"] if async_mode else self.yaml_data["pollingMethodSync"] + + def get_polling_method(self, async_mode: bool) -> str: + """Get the default polling method""" + return self.get_polling_method_path(async_mode).split(".")[-1] + + @staticmethod + def get_no_polling_method_path(async_mode: bool) -> str: + """Get the path of the default no-polling method""" + return f"azure.core.polling.{'Async' if async_mode else ''}NoPolling" + + def get_no_polling_method(self, async_mode: bool) -> str: + """Get the default no-polling method""" + return self.get_no_polling_method_path(async_mode).split(".")[-1] + + @staticmethod + def get_base_polling_method_path(async_mode: bool) -> str: + """Get the base polling method path. 
Used in docstrings and type annotations.""" + return f"azure.core.polling.{'Async' if async_mode else ''}PollingMethod" + + def get_base_polling_method(self, async_mode: bool) -> str: + """Get the base polling method.""" + return self.get_base_polling_method_path(async_mode).split(".")[-1] + + def type_annotation(self, **kwargs: Any) -> str: + return f"{self.get_poller(kwargs.get('async_mode', False))}[{super().type_annotation(**kwargs)}]" + + def docstring_type(self, **kwargs: Any) -> str: + return f"~{self.get_poller_path(kwargs.get('async_mode', False))}[{super().docstring_type(**kwargs)}]" + + def docstring_text(self, **kwargs) -> str: + super_text = super().docstring_text(**kwargs) + base_description = f"An instance of {self.get_poller(kwargs.get('async_mode', False))} that returns " + if not self.code_model.options["version_tolerant"]: + base_description += "either " + return base_description + super_text + + def _imports_shared(self, **kwargs: Any) -> FileImport: + file_import = super()._imports_shared(**kwargs) + async_mode = kwargs["async_mode"] + poller_import_path = ".".join(self.get_poller_path(async_mode).split(".")[:-1]) + poller = self.get_poller(async_mode) + file_import.add_submodule_import(poller_import_path, poller, self._get_import_type(poller_import_path)) + return file_import + + def imports(self, **kwargs: Any) -> FileImport: + file_import = self._imports_shared(**kwargs) + async_mode = kwargs["async_mode"] + + default_polling_method_import_path = ".".join(self.get_polling_method_path(async_mode).split(".")[:-1]) + default_polling_method = self.get_polling_method(async_mode) + file_import.add_submodule_import( + default_polling_method_import_path, + default_polling_method, + self._get_import_type(default_polling_method_import_path), + ) + default_no_polling_method_import_path = ".".join(self.get_no_polling_method_path(async_mode).split(".")[:-1]) + default_no_polling_method = self.get_no_polling_method(async_mode) + file_import.add_submodule_import( + default_no_polling_method_import_path, + default_no_polling_method, + self._get_import_type(default_no_polling_method_import_path), + ) + + base_polling_method_import_path = ".".join(self.get_base_polling_method_path(async_mode).split(".")[:-1]) + base_polling_method = self.get_base_polling_method(async_mode) + file_import.add_submodule_import( + base_polling_method_import_path, + base_polling_method, + self._get_import_type(base_polling_method_import_path), + ) + return file_import + + def imports_for_multiapi(self, **kwargs: Any) -> FileImport: + return self._imports_shared(**kwargs) + + +class LROPagingResponse(LROResponse, PagingResponse): + def type_annotation(self, **kwargs: Any) -> str: + paging_type_annotation = PagingResponse.type_annotation(self, **kwargs) + return f"{self.get_poller(kwargs.get('async_mode', False))}[{paging_type_annotation}]" + + def docstring_type(self, **kwargs: Any) -> str: + paging_docstring_type = PagingResponse.docstring_type(self, **kwargs) + return f"~{self.get_poller_path(kwargs.get('async_mode', False))}[{paging_docstring_type}]" + + def docstring_text(self, **kwargs) -> str: + base_description = "An instance of LROPoller that returns an iterator like instance of " + if not self.code_model.options["version_tolerant"]: + base_description += "either " + return base_description + Response.docstring_text(self) + + def imports_for_multiapi(self, **kwargs: Any) -> FileImport: + file_import = LROResponse.imports_for_multiapi(self, **kwargs) + 
file_import.merge(PagingResponse.imports_for_multiapi(self, **kwargs)) + return file_import + + def imports(self, **kwargs: Any) -> FileImport: + file_import = LROResponse.imports(self, **kwargs) + file_import.merge(PagingResponse.imports(self, **kwargs)) + return file_import + + +def get_response(yaml_data: Dict[str, Any], code_model: "CodeModel") -> Response: + if yaml_data["discriminator"] == "lropaging": + return LROPagingResponse.from_yaml(yaml_data, code_model) + if yaml_data["discriminator"] == "lro": + return LROResponse.from_yaml(yaml_data, code_model) + if yaml_data["discriminator"] == "paging": + return PagingResponse.from_yaml(yaml_data, code_model) + return Response.from_yaml(yaml_data, code_model) diff --git a/packages/http-client-python/generator/pygen/codegen/models/utils.py b/packages/http-client-python/generator/pygen/codegen/models/utils.py new file mode 100644 index 0000000000..e8472382e2 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/models/utils.py @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import TypeVar, Dict + +T = TypeVar("T") +OrderedSet = Dict[T, None] + + +def add_to_description(description: str, entry: str) -> str: + if description: + return f"{description} {entry}" + return entry + + +def add_to_pylint_disable(curr_str: str, entry: str) -> str: + if curr_str: + return f"{curr_str},{entry}" + return f" # pylint: disable={entry}" diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/__init__.py b/packages/http-client-python/generator/pygen/codegen/serializers/__init__.py new file mode 100644 index 0000000000..c6ad4cee49 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/__init__.py @@ -0,0 +1,574 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +from typing import List, Optional, Any, Union +from pathlib import Path +from jinja2 import PackageLoader, Environment, FileSystemLoader, StrictUndefined + +from ... 
import ReaderAndWriter +from ..models import ( + OperationGroup, + RequestBuilder, + OverloadedRequestBuilder, + CodeModel, + Client, +) +from .enum_serializer import EnumSerializer +from .general_serializer import GeneralSerializer +from .model_init_serializer import ModelInitSerializer +from .model_serializer import DpgModelSerializer, MsrestModelSerializer +from .operations_init_serializer import OperationsInitSerializer +from .operation_groups_serializer import OperationGroupsSerializer +from .metadata_serializer import MetadataSerializer +from .request_builders_serializer import RequestBuildersSerializer +from .patch_serializer import PatchSerializer +from .sample_serializer import SampleSerializer +from .test_serializer import TestSerializer, TestGeneralSerializer +from .types_serializer import TypesSerializer +from ...utils import to_snake_case +from .._utils import VALID_PACKAGE_MODE +from .utils import ( + extract_sample_name, + get_namespace_from_package_name, + get_namespace_config, + get_all_operation_groups_recursively, +) + +_LOGGER = logging.getLogger(__name__) + +__all__ = [ + "JinjaSerializer", +] + +_PACKAGE_FILES = [ + "CHANGELOG.md.jinja2", + "dev_requirements.txt.jinja2", + "LICENSE.jinja2", + "MANIFEST.in.jinja2", + "README.md.jinja2", + "setup.py.jinja2", +] + +_REGENERATE_FILES = {"setup.py", "MANIFEST.in"} + + +# extract sub folders. For example, source_file_path is like: +# "xxx/resource-manager/Microsoft.XX/stable/2023-04-01/examples/Compute/createOrUpdate/AKSCompute.json", +# and we want to extract the sub folders after "examples/", which is "compute/create_or_update" +def _sample_output_path(source_file_path: str) -> Path: + posix_path = Path(source_file_path).as_posix() + if "examples/" in posix_path: + after_examples = Path(posix_path.split("examples/", maxsplit=1)[-1]).parent + return Path("/".join([to_snake_case(i) for i in after_examples.parts])) + return Path("") + + +class JinjaSerializer(ReaderAndWriter): + def __init__( + self, + code_model: CodeModel, + *, + output_folder: Union[str, Path], + **kwargs: Any, + ) -> None: + super().__init__(output_folder=output_folder, **kwargs) + self.code_model = code_model + + @property + def has_aio_folder(self) -> bool: + return not self.code_model.options["no_async"] and bool(self.code_model.has_operations) + + @property + def has_operations_folder(self) -> bool: + return self.code_model.options["show_operations"] and bool(self.code_model.has_operations) + + def _serialize_namespace_level(self, env: Environment, namespace_path: Path, clients: List[Client]) -> None: + # if there was a patch file before, we keep it + self._keep_patch_file(namespace_path / Path("_patch.py"), env) + if self.has_aio_folder: + self._keep_patch_file(namespace_path / Path("aio") / Path("_patch.py"), env) + + if self.has_operations_folder: + self._keep_patch_file( + namespace_path / Path(self.code_model.operations_folder_name) / Path("_patch.py"), + env, + ) + if self.has_aio_folder: + self._keep_patch_file( + namespace_path / Path("aio") / Path(self.code_model.operations_folder_name) / Path("_patch.py"), + env, + ) + self._serialize_and_write_top_level_folder(env=env, namespace_path=namespace_path, clients=clients) + + if any(c for c in self.code_model.clients if c.operation_groups): + if self.code_model.options["builders_visibility"] != "embedded": + self._serialize_and_write_rest_layer(env=env, namespace_path=namespace_path) + if self.has_aio_folder: + self._serialize_and_write_aio_top_level_folder( + env=env, + 
namespace_path=namespace_path, + clients=clients, + ) + + if self.has_operations_folder: + self._serialize_and_write_operations_folder(clients, env=env, namespace_path=namespace_path) + if self.code_model.options["multiapi"]: + self._serialize_and_write_metadata(env=env, namespace_path=namespace_path) + if self.code_model.options["package_mode"]: + self._serialize_and_write_package_files(namespace_path=namespace_path) + + if ( + self.code_model.options["show_operations"] + and self.code_model.has_operations + and self.code_model.options["generate_sample"] + ): + self._serialize_and_write_sample(env, namespace_path) + + if ( + self.code_model.options["show_operations"] + and self.code_model.has_operations + and self.code_model.options["generate_test"] + ): + self._serialize_and_write_test(env, namespace_path) + + def serialize(self) -> None: + env = Environment( + loader=PackageLoader("pygen.codegen", "templates"), + keep_trailing_newline=True, + line_statement_prefix="##", + line_comment_prefix="###", + trim_blocks=True, + lstrip_blocks=True, + ) + + namespace_path = ( + Path(".") if self.code_model.options["no_namespace_folders"] else Path(*self._name_space().split(".")) + ) + + p = namespace_path.parent + general_serializer = GeneralSerializer(code_model=self.code_model, env=env, async_mode=False) + while p != Path("."): + # write pkgutil init file + self.write_file( + p / Path("__init__.py"), + general_serializer.serialize_pkgutil_init_file(), + ) + p = p.parent + + # serialize main module + self._serialize_namespace_level( + env, + namespace_path, + [c for c in self.code_model.clients if c.has_operations], + ) + # serialize sub modules + for ( + subnamespace, + clients, + ) in self.code_model.subnamespace_to_clients.items(): + subnamespace_path = namespace_path / Path(subnamespace) + self._serialize_namespace_level(env, subnamespace_path, [c for c in clients if c.has_operations]) + + if self.code_model.options["models_mode"] and (self.code_model.model_types or self.code_model.enums): + self._keep_patch_file(namespace_path / Path("models") / Path("_patch.py"), env) + + if self.code_model.options["models_mode"] and (self.code_model.model_types or self.code_model.enums): + self._serialize_and_write_models_folder(env=env, namespace_path=namespace_path) + if not self.code_model.options["models_mode"]: + # keep models file if users ended up just writing a models file + if self.read_file(namespace_path / Path("models.py")): + self.write_file( + namespace_path / Path("models.py"), + self.read_file(namespace_path / Path("models.py")), + ) + if self.code_model.named_unions: + self.write_file( + namespace_path / Path("_types.py"), + TypesSerializer(code_model=self.code_model, env=env).serialize(), + ) + + def _serialize_and_write_package_files(self, namespace_path: Path) -> None: + root_of_sdk = self._package_root_folder(namespace_path) + if self.code_model.options["package_mode"] in VALID_PACKAGE_MODE: + env = Environment( + loader=PackageLoader("pygen.codegen", "templates/packaging_templates"), + undefined=StrictUndefined, + ) + + package_files = _PACKAGE_FILES + elif Path(self.code_model.options["package_mode"]).exists(): + env = Environment( + loader=FileSystemLoader(str(Path(self.code_model.options["package_mode"]))), + keep_trailing_newline=True, + undefined=StrictUndefined, + ) + package_files = env.list_templates() + else: + return + serializer = GeneralSerializer(self.code_model, env, async_mode=False) + params = self.code_model.options["packaging_files_config"] or {} + for template_name 
in package_files: + if not self.code_model.is_azure_flavor and template_name == "dev_requirements.txt.jinja2": + continue + file = template_name.replace(".jinja2", "") + output_name = root_of_sdk / file + if not self.read_file(output_name) or file in _REGENERATE_FILES: + self.write_file( + output_name, + serializer.serialize_package_file(template_name, **params), + ) + + def _keep_patch_file(self, path_file: Path, env: Environment): + if self.read_file(path_file): + self.write_file(path_file, self.read_file(path_file)) + else: + self.write_file( + path_file, + PatchSerializer(env=env, code_model=self.code_model).serialize(), + ) + + def _serialize_and_write_models_folder(self, env: Environment, namespace_path: Path) -> None: + # Write the models folder + models_path = namespace_path / Path("models") + serializer = DpgModelSerializer if self.code_model.options["models_mode"] == "dpg" else MsrestModelSerializer + if self.code_model.model_types: + self.write_file( + models_path / Path(f"{self.code_model.models_filename}.py"), + serializer(code_model=self.code_model, env=env).serialize(), + ) + if self.code_model.enums: + self.write_file( + models_path / Path(f"{self.code_model.enums_filename}.py"), + EnumSerializer(code_model=self.code_model, env=env).serialize(), + ) + self.write_file( + models_path / Path("__init__.py"), + ModelInitSerializer(code_model=self.code_model, env=env).serialize(), + ) + + def _serialize_and_write_rest_layer(self, env: Environment, namespace_path: Path) -> None: + rest_path = namespace_path / Path(self.code_model.rest_layer_name) + group_names = {rb.group_name for c in self.code_model.clients for rb in c.request_builders} + + for group_name in group_names: + request_builders = [ + r for c in self.code_model.clients for r in c.request_builders if r.group_name == group_name + ] + self._serialize_and_write_single_rest_layer(env, rest_path, request_builders) + if not "" in group_names: + self.write_file( + rest_path / Path("__init__.py"), + self.code_model.options["license_header"], + ) + + def _serialize_and_write_single_rest_layer( + self, + env: Environment, + rest_path: Path, + request_builders: List[Union[RequestBuilder, OverloadedRequestBuilder]], + ) -> None: + group_name = request_builders[0].group_name + output_path = rest_path / Path(group_name) if group_name else rest_path + # write generic request builders file + self.write_file( + output_path / Path("_request_builders.py"), + RequestBuildersSerializer( + code_model=self.code_model, + env=env, + request_builders=request_builders, + ).serialize_request_builders(), + ) + + # write rest init file + self.write_file( + output_path / Path("__init__.py"), + RequestBuildersSerializer( + code_model=self.code_model, + env=env, + request_builders=request_builders, + ).serialize_init(), + ) + + def _serialize_and_write_operations_file( + self, + env: Environment, + clients: List[Client], + namespace_path: Path, + operation_group: Optional[OperationGroup] = None, + ) -> None: + filename = operation_group.filename if operation_group else "_operations" + # write first sync file + operation_group_serializer = OperationGroupsSerializer( + code_model=self.code_model, + clients=clients, + env=env, + async_mode=False, + operation_group=operation_group, + ) + self.write_file( + namespace_path / Path(self.code_model.operations_folder_name) / Path(f"{filename}.py"), + operation_group_serializer.serialize(), + ) + + if self.has_aio_folder: + # write async operation group and operation files + operation_group_async_serializer = 
OperationGroupsSerializer( + code_model=self.code_model, + clients=clients, + env=env, + async_mode=True, + operation_group=operation_group, + ) + self.write_file( + (namespace_path / Path("aio") / Path(self.code_model.operations_folder_name) / Path(f"{filename}.py")), + operation_group_async_serializer.serialize(), + ) + + def _serialize_and_write_operations_folder( + self, clients: List[Client], env: Environment, namespace_path: Path + ) -> None: + # write sync operations init file + operations_init_serializer = OperationsInitSerializer( + code_model=self.code_model, clients=clients, env=env, async_mode=False + ) + self.write_file( + namespace_path / Path(self.code_model.operations_folder_name) / Path("__init__.py"), + operations_init_serializer.serialize(), + ) + + # write async operations init file + if self.has_aio_folder: + operations_async_init_serializer = OperationsInitSerializer( + code_model=self.code_model, clients=clients, env=env, async_mode=True + ) + self.write_file( + namespace_path / Path("aio") / Path(self.code_model.operations_folder_name) / Path("__init__.py"), + operations_async_init_serializer.serialize(), + ) + + if self.code_model.options["combine_operation_files"]: + self._serialize_and_write_operations_file( + env=env, + namespace_path=namespace_path, + clients=clients, + ) + else: + for operation_group in get_all_operation_groups_recursively(self.code_model.clients): + self._serialize_and_write_operations_file( + env=env, + namespace_path=namespace_path, + operation_group=operation_group, + clients=clients, + ) + + def _serialize_and_write_version_file( + self, + namespace_path: Path, + general_serializer: GeneralSerializer, + ): + def _read_version_file(original_version_file_name: str) -> str: + return self.read_file(namespace_path / original_version_file_name) + + def _write_version_file(original_version_file_name: str) -> None: + self.write_file( + namespace_path / Path("_version.py"), + _read_version_file(original_version_file_name), + ) + + keep_version_file = self.code_model.options["keep_version_file"] + if keep_version_file and _read_version_file("_version.py"): + _write_version_file(original_version_file_name="_version.py") + elif keep_version_file and _read_version_file("version.py"): + _write_version_file(original_version_file_name="version.py") + elif self.code_model.options["package_version"]: + self.write_file( + namespace_path / Path("_version.py"), + general_serializer.serialize_version_file(), + ) + + def _serialize_client_and_config_files( + self, + namespace_path: Path, + general_serializer: GeneralSerializer, + async_mode: bool, + clients: List[Client], + ) -> None: + if self.code_model.has_operations: + namespace_path = namespace_path / Path("aio") if async_mode else namespace_path + self.write_file( + namespace_path / Path(f"{self.code_model.client_filename}.py"), + general_serializer.serialize_service_client_file(clients), + ) + self.write_file( + namespace_path / Path("_configuration.py"), + general_serializer.serialize_config_file(clients), + ) + + def _serialize_and_write_top_level_folder( + self, env: Environment, namespace_path: Path, clients: List[Client] + ) -> None: + general_serializer = GeneralSerializer(code_model=self.code_model, env=env, async_mode=False) + + self.write_file( + namespace_path / Path("__init__.py"), + general_serializer.serialize_init_file(clients), + ) + + # Write the service client + self._serialize_client_and_config_files(namespace_path, general_serializer, async_mode=False, clients=clients) + if 
self.code_model.need_vendored_code(async_mode=False): + self.write_file( + namespace_path / Path("_vendor.py"), + general_serializer.serialize_vendor_file(clients), + ) + + self._serialize_and_write_version_file(namespace_path, general_serializer) + + # write the empty py.typed file + self.write_file(namespace_path / Path("py.typed"), "# Marker file for PEP 561.") + + if not self.code_model.options["client_side_validation"] and not self.code_model.options["multiapi"]: + self.write_file( + namespace_path / Path("_serialization.py"), + general_serializer.serialize_serialization_file(), + ) + if self.code_model.options["models_mode"] == "dpg": + self.write_file( + namespace_path / Path("_model_base.py"), + general_serializer.serialize_model_base_file(), + ) + + if any(og for client in self.code_model.clients for og in client.operation_groups if og.need_validation): + self.write_file( + namespace_path / Path("_validation.py"), + general_serializer.serialize_validation_file(), + ) + if self.code_model.options.get("emit_cross_language_definition_file"): + self.write_file( + Path("./apiview_mapping_python.json"), + general_serializer.serialize_cross_language_definition_file(), + ) + + # Write the setup file + if self.code_model.options["basic_setup_py"]: + self.write_file(Path("setup.py"), general_serializer.serialize_setup_file()) + + def _serialize_and_write_aio_top_level_folder( + self, env: Environment, namespace_path: Path, clients: List[Client] + ) -> None: + aio_general_serializer = GeneralSerializer(code_model=self.code_model, env=env, async_mode=True) + + aio_path = namespace_path / Path("aio") + + # Write the __init__ file + self.write_file( + aio_path / Path("__init__.py"), + aio_general_serializer.serialize_init_file(clients), + ) + + # Write the service client + self._serialize_client_and_config_files( + namespace_path, aio_general_serializer, async_mode=True, clients=clients + ) + if self.code_model.need_vendored_code(async_mode=True): + self.write_file( + aio_path / Path("_vendor.py"), + aio_general_serializer.serialize_vendor_file(clients), + ) + + def _serialize_and_write_metadata(self, env: Environment, namespace_path: Path) -> None: + metadata_serializer = MetadataSerializer(self.code_model, env) + self.write_file(namespace_path / Path("_metadata.json"), metadata_serializer.serialize()) + + @property + def _namespace_from_package_name(self) -> str: + return get_namespace_from_package_name(self.code_model.options["package_name"]) + + def _name_space(self) -> str: + if self.code_model.namespace.count(".") >= self._namespace_from_package_name.count("."): + return self.code_model.namespace + + return self._namespace_from_package_name + + # find root folder where "setup.py" is + def _package_root_folder(self, namespace_path: Path) -> Path: + return namespace_path / Path("../" * (self._name_space().count(".") + 1)) + + @property + def _additional_folder(self) -> Path: + namespace_config = get_namespace_config(self.code_model.namespace, self.code_model.options["multiapi"]) + num_of_namespace = namespace_config.count(".") + 1 + num_of_package_namespace = self._namespace_from_package_name.count(".") + 1 + if num_of_namespace > num_of_package_namespace: + return Path("/".join(namespace_config.split(".")[num_of_package_namespace:])) + return Path("") + + def _serialize_and_write_sample(self, env: Environment, namespace_path: Path): + out_path = self._package_root_folder(namespace_path) / Path("generated_samples") + for client in self.code_model.clients: + for op_group in 
client.operation_groups: + for operation in op_group.operations: + if ( + self.code_model.options["multiapi"] + and operation.api_versions[0] != self.code_model.options["default_api_version"] + ): + continue + samples = operation.yaml_data.get("samples") + if not samples or operation.name.startswith("_"): + continue + for value in samples.values(): + file = value.get("x-ms-original-file", "sample.json") + file_name = to_snake_case(extract_sample_name(file)) + ".py" + try: + self.write_file( + out_path / self._additional_folder / _sample_output_path(file) / file_name, + SampleSerializer( + code_model=self.code_model, + env=env, + operation_group=op_group, + operation=operation, + sample=value, + file_name=file_name, + ).serialize(), + ) + except Exception as e: # pylint: disable=broad-except + # sample generation shall not block code generation, so just log error + log_error = f"error happens in sample {file}: {e}" + _LOGGER.error(log_error) + + def _serialize_and_write_test(self, env: Environment, namespace_path: Path): + self.code_model.for_test = True + out_path = self._package_root_folder(namespace_path) / Path("generated_tests") + general_serializer = TestGeneralSerializer(code_model=self.code_model, env=env) + self.write_file(out_path / "conftest.py", general_serializer.serialize_conftest()) + if not self.code_model.options["azure_arm"]: + for is_async in (True, False): + async_suffix = "_async" if is_async else "" + general_serializer.is_async = is_async + self.write_file( + out_path / f"testpreparer{async_suffix}.py", + general_serializer.serialize_testpreparer(), + ) + + for client in self.code_model.clients: + for og in client.operation_groups: + if self.code_model.options["multiapi"] and any( + o.api_versions[0] != self.code_model.options["default_api_version"] for o in og.operations + ): + continue + test_serializer = TestSerializer(self.code_model, env, client=client, operation_group=og) + for is_async in (True, False): + try: + test_serializer.is_async = is_async + self.write_file( + out_path / f"{to_snake_case(test_serializer.test_class_name)}.py", + test_serializer.serialize_test(), + ) + except Exception as e: # pylint: disable=broad-except + # test generation shall not block code generation, so just log error + log_error = f"error happens in test generation for operation group {og.class_name}: {e}" + _LOGGER.error(log_error) + self.code_model.for_test = False diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/base_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/base_serializer.py new file mode 100644 index 0000000000..0ac623166a --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/base_serializer.py @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from jinja2 import Environment +from ..models import ( + FileImport, + CodeModel, +) + + +class BaseSerializer: + """Base serializer for SDK root level files""" + + def __init__(self, code_model: CodeModel, env: Environment): + self.code_model = code_model + self.env = env + + def init_file_import(self) -> FileImport: + return FileImport(self.code_model) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/builder_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/builder_serializer.py new file mode 100644 index 0000000000..e46a4299a1 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/builder_serializer.py @@ -0,0 +1,1507 @@ +# pylint: disable=too-many-lines +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from abc import abstractmethod +from collections import defaultdict +from typing import Generic, List, Type, TypeVar, Dict, Union, Optional, cast + +from ..models import ( + Operation, + PagingOperation, + CodeModel, + LROOperation, + LROPagingOperation, + ModelType, + DictionaryType, + ListType, + RequestBuilder, + ParameterLocation, + Response, + BinaryType, + BodyParameter, + ParameterMethodLocation, + OverloadedRequestBuilder, + Property, + RequestBuilderType, + CombinedType, + JSONModelType, + DPGModelType, + ParameterListType, + ByteArraySchema, +) +from .parameter_serializer import ParameterSerializer, PopKwargType +from ..models.parameter_list import ParameterType +from . 
import utils +from ...utils import xml_serializable, json_serializable + +T = TypeVar("T") +OrderedSet = Dict[T, None] + +BuilderType = TypeVar( + "BuilderType", + bound=Union[ + RequestBuilder, + Operation, + PagingOperation, + LROOperation, + LROPagingOperation, + OverloadedRequestBuilder, + ], +) +OperationType = TypeVar( + "OperationType", + bound=Union[Operation, PagingOperation, LROOperation, LROPagingOperation], +) + + +def _all_same(data: List[List[str]]) -> bool: + return len(data) > 1 and all(sorted(data[0]) == sorted(data[i]) for i in range(1, len(data))) + + +def _need_type_ignore(builder: OperationType) -> bool: + for e in builder.non_default_errors: + for status_code in e.status_codes: + if status_code in (401, 404, 409, 304): + return True + return False + + +def _xml_config(send_xml: bool, content_types: List[str]) -> str: + if not (send_xml and "xml" in str(content_types)): + return "" + if len(content_types) == 1: + return ", is_xml=True" + return ", is_xml='xml' in str(content_type)" + + +def _escape_str(input_str: str) -> str: + replace = input_str.replace("'", "\\'") + return f'"{replace}"' + + +def _get_polymorphic_subtype_template(polymorphic_subtype: ModelType) -> List[str]: + retval: List[str] = [] + retval.append("") + retval.append(f'# JSON input template for discriminator value "{polymorphic_subtype.discriminator_value}":') + subtype_template = utils.json_dumps_template( + polymorphic_subtype.get_json_template_representation(), + ) + + def _get_polymorphic_parent( + polymorphic_subtype: Optional[ModelType], + ) -> Optional[ModelType]: + if not polymorphic_subtype: + return None + try: + return next(p for p in polymorphic_subtype.parents if p.discriminated_subtypes) + except StopIteration: + return None + + polymorphic_parent = _get_polymorphic_parent(polymorphic_subtype) + while _get_polymorphic_parent(polymorphic_parent): + polymorphic_parent = _get_polymorphic_parent(polymorphic_parent) + retval.extend(f"{cast(ModelType, polymorphic_parent).snake_case_name} = {subtype_template}".splitlines()) + return retval + + +def _serialize_grouped_body(builder: BuilderType) -> List[str]: + retval: List[str] = [] + for grouped_parameter in builder.parameters.grouped: + retval.append(f"{grouped_parameter.client_name} = None") + groupers = [p for p in builder.parameters if p.grouper] + for grouper in groupers: + retval.append(f"if {grouper.client_name} is not None:") + retval.extend( + [ + f" {parameter} = {grouper.client_name}.{property}" + for property, parameter in grouper.property_to_parameter_name.items() + ] + ) + return retval + + +def _serialize_flattened_body(body_parameter: BodyParameter) -> List[str]: + retval: List[str] = [] + if not body_parameter.property_to_parameter_name: + raise ValueError("This method can't be called if the operation doesn't need parameter flattening") + + parameter_string = ", ".join( + f"{property_name}={parameter_name}" + for property_name, parameter_name in body_parameter.property_to_parameter_name.items() + ) + model_type = cast(ModelType, body_parameter.type) + retval.append(f"{body_parameter.client_name} = _models.{model_type.name}({parameter_string})") + return retval + + +def _serialize_json_model_body(body_parameter: BodyParameter, parameters: List[ParameterType]) -> List[str]: + retval: List[str] = [] + if not body_parameter.property_to_parameter_name: + raise ValueError("This method can't be called if the operation doesn't need parameter flattening") + + retval.append(f"if {body_parameter.client_name} is _Unset:") + for p in 
parameters: + if p.client_default_value is None and not p.optional and p.default_to_unset_sentinel: + retval.append(f" if {p.client_name} is _Unset:") + retval.append(f" raise TypeError('missing required argument: {p.client_name}')") + parameter_string = ", \n".join( + f'"{property_name}": {parameter_name}' + for property_name, parameter_name in body_parameter.property_to_parameter_name.items() + ) + model_type = cast(ModelType, body_parameter.type) + if isinstance(model_type, CombinedType) and model_type.target_model_subtype((JSONModelType,)): + model_type = model_type.target_model_subtype((JSONModelType,)) + retval.append(f" {body_parameter.client_name} = {{{parameter_string}}}") + retval.append(f" {body_parameter.client_name} = {{") + retval.append(f" k: v for k, v in {body_parameter.client_name}.items() if v is not None") + retval.append(" }") + return retval + + +def _serialize_multipart_body(builder: BuilderType) -> List[str]: + retval: List[str] = [] + body_param = builder.parameters.body_parameter + # we have to construct our form data before passing to the request as well + retval.append("# Construct form data") + retval.append(f"_{body_param.client_name} = {{") + for param in body_param.entries: + retval.append(f' "{param.wire_name}": {param.client_name},') + retval.append("}") + return retval + + +def _get_json_response_template_to_status_codes( + builder: OperationType, +) -> Dict[str, List[str]]: + retval = defaultdict(list) + for response in builder.responses: + json_template = response.get_json_template_representation() + if not json_template: + continue + status_codes = [str(status_code) for status_code in response.status_codes] + response_json = utils.json_dumps_template(json_template) + retval[response_json].extend(status_codes) + return retval + + +def is_json_model_type(parameters: ParameterListType) -> bool: + return ( + parameters.has_body + and parameters.body_parameter.has_json_model_type + and any(p.in_flattened_body for p in parameters.parameters) + ) + + +class _BuilderBaseSerializer(Generic[BuilderType]): + def __init__(self, code_model: CodeModel, async_mode: bool) -> None: + self.code_model = code_model + self.async_mode = async_mode + self.parameter_serializer = ParameterSerializer() + + @property + @abstractmethod + def _need_self_param(self) -> bool: ... + + @property + @abstractmethod + def _function_def(self) -> str: + """The def keyword for the builder we're serializing, i.e. 'def' or 'async def'""" + + @property + @abstractmethod + def _call_method(self) -> str: + """How to call network calls. 
Await if we have to await network calls""" + + @property + @abstractmethod + def serializer_name(self) -> str: + """Name of serializer""" + + @abstractmethod + def response_docstring(self, builder: BuilderType) -> List[str]: + """Response portion of the docstring""" + + def decorators(self, builder: BuilderType) -> List[str]: + """Decorators for the method""" + retval: List[str] = [] + if builder.is_overload: + return ["@overload"] + if self.code_model.options["tracing"] and builder.want_tracing: + retval.append(f"@distributed_trace{'_async' if self.async_mode else ''}") + return retval + + def _method_signature(self, builder: BuilderType) -> str: + return self.parameter_serializer.serialize_method( + function_def=self._function_def, + method_name=builder.name, + need_self_param=self._need_self_param, + method_param_signatures=builder.method_signature(self.async_mode), + pylint_disable=builder.pylint_disable(self.async_mode), + ) + + def method_signature_and_response_type_annotation( + self, builder: BuilderType, *, want_decorators: Optional[bool] = True + ) -> str: + response_type_annotation = builder.response_type_annotation(async_mode=self.async_mode) + method_signature = self._method_signature(builder) + decorators = self.decorators(builder) + decorators_str = "" + if decorators and want_decorators: + decorators_str = "\n".join(decorators) + "\n" + return decorators_str + utils.method_signature_and_response_type_annotation_template( + method_signature=method_signature, + response_type_annotation=response_type_annotation, + ) + + def description_and_summary(self, builder: BuilderType) -> List[str]: + description_list: List[str] = [] + description_list.append(f"{builder.summary.strip() if builder.summary else builder.description.strip()}") + if builder.summary and builder.description: + description_list.append("") + description_list.append(builder.description.strip()) + description_list.append("") + return description_list + + @staticmethod + def line_too_long(docs: List[str]) -> bool: + return any(len(line) > 120 for line in docs) + + def example_template(self, builder: BuilderType) -> List[str]: + template = [] + if builder.abstract: + return [] + if self._json_input_example_template(builder): + template.append("") + template += self._json_input_example_template(builder) + return template + + def param_description(self, builder: BuilderType) -> List[str]: + description_list: List[str] = [] + for param in builder.parameters.method: + if ( + not param.in_docstring + or param.hide_in_operation_signature + or param.method_location == ParameterMethodLocation.KWARG + ): + continue + description_list.extend( + f":{param.description_keyword} {param.client_name}: {param.description}".replace("\n", "\n ").split( + "\n" + ) + ) + docstring_type = param.docstring_type( + async_mode=self.async_mode, + ) + description_list.append(f":{param.docstring_type_keyword} {param.client_name}: {docstring_type}") + return description_list + + def param_description_and_response_docstring(self, builder: BuilderType) -> List[str]: + if builder.abstract: + return [] + return self.param_description(builder) + self.response_docstring(builder) + + @property + @abstractmethod + def _json_response_template_name(self) -> str: ... 
+ + def _json_input_example_template(self, builder: BuilderType) -> List[str]: + template: List[str] = [] + if not builder.parameters.has_body or builder.parameters.body_parameter.flattened: + # No input template if there is no body parameter + return template + + body_param = builder.parameters.body_parameter + if not isinstance(body_param.type, (ListType, DictionaryType, ModelType, CombinedType)): + return template + + if ( + isinstance(body_param.type, (ListType, DictionaryType)) + and self.code_model.options["models_mode"] == "msrest" + ): + return template + + if isinstance(body_param.type, ModelType) and body_param.type.base == "msrest": + return template + + json_type = body_param.type + if isinstance(body_param.type, CombinedType): + target_model_type = body_param.type.target_model_subtype((JSONModelType, DPGModelType)) + if target_model_type is None: + return template + json_type = target_model_type + + polymorphic_subtypes: List[ModelType] = [] + json_type.get_polymorphic_subtypes(polymorphic_subtypes) + if polymorphic_subtypes: + # we just assume one kind of polymorphic body for input + discriminator_name = cast(Property, polymorphic_subtypes[0].discriminator).wire_name + template.append( + "# The input is polymorphic. The following are possible polymorphic " + f'inputs based off discriminator "{discriminator_name}":' + ) + for idx in range( + min( + self.code_model.options["polymorphic_examples"], + len(polymorphic_subtypes), + ) + ): + template.extend(_get_polymorphic_subtype_template(polymorphic_subtypes[idx])) + template.append("") + template.append("# JSON input template you can fill out and use as your body input.") + json_template = utils.json_dumps_template( + json_type.get_json_template_representation(), + ) + template.extend(f"{builder.parameters.body_parameter.client_name} = {json_template}".splitlines()) + return template + + def serialize_path(self, builder: BuilderType) -> List[str]: + return self.parameter_serializer.serialize_path(builder.parameters.path, self.serializer_name) + + @property + def pipeline_name(self) -> str: + return f"{'_' if self.code_model.is_azure_flavor else ''}pipeline" + + +############################## REQUEST BUILDERS ############################## + + +class RequestBuilderSerializer(_BuilderBaseSerializer[RequestBuilderType]): + def description_and_summary(self, builder: RequestBuilderType) -> List[str]: + retval = super().description_and_summary(builder) + retval += [ + "See https://aka.ms/azsdk/dpcodegen/python/send_request for how to incorporate this " + "request builder into your code flow.", + "", + ] + return retval + + @property + def _call_method(self) -> str: + return "" + + @property + def serializer_name(self) -> str: + return "_SERIALIZER" + + @property + def _json_response_template_name(self) -> str: + return "response.json()" + + @staticmethod + def declare_non_inputtable_headers_queries( + builder: RequestBuilderType, + ) -> List[str]: + def _get_value(param): + declaration = param.get_declaration() if param.constant else None + if param.location in [ParameterLocation.HEADER, ParameterLocation.QUERY]: + kwarg_dict = "headers" if param.location == ParameterLocation.HEADER else "params" + return f"_{kwarg_dict}.pop('{param.wire_name}', {declaration})" + return declaration + + return [ + f"{p.client_name} = {_get_value(p)}" + for p in (builder.parameters.headers + builder.parameters.query) + if not p.in_method_signature + ] + + @property + def _function_def(self) -> str: + return "def" + + @property + def _need_self_param(self) -> 
bool: + return False + + def response_docstring(self, builder: RequestBuilderType) -> List[str]: + request_full_path = f"{self.code_model.core_library}.rest.HttpRequest" + response_str = ( + f":return: Returns an :class:`~{request_full_path}` that you will pass to the client's " + + "`send_request` method. See https://aka.ms/azsdk/dpcodegen/python/send_request for how to " + + "incorporate this response into your code flow." + ) + rtype_str = f":rtype: ~{request_full_path}" + return [response_str, rtype_str] + + def pop_kwargs_from_signature(self, builder: RequestBuilderType) -> List[str]: + return self.parameter_serializer.pop_kwargs_from_signature( + builder.parameters.kwargs_to_pop, + check_kwarg_dict=True, + pop_headers_kwarg=(PopKwargType.CASE_INSENSITIVE if bool(builder.parameters.headers) else PopKwargType.NO), + pop_params_kwarg=(PopKwargType.CASE_INSENSITIVE if bool(builder.parameters.query) else PopKwargType.NO), + ) + + @staticmethod + def create_http_request(builder: RequestBuilderType) -> List[str]: + retval = ["return HttpRequest("] + retval.append(f' method="{builder.method}",') + retval.append(" url=_url,") + if builder.parameters.query: + retval.append(" params=_params,") + if builder.parameters.headers: + retval.append(" headers=_headers,") + if builder.parameters.has_body and builder.parameters.body_parameter.in_method_signature: + body_param = builder.parameters.body_parameter + if body_param.constant or body_param.method_location != ParameterMethodLocation.KWARG: + # we only need to pass it through if it's not a kwarg or it's a popped kwarg + retval.append( + f" {builder.parameters.body_parameter.client_name}=" + f"{builder.parameters.body_parameter.client_name}," + ) + retval.append(" **kwargs") + retval.append(")") + return retval + + def serialize_headers(self, builder: RequestBuilderType) -> List[str]: + headers = [ + h + for h in builder.parameters.headers + if not builder.has_form_data_body or h.wire_name.lower() != "content-type" + ] + retval = ["# Construct headers"] if headers else [] + for header in headers: + retval.extend( + self.parameter_serializer.serialize_query_header( + header, + "headers", + self.serializer_name, + self.code_model.is_legacy, + ) + ) + return retval + + def serialize_query(self, builder: RequestBuilderType) -> List[str]: + retval = ["# Construct parameters"] + for parameter in builder.parameters.query: + retval.extend( + self.parameter_serializer.serialize_query_header( + parameter, + "params", + self.serializer_name, + self.code_model.is_legacy, + ) + ) + return retval + + def construct_url(self, builder: RequestBuilderType) -> str: + if any(o for o in ["low_level_client", "version_tolerant"] if self.code_model.options.get(o)): + url_value = _escape_str(builder.url) + else: + url_value = f'kwargs.pop("template_url", {_escape_str(builder.url)})' + return f"_url = {url_value}{' # pylint: disable=line-too-long' if len(url_value) > 114 else ''}" + + +############################## NORMAL OPERATIONS ############################## + + +class _OperationSerializer(_BuilderBaseSerializer[OperationType]): + def description_and_summary(self, builder: OperationType) -> List[str]: + retval = super().description_and_summary(builder) + if builder.deprecated: + retval.append(".. warning::") + retval.append(" This method is deprecated") + retval.append("") + if builder.external_docs and builder.external_docs.get("url"): + retval.append(".. 
seealso::") + retval.append(f" - {builder.external_docs['url']}") + retval.append("") + return retval + + @property + def _json_response_template_name(self) -> str: + return "response" + + def example_template(self, builder: OperationType) -> List[str]: + if self.code_model.options["models_mode"] in ("msrest", "dpg"): + return [] + retval = super().example_template(builder) + for response in builder.responses: + polymorphic_subtypes: List[ModelType] = [] + if not response.type: + continue + response.get_polymorphic_subtypes(polymorphic_subtypes) + if polymorphic_subtypes: + # we just assume one kind of polymorphic body for input + discriminator_name = cast(Property, polymorphic_subtypes[0].discriminator).wire_name + retval.append("") + retval.append( + "# The response is polymorphic. The following are possible polymorphic " + f'responses based off discriminator "{discriminator_name}":' + ) + for idx in range( + min( + self.code_model.options["polymorphic_examples"], + len(polymorphic_subtypes), + ) + ): + retval.extend(_get_polymorphic_subtype_template(polymorphic_subtypes[idx])) + + if _get_json_response_template_to_status_codes(builder): + retval.append("") + for ( + response_body, + status_codes, + ) in _get_json_response_template_to_status_codes(builder).items(): + retval.append("# response body for status code(s): {}".format(", ".join(status_codes))) + retval.extend(f"{self._json_response_template_name} == {response_body}".splitlines()) + return retval + + def make_pipeline_call(self, builder: OperationType) -> List[str]: + retval = [] + type_ignore = self.async_mode and builder.group_name == "" # is in a mixin + if builder.stream_value is True and not self.code_model.options["version_tolerant"]: + retval.append("_decompress = kwargs.pop('decompress', True)") + retval.extend( + [ + f"_stream = {builder.stream_value}", + f"pipeline_response: PipelineResponse = {self._call_method}self._client.{self.pipeline_name}.run( " + + f"{'# type: ignore' if type_ignore else ''} # pylint: disable=protected-access", + " _request,", + " stream=_stream,", + " **kwargs", + ")", + ] + ) + return retval + + @property + def _function_def(self) -> str: + return "async def" if self.async_mode else "def" + + @property + def _need_self_param(self) -> bool: + return True + + @property + def serializer_name(self) -> str: + return "self._serialize" + + def decorators(self, builder: OperationType) -> List[str]: + """Decorators for the method""" + retval = super().decorators(builder) + if self._api_version_validation(builder): + retval.append(self._api_version_validation(builder)) + return retval + + def _api_version_validation(self, builder: OperationType) -> str: + if builder.is_overload: + return "" + retval: List[str] = [] + if builder.added_on: + retval.append(f' method_added_on="{builder.added_on}",') + params_added_on = defaultdict(list) + for parameter in builder.parameters: + if parameter.added_on: + params_added_on[parameter.added_on].append(parameter.client_name) + if params_added_on: + retval.append(f" params_added_on={dict(params_added_on)},") + if retval: + retval_str = "\n".join(retval) + return f"@api_version_validation(\n{retval_str}\n)" + return "" + + def pop_kwargs_from_signature(self, builder: OperationType) -> List[str]: + kwargs_to_pop = builder.parameters.kwargs_to_pop + kwargs = self.parameter_serializer.pop_kwargs_from_signature( + kwargs_to_pop, + check_kwarg_dict=True, + pop_headers_kwarg=( + PopKwargType.CASE_INSENSITIVE + if builder.has_kwargs_to_pop_with_default(kwargs_to_pop, 
ParameterLocation.HEADER) # type: ignore + else PopKwargType.SIMPLE + ), + pop_params_kwarg=( + PopKwargType.CASE_INSENSITIVE + if builder.has_kwargs_to_pop_with_default(kwargs_to_pop, ParameterLocation.QUERY) # type: ignore + else PopKwargType.SIMPLE + ), + check_client_input=not self.code_model.options["multiapi"], + operation_name=f"('{builder.name}')" if builder.group_name == "" else "", + ) + for p in builder.parameters.parameters: + if p.hide_in_operation_signature: + kwargs.append(f'{p.client_name} = kwargs.pop("{p.client_name}", None)') + cls_annotation = builder.cls_type_annotation(async_mode=self.async_mode) + kwargs.append(f"cls: {cls_annotation} = kwargs.pop(\n 'cls', None\n)") + return kwargs + + def response_docstring(self, builder: OperationType) -> List[str]: + response_str = f":return: {builder.response_docstring_text(async_mode=self.async_mode)}" + rtype_str = f":rtype: {builder.response_docstring_type(async_mode=self.async_mode)}" + return [ + response_str, + rtype_str, + f":raises ~{self.code_model.core_library}.exceptions.HttpResponseError:", + ] + + def _serialize_body_parameter(self, builder: OperationType) -> List[str]: + """We need to serialize params if they're not meant to be streamed in. + + This function serializes the body params that need to be serialized. + """ + retval: List[str] = [] + body_param = builder.parameters.body_parameter + if body_param.is_form_data: + model_type = cast( + ModelType, + ( + body_param.type.target_model_subtype((JSONModelType, DPGModelType)) + if isinstance(body_param.type, CombinedType) + else body_param.type + ), + ) + file_fields = [p.wire_name for p in model_type.properties if p.is_multipart_file_input] + data_fields = [p.wire_name for p in model_type.properties if not p.is_multipart_file_input] + retval.extend( + [ + "_body = (", + f" {body_param.client_name}.as_dict()", + f" if isinstance({body_param.client_name}, _model_base.Model) else", + f" {body_param.client_name}", + ")", + f"_file_fields: List[str] = {file_fields}", + f"_data_fields: List[str] = {data_fields}", + "_files, _data = prepare_multipart_form_data(_body, _file_fields, _data_fields)", + ] + ) + return retval + + body_kwarg_name = builder.request_builder.parameters.body_parameter.client_name + send_xml = builder.parameters.body_parameter.type.is_xml + xml_serialization_ctxt = body_param.type.xml_serialization_ctxt if send_xml else None + ser_ctxt_name = "serialization_ctxt" + if xml_serialization_ctxt and self.code_model.options["models_mode"]: + retval.append(f'{ser_ctxt_name} = {{"xml": {{{xml_serialization_ctxt}}}}}') + if self.code_model.options["models_mode"] == "msrest": + is_xml_cmd = _xml_config(send_xml, builder.parameters.body_parameter.content_types) + serialization_ctxt_cmd = f", {ser_ctxt_name}={ser_ctxt_name}" if xml_serialization_ctxt else "" + create_body_call = ( + f"_{body_kwarg_name} = self._serialize.body({body_param.client_name}, " + f"'{body_param.type.serialization_type}'{is_xml_cmd}{serialization_ctxt_cmd})" + ) + elif self.code_model.options["models_mode"] == "dpg": + if json_serializable(body_param.default_content_type): + if hasattr(body_param.type, "encode") and body_param.type.encode: # type: ignore + create_body_call = ( + f"_{body_kwarg_name} = json.dumps({body_param.client_name}, " + "cls=SdkJSONEncoder, exclude_readonly=True, " + f"format='{body_param.type.encode}') # type: ignore" # type: ignore + ) + else: + create_body_call = ( + f"_{body_kwarg_name} = json.dumps({body_param.client_name}, " + "cls=SdkJSONEncoder, 
exclude_readonly=True) # type: ignore" + ) + elif xml_serializable(body_param.default_content_type): + create_body_call = f"_{body_kwarg_name} = _get_element({body_param.client_name})" + else: + create_body_call = f"_{body_kwarg_name} = {body_param.client_name}" + else: + create_body_call = f"_{body_kwarg_name} = {body_param.client_name}" + if body_param.optional: + retval.append(f"if {body_param.client_name} is not None:") + retval.append(" " + create_body_call) + retval.append("else:") + retval.append(f" _{body_kwarg_name} = None") + else: + retval.append(create_body_call) + return retval + + def _create_body_parameter( + self, + builder: OperationType, + ) -> List[str]: + """Create the body parameter before we pass it as either json or content to the request builder""" + retval = [] + body_param = builder.parameters.body_parameter + if body_param.entries: + return _serialize_multipart_body(builder) + body_kwarg_name = builder.request_builder.parameters.body_parameter.client_name + body_param_type = body_param.type + if isinstance(body_param_type, BinaryType) or ( + isinstance(body_param.type, ByteArraySchema) and body_param.default_content_type != "application/json" + ): + retval.append(f"_{body_kwarg_name} = {body_param.client_name}") + if ( + not body_param.default_content_type + and not next(p for p in builder.parameters if p.wire_name.lower() == "content-type").optional + ): + content_types = "'" + "', '".join(body_param.content_types) + "'" + retval.extend( + [ + "if not content_type:", + f' raise TypeError("Missing required keyword-only argument: content_type. ' + f'Known values are:" + "{content_types}")', + ] + ) + else: + retval.extend(self._serialize_body_parameter(builder)) + return retval + + def _initialize_overloads(self, builder: OperationType, is_paging: bool = False) -> List[str]: + retval: List[str] = [] + # For paging, we put body parameter in local place outside `prepare_request` + if is_paging: + return retval + same_content_type = len(set(o.parameters.body_parameter.default_content_type for o in builder.overloads)) == 1 + if same_content_type: + default_content_type = builder.overloads[0].parameters.body_parameter.default_content_type + retval.append(f'content_type = content_type or "{default_content_type}"') + client_names = [ + overload.request_builder.parameters.body_parameter.client_name for overload in builder.overloads + ] + for v in sorted(set(client_names), key=client_names.index): + retval.append(f"_{v} = None") + try: + # if there is a binary overload, we do a binary check first. 
+ binary_overload = cast( + OperationType, + next((o for o in builder.overloads if isinstance(o.parameters.body_parameter.type, BinaryType))), + ) + binary_body_param = binary_overload.parameters.body_parameter + retval.append(f"if {binary_body_param.type.instance_check_template.format(binary_body_param.client_name)}:") + if binary_body_param.default_content_type and not same_content_type: + retval.append(f' content_type = content_type or "{binary_body_param.default_content_type}"') + retval.extend(f" {l}" for l in self._create_body_parameter(binary_overload)) + retval.append("else:") + other_overload = cast( + OperationType, + next((o for o in builder.overloads if not isinstance(o.parameters.body_parameter.type, BinaryType))), + ) + retval.extend(f" {l}" for l in self._create_body_parameter(other_overload)) + if other_overload.parameters.body_parameter.default_content_type and not same_content_type: + retval.append( + " content_type = content_type or " + f'"{other_overload.parameters.body_parameter.default_content_type}"' + ) + except StopIteration: + for idx, overload in enumerate(builder.overloads): + if_statement = "if" if idx == 0 else "elif" + body_param = overload.parameters.body_parameter + retval.append( + f"{if_statement} {body_param.type.instance_check_template.format(body_param.client_name)}:" + ) + if body_param.default_content_type and not same_content_type: + retval.append(f' content_type = content_type or "{body_param.default_content_type}"') + retval.extend(f" {l}" for l in self._create_body_parameter(cast(OperationType, overload))) + return retval + + def _create_request_builder_call( + self, + builder: OperationType, + request_builder: RequestBuilderType, + is_next_request: bool = False, + ) -> List[str]: + retval: List[str] = [] + if self.code_model.options["builders_visibility"] == "embedded": + request_path_name = request_builder.name + else: + group_name = request_builder.group_name + request_path_name = "rest{}.{}".format( + ("_" + group_name) if group_name else "", + request_builder.name, + ) + retval.append(f"_request = {request_path_name}(") + for parameter in request_builder.parameters.method: + if parameter.location == ParameterLocation.BODY: + # going to pass in body later based off of overloads + continue + if ( + is_next_request + and builder.operation_type == "paging" + and not bool(builder.next_request_builder) # type: ignore + and parameter.location == ParameterLocation.QUERY + ): + # if we don't want to reformat query parameters for next link calls + # in paging operations with a single swagger operation defintion, + # we skip passing query params when building the next request + continue + type_ignore = ( + parameter.grouped_by + and parameter.client_default_value is not None + and next(p for p in builder.parameters if p.grouper and p.client_name == parameter.grouped_by).optional + ) + retval.append( + f" {parameter.client_name}={parameter.name_in_high_level_operation}," + f"{' # type: ignore' if type_ignore else ''}" + ) + if builder.parameters.has_body and builder.parameters.body_parameter.entries: + # this is for legacy + client_name = builder.parameters.body_parameter.client_name + retval.append(f" {client_name}=_{client_name},") + elif request_builder.has_form_data_body: + retval.append(" files=_files,") + retval.append(" data=_data,") + elif request_builder.overloads: + seen_body_params = set() + for overload in request_builder.overloads: + body_param = overload.parameters.body_parameter + if body_param.client_name in seen_body_params: + continue 
+ seen_body_params.add(body_param.client_name) + + retval.append(f" {body_param.client_name}={body_param.name_in_high_level_operation},") + elif request_builder.parameters.has_body: + body_param = request_builder.parameters.body_parameter + retval.append(f" {body_param.client_name}={body_param.name_in_high_level_operation},") + retval.append(" headers=_headers,") + retval.append(" params=_params,") + retval.append(")") + return retval + + def _postprocess_http_request(self, builder: OperationType, template_url: Optional[str] = None) -> List[str]: + retval: List[str] = [] + if builder.parameters.path: + retval.extend(self.serialize_path(builder)) + url_to_format = "_request.url" + if self.code_model.options["version_tolerant"] and template_url: + url_to_format = template_url + retval.append( + "_request.url = self._client.format_url({}{})".format( + url_to_format, + ", **path_format_arguments" if builder.parameters.path else "", + ) + ) + return retval + + def _call_request_builder_helper( + self, + builder: OperationType, + request_builder: RequestBuilderType, + template_url: Optional[str] = None, + is_next_request: bool = False, + is_paging: bool = False, + ) -> List[str]: + retval = [] + if builder.parameters.grouped: + # request builders don't allow grouped parameters, so we group them before making the call + retval.extend(_serialize_grouped_body(builder)) + if builder.parameters.has_body and builder.parameters.body_parameter.flattened: + # serialize flattened body before passing to request builder as well + retval.extend(_serialize_flattened_body(builder.parameters.body_parameter)) + if is_json_model_type(builder.parameters): + retval.extend(_serialize_json_model_body(builder.parameters.body_parameter, builder.parameters.parameters)) + if builder.has_form_data_body: + retval.extend(self._create_body_parameter(builder)) + elif builder.overloads: + # we are only dealing with two overloads. 
If there are three, we generate an abstract operation + retval.extend(self._initialize_overloads(builder, is_paging=is_paging)) + elif builder.parameters.has_body: + # non-overloaded body + retval.extend(self._create_body_parameter(builder)) + retval.append("") + retval.extend(self._create_request_builder_call(builder, request_builder, is_next_request)) + retval.extend(self._postprocess_http_request(builder, template_url)) + return retval + + def call_request_builder(self, builder: OperationType, is_paging: bool = False) -> List[str]: + return self._call_request_builder_helper(builder, builder.request_builder, is_paging=is_paging) + + def response_headers_and_deserialization( + self, + builder: OperationType, + response: Response, + ) -> List[str]: + return self.response_headers(response) + self.response_deserialization(builder, response) + + def response_headers(self, response: Response) -> List[str]: + retval: List[str] = [ + ( + f"response_headers['{response_header.wire_name}']=self._deserialize(" + f"'{response_header.serialization_type}', response.headers.get('{response_header.wire_name}'))" + ) + for response_header in response.headers + ] + if response.headers: + retval.append("") + return retval + + def response_deserialization( + self, + builder: OperationType, + response: Response, + ) -> List[str]: + retval: List[str] = [] + deserialize_code: List[str] = [] + stream_logic = True + if builder.has_stream_response: + if isinstance(response.type, ByteArraySchema): + deserialized = f"{'await ' if self.async_mode else ''}response.read()" + else: + stream_logic = False + if self.code_model.options["version_tolerant"]: + deserialized = "response.iter_bytes()" + else: + deserialized = ( + f"response.stream_download(self._client.{self.pipeline_name}, decompress=_decompress)" + ) + deserialize_code.append(f"deserialized = {deserialized}") + elif response.type: + pylint_disable = "" + if isinstance(response.type, ModelType) and response.type.internal: + pylint_disable = " # pylint: disable=protected-access" + if self.code_model.options["models_mode"] == "msrest": + deserialize_code.append("deserialized = self._deserialize(") + deserialize_code.append(f" '{response.serialization_type}',{pylint_disable}") + deserialize_code.append(" pipeline_response.http_response") + deserialize_code.append(")") + elif self.code_model.options["models_mode"] == "dpg": + if builder.has_stream_response: + deserialize_code.append("deserialized = response.content") + else: + format_filed = ( + f', format="{response.type.encode}"' + if isinstance(response.type, ByteArraySchema) + and response.default_content_type == "application/json" + else "" + ) + response_attr = "json" if json_serializable(str(response.default_content_type)) else "text" + deserialize_func = "_deserialize" + if xml_serializable(str(response.default_content_type)): + deserialize_func = "_deserialize_xml" + deserialize_code.append(f"deserialized = {deserialize_func}(") + deserialize_code.append( + f" {response.type.type_annotation(is_operation_file=True)},{pylint_disable}" + ) + deserialize_code.append(f" response.{response_attr}(){response.result_property}{format_filed}") + deserialize_code.append(")") + + else: + deserialized_value = "ET.fromstring(response.text())" if response.type.is_xml else "response.json()" + deserialize_code.append("if response.content:") + deserialize_code.append(f" deserialized = {deserialized_value}") + deserialize_code.append("else:") + deserialize_code.append(" deserialized = None") + if len(deserialize_code) > 0: + 
if builder.expose_stream_keyword and stream_logic: + retval.append("if _stream:") + retval.append(" deserialized = response.iter_bytes()") + retval.append("else:") + retval.extend([f" {dc}" for dc in deserialize_code]) + else: + retval.extend(deserialize_code) + return retval + + def handle_error_response(self, builder: OperationType) -> List[str]: + async_await = "await " if self.async_mode else "" + retval = [f"if response.status_code not in {str(builder.success_status_codes)}:"] + response_read = [ + " try:", + f" {async_await}response.read() # Load the body in memory and close the socket", + " except (StreamConsumedError, StreamClosedError):", + " pass", + ] + if builder.stream_value is True: # _stream is True so no need to judge it + retval.extend(response_read) + elif isinstance(builder.stream_value, str): # _stream is not sure, so we need to judge it + retval.append(" if _stream:") + retval.extend([f" {l}" for l in response_read]) + type_ignore = " # type: ignore" if _need_type_ignore(builder) else "" + retval.append( + f" map_error(status_code=response.status_code, response=response, error_map=error_map){type_ignore}" + ) + error_model = "" + if builder.default_error_deserialization and self.code_model.options["models_mode"]: + if self.code_model.options["models_mode"] == "dpg": + retval.append(f" error = _deserialize({builder.default_error_deserialization}, response.json())") + else: + retval.append( + f" error = self._deserialize.failsafe_deserialize({builder.default_error_deserialization}, " + "pipeline_response)" + ) + error_model = ", model=error" + retval.append( + " raise HttpResponseError(response=response{}{})".format( + error_model, + (", error_format=ARMErrorFormat" if self.code_model.options["azure_arm"] else ""), + ) + ) + return retval + + def handle_response(self, builder: OperationType) -> List[str]: + retval: List[str] = ["response = pipeline_response.http_response"] + retval.append("") + retval.extend(self.handle_error_response(builder)) + retval.append("") + if builder.has_optional_return_type: + retval.append("deserialized = None") + if builder.any_response_has_headers: + retval.append("response_headers = {}") + if builder.has_response_body or builder.any_response_has_headers: # pylint: disable=too-many-nested-blocks + if len(builder.responses) > 1: + status_codes, res_headers, res_deserialization = [], [], [] + for status_code in builder.success_status_codes: + response = builder.get_response_from_status(status_code) + if response.headers or response.type: + status_codes.append(status_code) + res_headers.append(self.response_headers(response)) + res_deserialization.append(self.response_deserialization(builder, response)) + + is_headers_same = _all_same(res_headers) + is_deserialization_same = _all_same(res_deserialization) + if is_deserialization_same: + if is_headers_same: + retval.extend(res_headers[0]) + retval.extend(res_deserialization[0]) + retval.append("") + else: + for status_code, headers in zip(status_codes, res_headers): + if headers: + retval.append(f"if response.status_code == {status_code}:") + retval.extend([f" {line}" for line in headers]) + retval.append("") + retval.extend(res_deserialization[0]) + retval.append("") + else: + for status_code, headers, deserialization in zip(status_codes, res_headers, res_deserialization): + retval.append(f"if response.status_code == {status_code}:") + retval.extend([f" {line}" for line in headers]) + retval.extend([f" {line}" for line in deserialization]) + retval.append("") + else: + 
retval.extend(self.response_headers_and_deserialization(builder, builder.responses[0])) + retval.append("") + if builder.has_optional_return_type or self.code_model.options["models_mode"]: + deserialized = "deserialized" + else: + deserialized = f"cast({builder.response_type_annotation(async_mode=self.async_mode)}, deserialized)" + retval.append("if cls:") + retval.append( + " return cls(pipeline_response, {}, {}){}".format( + deserialized if builder.has_response_body else "None", + "response_headers" if builder.any_response_has_headers else "{}", + " # type: ignore", + ) + ) + if builder.has_response_body and any( + response.is_stream_response or response.type for response in builder.responses + ): + retval.append("") + retval.append(f"return {deserialized} # type: ignore") + if builder.request_builder.method == "HEAD" and self.code_model.options["head_as_boolean"]: + retval.append("return 200 <= response.status_code <= 299") + return retval + + def error_map(self, builder: OperationType) -> List[str]: + retval = ["error_map: MutableMapping = {"] + if builder.non_default_errors: + if not 401 in builder.non_default_error_status_codes: + retval.append(" 401: ClientAuthenticationError,") + if not 404 in builder.non_default_error_status_codes: + retval.append(" 404: ResourceNotFoundError,") + if not 409 in builder.non_default_error_status_codes: + retval.append(" 409: ResourceExistsError,") + if not 304 in builder.non_default_error_status_codes: + retval.append(" 304: ResourceNotModifiedError,") + for e in builder.non_default_errors: + error_model_str = "" + if isinstance(e.type, ModelType): + if self.code_model.options["models_mode"] == "msrest": + error_model_str = ( + f", model=self._deserialize(" f"_models.{e.type.serialization_type}, response)" + ) + elif self.code_model.options["models_mode"] == "dpg": + error_model_str = f", model=_deserialize(_models.{e.type.name}, response.json())" + error_format_str = ", error_format=ARMErrorFormat" if self.code_model.options["azure_arm"] else "" + for status_code in e.status_codes: + if status_code == 401: + retval.append( + " 401: cast(Type[HttpResponseError], " + "lambda response: ClientAuthenticationError(response=response" + f"{error_model_str}{error_format_str}))," + ) + elif status_code == 404: + retval.append( + " 404: cast(Type[HttpResponseError], " + "lambda response: ResourceNotFoundError(response=response" + f"{error_model_str}{error_format_str}))," + ) + elif status_code == 409: + retval.append( + " 409: cast(Type[HttpResponseError], " + "lambda response: ResourceExistsError(response=response" + f"{error_model_str}{error_format_str}))," + ) + elif status_code == 304: + retval.append( + " 304: cast(Type[HttpResponseError], " + "lambda response: ResourceNotModifiedError(response=response" + f"{error_model_str}{error_format_str}))," + ) + elif not error_model_str and not error_format_str: + retval.append(f" {status_code}: HttpResponseError,") + else: + retval.append( + f" {status_code}: cast(Type[HttpResponseError], " + "lambda response: HttpResponseError(response=response" + f"{error_model_str}{error_format_str}))," + ) + else: + retval.append( + " 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, " + "304: ResourceNotModifiedError" + ) + retval.append("}") + if builder.has_etag: + retval.extend( + [ + "if match_condition == MatchConditions.IfNotModified:", + " error_map[412] = ResourceModifiedError", + "elif match_condition == MatchConditions.IfPresent:", + " error_map[412] = ResourceNotFoundError", + 
"elif match_condition == MatchConditions.IfMissing:", + " error_map[412] = ResourceExistsError", + ] + ) + retval.append("error_map.update(kwargs.pop('error_map', {}) or {})") + return retval + + @property + def _call_method(self) -> str: + return "await " if self.async_mode else "" + + +class OperationSerializer(_OperationSerializer[Operation]): ... + + +############################## PAGING OPERATIONS ############################## + +PagingOperationType = TypeVar("PagingOperationType", bound=Union[PagingOperation, LROPagingOperation]) + + +class _PagingOperationSerializer(_OperationSerializer[PagingOperationType]): + def __init__(self, code_model: CodeModel, async_mode: bool) -> None: + # for pylint reasons need to redefine init + # probably because inheritance is going too deep + super().__init__(code_model, async_mode) + self.code_model = code_model + self.async_mode = async_mode + self.parameter_serializer = ParameterSerializer() + + def serialize_path(self, builder: PagingOperationType) -> List[str]: + return self.parameter_serializer.serialize_path(builder.parameters.path, self.serializer_name) + + def decorators(self, builder: PagingOperationType) -> List[str]: + """Decorators for the method""" + retval: List[str] = [] + if builder.is_overload: + return ["@overload"] + if self.code_model.options["tracing"] and builder.want_tracing: + retval.append("@distributed_trace") + if self._api_version_validation(builder): + retval.append(self._api_version_validation(builder)) + return retval + + def call_next_link_request_builder(self, builder: PagingOperationType) -> List[str]: + if builder.next_request_builder: + request_builder = builder.next_request_builder + template_url = None + else: + request_builder = builder.request_builder + template_url = "next_link" + + request_builder = builder.next_request_builder or builder.request_builder + if builder.next_request_builder: + return self._call_request_builder_helper( + builder, + request_builder, + template_url=template_url, + is_next_request=True, + ) + retval: List[str] = [] + query_str = "" + next_link_str = "next_link" + try: + api_version_param = next( + p for p in builder.client.parameters if p.is_api_version and p.location == ParameterLocation.QUERY + ) + retval.append("# make call to next link with the client's api-version") + retval.append("_parsed_next_link = urllib.parse.urlparse(next_link)") + retval.extend( + [ + "_next_request_params = case_insensitive_dict({", + " key: [urllib.parse.quote(v) for v in value]" + " for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()" + "})", + ] + ) + api_version = ( + "self._api_version" + if self.code_model.options["multiapi"] and builder.group_name + else api_version_param.full_client_name + ) + retval.append(f'_next_request_params["api-version"] = {api_version}') + query_str = ", params=_next_request_params" + next_link_str = "urllib.parse.urljoin(next_link, _parsed_next_link.path)" + except StopIteration: + pass + + retval.append(f'_request = HttpRequest("GET", {next_link_str}{query_str})') + retval.extend(self._postprocess_http_request(builder, "_request.url")) + + return retval + + def _prepare_request_callback(self, builder: PagingOperationType) -> List[str]: + retval = self._initialize_overloads(builder) + retval.append("def prepare_request(next_link=None):") + retval.append(" if not next_link:") + retval.extend([f" {line}" for line in self.call_request_builder(builder, is_paging=True)]) + retval.append("") + retval.append(" else:") + retval.extend([f" {line}" for 
line in self.call_next_link_request_builder(builder)]) + if not builder.next_request_builder and self.code_model.is_legacy: + retval.append(' _request.method = "GET"') + else: + retval.append("") + retval.append(" return _request") + return retval + + @property + def _function_def(self) -> str: + return "def" + + def _extract_data_callback(self, builder: PagingOperationType) -> List[str]: + retval = [f"{'async ' if self.async_mode else ''}def extract_data(pipeline_response):"] + response = builder.responses[0] + deserialized = "pipeline_response.http_response.json()" + if self.code_model.options["models_mode"] == "msrest": + suffix = ".http_response" if hasattr(builder, "initial_operation") else "" + deserialize_type = response.serialization_type + pylint_disable = " # pylint: disable=protected-access" + if isinstance(response.type, ModelType) and not response.type.internal: + deserialize_type = f'"{response.serialization_type}"' + pylint_disable = "" + deserialized = ( + f"self._deserialize(\n {deserialize_type},{pylint_disable}\n pipeline_response{suffix}\n)" + ) + retval.append(f" deserialized = {deserialized}") + elif self.code_model.options["models_mode"] == "dpg": + # we don't want to generate paging models for DPG + retval.append(f" deserialized = {deserialized}") + else: + retval.append(f" deserialized = {deserialized}") + item_name = builder.item_name + access = f".{item_name}" if self.code_model.options["models_mode"] == "msrest" else f'["{item_name}"]' + list_of_elem_deserialized = "" + if self.code_model.options["models_mode"] == "dpg": + item_type = builder.item_type.type_annotation(is_operation_file=True) + list_of_elem_deserialized = f"_deserialize({item_type}, deserialized{access})" + else: + list_of_elem_deserialized = f"deserialized{access}" + retval.append(f" list_of_elem = {list_of_elem_deserialized}") + retval.append(" if cls:") + retval.append(" list_of_elem = cls(list_of_elem) # type: ignore") + + continuation_token_name = builder.continuation_token_name + if not continuation_token_name: + cont_token_property = "None" + elif self.code_model.options["models_mode"] == "msrest": + cont_token_property = f"deserialized.{continuation_token_name} or None" + else: + cont_token_property = f'deserialized.get("{continuation_token_name}") or None' + list_type = "AsyncList" if self.async_mode else "iter" + retval.append(f" return {cont_token_property}, {list_type}(list_of_elem)") + return retval + + def _get_next_callback(self, builder: PagingOperationType) -> List[str]: + retval = [f"{'async ' if self.async_mode else ''}def get_next(next_link=None):"] + retval.append(" _request = prepare_request(next_link)") + retval.append("") + retval.extend([f" {l}" for l in self.make_pipeline_call(builder)]) + retval.append(" response = pipeline_response.http_response") + retval.append("") + retval.extend([f" {line}" for line in self.handle_error_response(builder)]) + retval.append("") + retval.append(" return pipeline_response") + return retval + + def set_up_params_for_pager(self, builder: PagingOperationType) -> List[str]: + retval = [] + retval.extend(self.error_map(builder)) + retval.extend(self._prepare_request_callback(builder)) + retval.append("") + retval.extend(self._extract_data_callback(builder)) + retval.append("") + retval.extend(self._get_next_callback(builder)) + return retval + + +class PagingOperationSerializer(_PagingOperationSerializer[PagingOperation]): ... 
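(For orientation: the prepare_request / extract_data / get_next callbacks assembled by the helpers above get stitched into the generated list operation roughly as in the sketch below. This is an illustrative sync-mode sketch, not part of the diff: the operation name, URL and response shape are invented, while ItemPaged, HttpRequest and the error helpers are the real azure-core types; the exact emitted code varies with the code model options.)

# Illustrative sketch only -- approximately what _PagingOperationSerializer emits for a
# hypothetical "list widgets" operation.  "list_widgets" and the "/widgets" URL are invented;
# in the generated file this is a method on the client / operation-group class.
from typing import Any, Iterable, Optional

from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    map_error,
)
from azure.core.paging import ItemPaged
from azure.core.rest import HttpRequest


def list_widgets(self, **kwargs: Any) -> Iterable[Any]:
    cls = kwargs.pop("cls", None)
    error_map = {  # default mapping; see error_map() above for the full generated version
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link: Optional[str] = None) -> HttpRequest:
        # first page: build the initial request; later pages: follow the service-provided link
        _request = HttpRequest("GET", next_link or "/widgets")
        _request.url = self._client.format_url(_request.url)
        return _request

    def extract_data(pipeline_response):
        deserialized = pipeline_response.http_response.json()
        list_of_elem = deserialized["value"]
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.get("nextLink") or None, iter(list_of_elem)

    def get_next(next_link: Optional[str] = None):
        _request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(_request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    return ItemPaged(get_next, extract_data)

In async mode the serializer swaps in the async pager and AsyncList and awaits the pipeline call, which is why async_mode is threaded through every helper above.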
+ + +############################## LRO OPERATIONS ############################## + +LROOperationType = TypeVar("LROOperationType", bound=Union[LROOperation, LROPagingOperation]) + + +class _LROOperationSerializer(_OperationSerializer[LROOperationType]): + def __init__(self, code_model: CodeModel, async_mode: bool) -> None: + # for pylint reasons need to redefine init + # probably because inheritance is going too deep + super().__init__(code_model, async_mode) + self.code_model = code_model + self.async_mode = async_mode + self.parameter_serializer = ParameterSerializer() + + def serialize_path(self, builder: LROOperationType) -> List[str]: + return self.parameter_serializer.serialize_path(builder.parameters.path, self.serializer_name) + + def initial_call(self, builder: LROOperationType) -> List[str]: + retval = [ + f"polling: Union[bool, {builder.get_base_polling_method(self.async_mode)}] = kwargs.pop('polling', True)", + ] + retval.append("lro_delay = kwargs.pop(") + retval.append(" 'polling_interval',") + retval.append(" self._config.polling_interval") + retval.append(")") + retval.append("cont_token: Optional[str] = kwargs.pop('continuation_token', None)") + retval.append("if cont_token is None:") + retval.append( + f" raw_result = {self._call_method}self.{builder.initial_operation.name}(" + f"{'' if any(rsp.type for rsp in builder.initial_operation.responses) else ' # type: ignore'}" + ) + retval.extend( + [f" {parameter.client_name}={parameter.client_name}," for parameter in builder.parameters.method] + ) + retval.append(" cls=lambda x,y,z: x,") + retval.append(" headers=_headers,") + retval.append(" params=_params,") + retval.append(" **kwargs") + retval.append(" )") + retval.append(f" {'await ' if self.async_mode else ''}raw_result.http_response.read() # type: ignore") + + retval.append("kwargs.pop('error_map', None)") + return retval + + def return_lro_poller(self, builder: LROOperationType) -> List[str]: + retval = [] + lro_options_str = ( + "lro_options={'final-state-via': '" + builder.lro_options["final-state-via"] + "'}," + if builder.lro_options + else "" + ) + path_format_arguments_str = "" + if builder.parameters.path: + path_format_arguments_str = "path_format_arguments=path_format_arguments," + retval.extend(self.serialize_path(builder)) + retval.append("") + retval.extend( + [ + "if polling is True:", + f" polling_method: {builder.get_base_polling_method(self.async_mode)} " + + f"= cast({builder.get_base_polling_method(self.async_mode)}, " + f"{builder.get_polling_method(self.async_mode)}(", + " lro_delay,", + f" {lro_options_str}", + f" {path_format_arguments_str}", + " **kwargs", + "))", + ] + ) + retval.append( + f"elif polling is False: polling_method = cast({builder.get_base_polling_method(self.async_mode)}, " + f"{builder.get_no_polling_method(self.async_mode)}())" + ) + retval.append("else: polling_method = polling") + retval.append("if cont_token:") + retval.append(f" return {builder.get_poller_with_response_type(self.async_mode)}.from_continuation_token(") + retval.append(" polling_method=polling_method,") + retval.append(" continuation_token=cont_token,") + retval.append(" client=self._client,") + retval.append(" deserialization_callback=get_long_running_output") + retval.append(" )") + retval.append(f"return {builder.get_poller_with_response_type(self.async_mode)}(") + retval.append(" self._client, raw_result, get_long_running_output, polling_method # type: ignore") + retval.append(" )") + return retval + + def get_long_running_output(self, builder: 
LROOperationType) -> List[str]: + pylint_disable = "" + if not builder.lro_response: + pylint_disable = " # pylint: disable=inconsistent-return-statements" + retval = [f"def get_long_running_output(pipeline_response):{pylint_disable}"] + if builder.lro_response: + if builder.lro_response.headers: + retval.append(" response_headers = {}") + if ( + not self.code_model.options["models_mode"] + or self.code_model.options["models_mode"] == "dpg" + or builder.lro_response.headers + ): + retval.append(" response = pipeline_response.http_response") + retval.extend( + [f" {line}" for line in self.response_headers_and_deserialization(builder, builder.lro_response)] + ) + retval.append(" if cls:") + retval.append( + " return cls(pipeline_response, {}, {}){}".format( + ("deserialized" if builder.lro_response and builder.lro_response.type else "None"), + ("response_headers" if builder.lro_response and builder.lro_response.headers else "{}"), + " # type: ignore", + ) + ) + if builder.lro_response and builder.lro_response.type: + retval.append(" return deserialized") + return retval + + +class LROOperationSerializer(_LROOperationSerializer[LROOperation]): ... + + +############################## LRO PAGING OPERATIONS ############################## + + +class LROPagingOperationSerializer( + _LROOperationSerializer[LROPagingOperation], + _PagingOperationSerializer[LROPagingOperation], +): + @property + def _call_method(self) -> str: + return "await " if self.async_mode else "" + + @property + def _function_def(self) -> str: + return "async def" if self.async_mode else "def" + + def get_long_running_output(self, builder: LROPagingOperation) -> List[str]: + retval = ["def get_long_running_output(pipeline_response):"] + retval.append(f" {self._function_def} internal_get_next(next_link=None):") + retval.append(" if next_link is None:") + retval.append(" return pipeline_response") + retval.append(f" return {self._call_method}get_next(next_link)") + retval.append("") + retval.append(f" return {builder.get_pager(self.async_mode)}(") + retval.append(" internal_get_next, extract_data") + retval.append(" )") + return retval + + def decorators(self, builder: LROPagingOperation) -> List[str]: + """Decorators for the method""" + return _LROOperationSerializer.decorators(self, builder) # type: ignore + + +def get_operation_serializer( + builder: Operation, + code_model, + async_mode: bool, +) -> Union[ + OperationSerializer, + PagingOperationSerializer, + LROOperationSerializer, + LROPagingOperationSerializer, +]: + ret_cls: Union[ + Type[OperationSerializer], + Type[PagingOperationSerializer], + Type[LROOperationSerializer], + Type[LROPagingOperationSerializer], + ] = OperationSerializer + if builder.operation_type == "lropaging": + ret_cls = LROPagingOperationSerializer + elif builder.operation_type == "lro": + ret_cls = LROOperationSerializer + elif builder.operation_type == "paging": + ret_cls = PagingOperationSerializer + return ret_cls(code_model, async_mode) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/client_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/client_serializer.py new file mode 100644 index 0000000000..b99f471fbd --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/client_serializer.py @@ -0,0 +1,294 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import List + +from . import utils +from ..models import Client, ParameterMethodLocation +from .parameter_serializer import ParameterSerializer, PopKwargType +from ...utils import build_policies + + +class ClientSerializer: + def __init__(self, client: Client) -> None: + self.client = client + self.parameter_serializer = ParameterSerializer() + + def _init_signature(self, async_mode: bool) -> str: + pylint_disable = "" + if not self.client.parameters.credential: + pylint_disable = " # pylint: disable=missing-client-constructor-parameter-credential" + return self.parameter_serializer.serialize_method( + function_def="def", + method_name="__init__", + need_self_param=True, + method_param_signatures=self.client.parameters.method_signature(async_mode), + pylint_disable=pylint_disable, + ) + + def init_signature_and_response_type_annotation(self, async_mode: bool) -> str: + init_signature = self._init_signature(async_mode) + return utils.method_signature_and_response_type_annotation_template( + method_signature=init_signature, + response_type_annotation="None", + ) + + def pop_kwargs_from_signature(self) -> List[str]: + return self.parameter_serializer.pop_kwargs_from_signature( + self.client.parameters.kwargs_to_pop, + check_kwarg_dict=False, + pop_headers_kwarg=PopKwargType.NO, + pop_params_kwarg=PopKwargType.NO, + ) + + def class_definition(self) -> str: + class_name = self.client.name + base_class = "" + if self.client.has_mixin: + base_class = f"{class_name}OperationsMixin" + pylint_disable = self.client.pylint_disable() + if base_class: + return f"class {class_name}({base_class}):{pylint_disable}" + return f"class {class_name}:{pylint_disable}" + + def property_descriptions(self, async_mode: bool) -> List[str]: + retval: List[str] = [] + operations_folder = ".aio.operations." if async_mode else ".operations." + for og in [og for og in self.client.operation_groups if not og.is_mixin]: + retval.append(f":ivar {og.property_name}: {og.class_name} operations") + property_type = f"{self.client.code_model.namespace}{operations_folder}{og.class_name}" + retval.append(f":vartype {og.property_name}: {property_type}") + for param in self.client.parameters.method: + retval.append(f":{param.description_keyword} {param.client_name}: {param.description}") + retval.append( + f":{param.docstring_type_keyword} {param.client_name}: {param.docstring_type(async_mode=async_mode)}" + ) + if self.client.has_public_lro_operations: + retval.append( + ":keyword int polling_interval: Default waiting time between two polls for LRO operations " + "if no Retry-After header is present." 
+ ) + retval = [s.replace("\\", "\\\\") for s in retval] + retval.append('"""') + return retval + + def initialize_config(self) -> str: + config_name = f"{self.client.name}Configuration" + config_call = ", ".join( + [ + f"{p.client_name}={p.client_name}" + for p in self.client.config.parameters.method + if p.method_location != ParameterMethodLocation.KWARG + ] + + ["**kwargs"] + ) + return f"self._config = {config_name}({config_call})" + + @property + def host_variable_name(self) -> str: + try: + return next(p for p in self.client.parameters if p.is_host).client_name + except StopIteration: + return "_endpoint" + + @property + def should_init_super(self) -> bool: + return any(og for og in self.client.operation_groups if og.is_mixin and og.has_abstract_operations) + + def initialize_pipeline_client(self, async_mode: bool) -> List[str]: + result = [] + pipeline_client_name = self.client.pipeline_class(async_mode) + endpoint_name = "base_url" if self.client.code_model.is_azure_flavor else "endpoint" + params = { + endpoint_name: self.host_variable_name, + "policies": "_policies", + } + if not self.client.code_model.is_legacy and self.client.request_id_header_name: + result.append(f'kwargs["request_id_header_name"] = "{self.client.request_id_header_name}"') + policies = build_policies( + self.client.code_model.options["azure_arm"], + async_mode, + is_azure_flavor=self.client.code_model.is_azure_flavor, + tracing=self.client.code_model.options["tracing"], + ) + result.extend( + [ + "_policies = kwargs.pop('policies', None)", + "if _policies is None:", + f' _policies = [{",".join(policies)}]', + f"self._client: {pipeline_client_name} = {pipeline_client_name}(" + f"{', '.join(f'{k}={v}' for k, v in params.items())}, **kwargs)", + ] + ) + return result + + def serializers_and_operation_groups_properties(self) -> List[str]: + retval = [] + + def _get_client_models_value(models_dict_name: str) -> str: + if self.client.code_model.model_types: + return f"{{k: v for k, v in {models_dict_name}.__dict__.items() if isinstance(v, type)}}" + return "{}" + + is_msrest_model = self.client.code_model.options["models_mode"] == "msrest" + if is_msrest_model: + add_private_models = len(self.client.code_model.model_types) != len( + self.client.code_model.public_model_types + ) + model_dict_name = f"_models.{self.client.code_model.models_filename}" if add_private_models else "_models" + retval.append( + f"client_models{': Dict[str, Any]' if not self.client.code_model.model_types else ''}" + f" = {_get_client_models_value(model_dict_name)}" + ) + if add_private_models and self.client.code_model.model_types: + update_dict = "{k: v for k, v in _models.__dict__.items() if isinstance(v, type)}" + retval.append(f"client_models.update({update_dict})") + client_models_str = "client_models" if is_msrest_model else "" + retval.append(f"self._serialize = Serializer({client_models_str})") + retval.append(f"self._deserialize = Deserializer({client_models_str})") + if not self.client.code_model.options["client_side_validation"]: + retval.append("self._serialize.client_side_validation = False") + operation_groups = [og for og in self.client.operation_groups if not og.is_mixin] + for og in operation_groups: + if og.code_model.options["multiapi"]: + api_version = f", '{og.api_versions[0]}'" if og.api_versions else ", None" + else: + api_version = "" + retval.extend( + [ + f"self.{og.property_name} = {og.class_name}(", + f" self._client, self._config, self._serialize, self._deserialize{api_version}", + ")", + ] + ) + return retval + 
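(Aside: for the msrest models mode handled above, client_models is simply a name-to-class lookup built over the generated _models module and handed to Serializer/Deserializer. The following self-contained illustration uses stand-in names -- the module and model classes are invented -- to show what the dict comprehension collects.)

import types

_models = types.ModuleType("_models")  # stand-in for the generated models module


class Widget:  # stand-in generated model classes
    pass


class WidgetColor:
    pass


_models.Widget = Widget
_models.WidgetColor = WidgetColor
_models.DEFAULT_PAGE_SIZE = 50  # non-class attributes are filtered out by isinstance(v, type)

# The same comprehension the generated client uses to seed Serializer/Deserializer.
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
print(sorted(client_models))  # ['Widget', 'WidgetColor']

When private models exist, the add_private_models branch above additionally merges the private models module into the same mapping before the serializers are constructed.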
+ def _send_request_signature(self) -> str: + send_request_signature = [ + "request: HttpRequest, *, stream: bool = False," + ] + self.client.parameters.method_signature_kwargs + return self.parameter_serializer.serialize_method( + function_def="def", + method_name=self.client.send_request_name, + need_self_param=True, + method_param_signatures=send_request_signature, + ) + + def send_request_signature_and_response_type_annotation(self, async_mode: bool) -> str: + send_request_signature = self._send_request_signature() + return utils.method_signature_and_response_type_annotation_template( + method_signature=send_request_signature, + response_type_annotation=("Awaitable[AsyncHttpResponse]" if async_mode else "HttpResponse"), + ) + + def _example_make_call(self, async_mode: bool) -> List[str]: + http_response = "AsyncHttpResponse" if async_mode else "HttpResponse" + retval = [f">>> response = {'await ' if async_mode else ''}client.{self.client.send_request_name}(request)"] + retval.append(f"<{http_response}: 200 OK>") + return retval + + def _request_builder_example(self, async_mode: bool) -> List[str]: + retval = [ + "We have helper methods to create requests specific to this service in " + + f"`{self.client.code_model.namespace}.{self.client.code_model.rest_layer_name}`." + ] + retval.append("Use these helper methods to create the request you pass to this method.") + retval.append("") + + request_builder = self.client.request_builders[0] + request_builder_signature = ", ".join(request_builder.parameters.call) + if request_builder.group_name: + rest_imported = request_builder.group_name + request_builder_name = f"{request_builder.group_name}.{request_builder.name}" + else: + rest_imported = request_builder.name + request_builder_name = request_builder.name + full_path = f"{self.client.code_model.namespace}.{self.client.code_model.rest_layer_name}" + retval.append(f">>> from {full_path} import {rest_imported}") + retval.append(f">>> request = {request_builder_name}({request_builder_signature})") + retval.append(f"") + retval.extend(self._example_make_call(async_mode)) + return retval + + def _rest_request_example(self, async_mode: bool) -> List[str]: + retval = [f">>> from {self.client.code_model.core_library}.rest import HttpRequest"] + retval.append('>>> request = HttpRequest("GET", "https://www.example.org/")') + retval.append("") + retval.extend(self._example_make_call(async_mode)) + return retval + + def send_request_description(self, async_mode: bool) -> List[str]: + rest_library = f"{self.client.code_model.core_library}.rest" + retval = ['"""Runs the network request through the client\'s chained policies.'] + retval.append("") + if self.client.code_model.options["builders_visibility"] != "embedded": + retval.extend(self._request_builder_example(async_mode)) + else: + retval.extend(self._rest_request_example(async_mode)) + retval.append("") + retval.append("For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request") + retval.append("") + retval.append(":param request: The network request you want to make. Required.") + retval.append(f":type request: ~{rest_library}.HttpRequest") + retval.append(":keyword bool stream: Whether the response payload will be streamed. Defaults to False.") + retval.append(":return: The response of your network call. 
Does not do error handling on your response.") + http_response = "AsyncHttpResponse" if async_mode else "HttpResponse" + retval.append(f":rtype: ~{rest_library}.{http_response}") + retval.append('"""') + return retval + + def serialize_path(self) -> List[str]: + return self.parameter_serializer.serialize_path(self.client.parameters.path, "self._serialize") + + +class ConfigSerializer: + def __init__(self, client: Client) -> None: + self.client = client + self.parameter_serializer = ParameterSerializer() + + def _init_signature(self, async_mode: bool) -> str: + return self.parameter_serializer.serialize_method( + function_def="def", + method_name="__init__", + need_self_param=True, + method_param_signatures=self.client.config.parameters.method_signature(async_mode), + ) + + def init_signature_and_response_type_annotation(self, async_mode: bool) -> str: + init_signature = self._init_signature(async_mode) + return utils.method_signature_and_response_type_annotation_template( + method_signature=init_signature, + response_type_annotation="None", + ) + + def pop_kwargs_from_signature(self) -> List[str]: + return self.parameter_serializer.pop_kwargs_from_signature( + self.client.config.parameters.kwargs_to_pop, + check_kwarg_dict=False, + pop_headers_kwarg=PopKwargType.NO, + pop_params_kwarg=PopKwargType.NO, + ) + + def set_constants(self) -> List[str]: + return [ + f"self.{p.client_name} = {p.client_default_value_declaration}" + for p in self.client.config.parameters.constant + if p not in self.client.config.parameters.method + ] + + def check_required_parameters(self) -> List[str]: + return [ + f"if {p.client_name} is None:\n" f" raise ValueError(\"Parameter '{p.client_name}' must not be None.\")" + for p in self.client.config.parameters.method + if not (p.optional or p.constant) + ] + + def property_descriptions(self, async_mode: bool) -> List[str]: + retval: List[str] = [] + for p in self.client.config.parameters.method: + retval.append(f":{p.description_keyword} {p.client_name}: {p.description}") + retval.append(f":{p.docstring_type_keyword} {p.client_name}: {p.docstring_type(async_mode=async_mode)}") + retval.append('"""') + return retval diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/enum_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/enum_serializer.py new file mode 100644 index 0000000000..4b9ce87e3f --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/enum_serializer.py @@ -0,0 +1,15 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +from .base_serializer import BaseSerializer +from ..models import FileImport + + +class EnumSerializer(BaseSerializer): + def serialize(self) -> str: + # Generate the enum file + template = self.env.get_template("enum_container.py.jinja2") + return template.render(code_model=self.code_model, file_import=FileImport(self.code_model)) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/general_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/general_serializer.py new file mode 100644 index 0000000000..2a2c1c24d6 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/general_serializer.py @@ -0,0 +1,213 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import json +from typing import Any, List +from jinja2 import Environment +from .import_serializer import FileImportSerializer, TypingSection +from ..models.imports import MsrestImportType, FileImport +from ..models import ( + ImportType, + CodeModel, + TokenCredentialType, + Client, +) +from .client_serializer import ClientSerializer, ConfigSerializer +from .base_serializer import BaseSerializer + + +class GeneralSerializer(BaseSerializer): + """General serializer for SDK root level files""" + + def __init__(self, code_model: CodeModel, env: Environment, async_mode: bool): + super().__init__(code_model, env) + self.async_mode = async_mode + + def serialize_setup_file(self) -> str: + template = self.env.get_template("packaging_templates/setup.py.jinja2") + params = {} + params.update(self.code_model.options) + return template.render(code_model=self.code_model, **params) + + def serialize_package_file(self, template_name: str, **kwargs: Any) -> str: + template = self.env.get_template(template_name) + package_parts = (self.code_model.options["package_name"] or "").split("-")[:-1] + token_credential = any( + c for c in self.code_model.clients if isinstance(getattr(c.credential, "type", None), TokenCredentialType) + ) + version = self.code_model.options["package_version"] + if any(x in version for x in ["a", "b", "rc"]) or version[0] == "0": + dev_status = "4 - Beta" + else: + dev_status = "5 - Production/Stable" + params = { + "code_model": self.code_model, + "dev_status": dev_status, + "token_credential": token_credential, + "pkgutil_names": [".".join(package_parts[: i + 1]) for i in range(len(package_parts))], + "init_names": ["/".join(package_parts[: i + 1]) + "/__init__.py" for i in range(len(package_parts))], + "client_name": self.code_model.clients[0].name, + "namespace": self.code_model.namespace, + } + params.update(self.code_model.options) + params.update(kwargs) + return template.render(file_import=FileImport(self.code_model), **params) + + def serialize_pkgutil_init_file(self) -> str: + template = self.env.get_template("pkgutil_init.py.jinja2") + return template.render() + + def serialize_init_file(self, clients: List[Client]) -> str: + template = self.env.get_template("init.py.jinja2") + return template.render( + code_model=self.code_model, + clients=clients, + async_mode=self.async_mode, + ) + + def serialize_service_client_file(self, clients: List[Client]) -> str: + template = 
self.env.get_template("client_container.py.jinja2") + + imports = FileImport(self.code_model) + for client in clients: + imports.merge(client.imports(self.async_mode)) + + return template.render( + code_model=self.code_model, + clients=clients, + async_mode=self.async_mode, + get_serializer=ClientSerializer, + imports=FileImportSerializer(imports), + ) + + def serialize_vendor_file(self, clients: List[Client]) -> str: + template = self.env.get_template("vendor.py.jinja2") + + # configure imports + file_import = FileImport(self.code_model) + if self.code_model.need_mixin_abc: + file_import.add_submodule_import( + "abc", + "ABC", + ImportType.STDLIB, + ) + file_import.add_submodule_import( + "" if self.code_model.is_azure_flavor else "runtime", + f"{'Async' if self.async_mode else ''}PipelineClient", + ImportType.SDKCORE, + TypingSection.TYPING, + ) + file_import.add_msrest_import( + relative_path=".." if self.async_mode else ".", + msrest_import_type=MsrestImportType.SerializerDeserializer, + typing_section=TypingSection.TYPING, + ) + for client in clients: + if client.has_mixin: + file_import.add_submodule_import( + "._configuration", + f"{client.name}Configuration", + ImportType.LOCAL, + ) + if self.code_model.has_etag: + file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB) + file_import.add_submodule_import( + "", + "MatchConditions", + ImportType.SDKCORE, + ) + if self.code_model.has_form_data and self.code_model.options["models_mode"] == "dpg" and not self.async_mode: + file_import.add_submodule_import("typing", "IO", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Tuple", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Union", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Optional", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Mapping", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Dict", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB) + file_import.add_submodule_import("typing", "List", ImportType.STDLIB) + file_import.add_submodule_import( + "._model_base", + "SdkJSONEncoder", + ImportType.LOCAL, + ) + file_import.add_submodule_import( + "._model_base", + "Model", + ImportType.LOCAL, + ) + file_import.add_import("json", ImportType.STDLIB) + + return template.render( + code_model=self.code_model, + imports=FileImportSerializer( + file_import, + ), + async_mode=self.async_mode, + clients=clients, + ) + + def serialize_config_file(self, clients: List[Client]) -> str: + template = self.env.get_template("config_container.py.jinja2") + imports = FileImport(self.code_model) + for client in self.code_model.clients: + imports.merge(client.config.imports(self.async_mode)) + return template.render( + code_model=self.code_model, + async_mode=self.async_mode, + imports=FileImportSerializer(imports), + get_serializer=ConfigSerializer, + clients=clients, + ) + + def serialize_version_file(self) -> str: + template = self.env.get_template("version.py.jinja2") + return template.render(code_model=self.code_model) + + def serialize_serialization_file(self) -> str: + template = self.env.get_template("serialization.py.jinja2") + return template.render( + code_model=self.code_model, + ) + + def serialize_model_base_file(self) -> str: + template = self.env.get_template("model_base.py.jinja2") + return template.render(code_model=self.code_model, file_import=FileImport(self.code_model)) + + def serialize_validation_file(self) -> str: + template = 
self.env.get_template("validation.py.jinja2") + return template.render(code_model=self.code_model) + + def serialize_cross_language_definition_file(self) -> str: + cross_langauge_def_dict = { + f"{self.code_model.namespace}.models.{model.name}": model.cross_language_definition_id + for model in self.code_model.public_model_types + } + cross_langauge_def_dict.update( + { + f"{self.code_model.namespace}.models.{enum.name}": enum.cross_language_definition_id + for enum in self.code_model.enums + if not enum.internal + } + ) + cross_langauge_def_dict.update( + { + ( + f"{self.code_model.namespace}.{client.name}." + + ("" if operation_group.is_mixin else f"{operation_group.property_name}.") + + f"{operation.name}" + ): operation.cross_language_definition_id + for client in self.code_model.clients + for operation_group in client.operation_groups + for operation in operation_group.operations + if not operation.name.startswith("_") + } + ) + return json.dumps( + { + "CrossLanguagePackageId": self.code_model.cross_language_package_id, + "CrossLanguageDefinitionId": cross_langauge_def_dict, + }, + indent=4, + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/import_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/import_serializer.py new file mode 100644 index 0000000000..9106897668 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/import_serializer.py @@ -0,0 +1,126 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from copy import deepcopy +from typing import List +from ..models.imports import ( + ImportType, + FileImport, + ImportModel, + TypingSection, + TypeDefinition, +) + + +def _serialize_package(imports: List[ImportModel], delimiter: str) -> str: + buffer = [] + if any(i for i in imports if i.submodule_name is None): + buffer.append(f"import {imports[0].module_name}{f' as {imports[0].alias}' if imports[0].alias else ''}") + else: + import_str = ", ".join( + sorted( + set( + f"{i.submodule_name} as {i.alias}" if i.alias else i.submodule_name for i in imports # type: ignore + ) + ) + ) + buffer.append(f"from {imports[0].module_name} import {import_str}") + return delimiter.join(buffer) + + +def _serialize_versioned_package(i: ImportModel, delimiter: str) -> str: + if not i.version_modules: + return "" + buffer = [] + for n, (version, module_name, comment) in enumerate(i.version_modules): + buffer.append("{} sys.version_info >= {}:".format("if" if n == 0 else "elif", version)) + buffer.append( + f" from {module_name} import {i.submodule_name}{f' as {i.alias}' if i.alias else ''}" + f"{f' # {comment}' if comment else ''}" + ) + buffer.append("else:") + buffer.append( + f" from {i.module_name} import {i.submodule_name}{f' as {i.alias}' if i.alias else ''}" " # type: ignore" + ) + return delimiter.join(buffer) + + +def _serialize_import_type(imports: List[ImportModel], delimiter: str) -> str: + """Serialize a given import type.""" + import_list = [] + for module_name in sorted(set(i.module_name for i in imports)): + normal_imports = [i for i in imports if i.module_name == module_name and not i.version_modules] + versioned_imports = [i for i in imports if i.module_name == module_name and i.version_modules] + if normal_imports: + 
import_list.append(_serialize_package(normal_imports, delimiter)) + for i in versioned_imports: + import_list.append(_serialize_versioned_package(i, delimiter)) + return delimiter.join(import_list) + + +def _get_import_clauses(imports: List[ImportModel], delimiter: str) -> List[str]: + import_clause = [] + for import_type in ImportType: + imports_with_import_type = [i for i in imports if i.import_type == import_type] + if imports_with_import_type: + import_clause.append(_serialize_import_type(imports_with_import_type, delimiter)) + return import_clause + + +class FileImportSerializer: + def __init__(self, file_import: FileImport, async_mode: bool = False) -> None: + self.file_import = file_import + self.async_mode = async_mode + + def _get_imports_list(self, baseline_typing_section: TypingSection, add_conditional_typing: bool): + # If this is a python 3 file, our regular imports include the CONDITIONAL category + # If this is not a python 3 file, our typing imports include the CONDITIONAL category + file_import_copy = deepcopy(self.file_import) + if add_conditional_typing and any(self.file_import.get_imports_from_section(TypingSection.CONDITIONAL)): + # we switch the TypingSection key for the CONDITIONAL typing imports so we can merge + # the imports together + for i in file_import_copy.imports: + if i.typing_section == TypingSection.CONDITIONAL: + i.typing_section = baseline_typing_section + return file_import_copy.get_imports_from_section(baseline_typing_section) + + def _add_type_checking_import(self): + if any(self.file_import.get_imports_from_section(TypingSection.TYPING)): + self.file_import.add_submodule_import("typing", "TYPE_CHECKING", ImportType.STDLIB) + + def get_typing_definitions(self) -> str: + def declare_definition(type_name: str, type_definition: TypeDefinition) -> List[str]: + ret: List[str] = [] + definition_value = type_definition.async_definition if self.async_mode else type_definition.sync_definition + ret.append("{} = {}".format(type_name, definition_value)) + return ret + + if not self.file_import.type_definitions: + return "" + declarations: List[str] = [""] + for type_name, value in self.file_import.type_definitions.items(): + declarations.extend(declare_definition(type_name, value)) + return "\n".join(declarations) + + def __str__(self) -> str: + self._add_type_checking_import() + regular_imports = "" + regular_imports_list = self._get_imports_list( + baseline_typing_section=TypingSection.REGULAR, + add_conditional_typing=True, + ) + + if regular_imports_list: + regular_imports = "\n\n".join(_get_import_clauses(regular_imports_list, "\n")) + + typing_imports = "" + typing_imports_list = self._get_imports_list( + baseline_typing_section=TypingSection.TYPING, + add_conditional_typing=False, + ) + if typing_imports_list: + typing_imports += "\n\nif TYPE_CHECKING:\n " + typing_imports += "\n\n ".join(_get_import_clauses(typing_imports_list, "\n ")) + return regular_imports + typing_imports + self.get_typing_definitions() diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/metadata_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/metadata_serializer.py new file mode 100644 index 0000000000..35d374983f --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/metadata_serializer.py @@ -0,0 +1,198 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import functools +import json +from typing import List, Optional, Set, Tuple, Dict, Union, Any +from jinja2 import Environment +from ..models import ( + OperationGroup, + LROOperation, + PagingOperation, + TypingSection, + ImportType, + CodeModel, +) +from .builder_serializer import get_operation_serializer +from .import_serializer import FileImportSerializer + + +def _to_string(data: Union[Tuple[Any], List[Any], str]) -> str: + if isinstance(data, (list, tuple)): + return "".join([_to_string(item) for item in data]) + return str(data) + + +def _json_serialize_imports( + imports: Dict[ + TypingSection, + Dict[ + ImportType, + Dict[ + str, + Set[ + Optional[ + Union[ + str, + Tuple[str, str], + Tuple[ + str, + Optional[str], + Tuple[Tuple[Tuple[int, int], str, Optional[str]]], + ], + ] + ] + ], + ], + ], + ] +) -> str: + if not imports: + return "" + + json_serialize_imports = {} + # need to make name_import set -> list to make the dictionary json serializable + # not using an OrderedDict since we're iterating through a set and the order there varies + # going to sort the list instead + + for typing_section_key, typing_section_value in imports.items(): + json_import_type_dictionary = {} + for import_type_key, import_type_value in typing_section_value.items(): + json_package_name_dictionary = {} + for package_name, name_imports in import_type_value.items(): + name_import_ordered_list = [] + if name_imports: + name_import_ordered_list = list(name_imports) + name_import_ordered_list.sort( + key=lambda e: ( + _to_string(e) # type: ignore + if isinstance(e, (list, tuple)) + else e if isinstance(e, str) else "" + ) + ) + json_package_name_dictionary[package_name] = name_import_ordered_list + json_import_type_dictionary[import_type_key] = json_package_name_dictionary + json_serialize_imports[typing_section_key] = json_import_type_dictionary + return json.dumps(json_serialize_imports) + + +def _mixin_imports( + mixin_operation_group: Optional[OperationGroup], +) -> Tuple[Optional[str], Optional[str]]: + if not mixin_operation_group: + return None, None + + sync_mixin_imports = mixin_operation_group.imports_for_multiapi(async_mode=False) + async_mixin_imports = mixin_operation_group.imports_for_multiapi(async_mode=True) + + return _json_serialize_imports(sync_mixin_imports.to_dict()), _json_serialize_imports(async_mixin_imports.to_dict()) + + +def _mixin_typing_definitions( + mixin_operation_group: Optional[OperationGroup], +) -> Tuple[Optional[str], Optional[str]]: + if not mixin_operation_group: + return None, None + + sync_mixin_imports = mixin_operation_group.imports_for_multiapi(async_mode=False) + async_mixin_imports = mixin_operation_group.imports_for_multiapi(async_mode=True) + sync_mixin_typing_definitions = FileImportSerializer(sync_mixin_imports, False).get_typing_definitions() + async_mixin_typing_definitions = FileImportSerializer(async_mixin_imports, True).get_typing_definitions() + + return sync_mixin_typing_definitions, async_mixin_typing_definitions + + +class MetadataSerializer: + def __init__(self, code_model: CodeModel, env: Environment) -> None: + self.code_model = code_model + self.client = self.code_model.clients[0] # we only do one client for multiapi + self.env = env + + def _choose_api_version(self) -> Tuple[str, List[str]]: + chosen_version = "" + total_api_version_set: Set[str] = set() + for client in self.code_model.clients: + for 
operation_group in client.operation_groups: + total_api_version_set.update(operation_group.api_versions) + + total_api_version_list = list(total_api_version_set) + total_api_version_list.sort() + + # switching ' to " so json can decode the dict we end up writing to file + total_api_version_list = [str(api_version).replace("'", '"') for api_version in total_api_version_list] + if len(total_api_version_list) == 1: + chosen_version = total_api_version_list[0] + elif len(total_api_version_list) > 1: + module_version = self.code_model.namespace.split(".")[-1] + for api_version in total_api_version_list: + if "v{}".format(api_version.replace("-", "_")) == module_version: + chosen_version = api_version + + return chosen_version, total_api_version_list + + def serialize(self) -> str: + def _is_lro(operation): + return isinstance(operation, LROOperation) + + def _is_paging(operation): + return isinstance(operation, PagingOperation) + + mixin_operation_group: Optional[OperationGroup] = next( + ( + operation_group + for client in self.code_model.clients + for operation_group in client.operation_groups + if operation_group.is_mixin + ), + None, + ) + mixin_operations = mixin_operation_group.operations if mixin_operation_group else [] + sync_mixin_imports, async_mixin_imports = _mixin_imports(mixin_operation_group) + ( + sync_mixin_typing_definitions, + async_mixin_typing_definitions, + ) = _mixin_typing_definitions(mixin_operation_group) + + chosen_version, total_api_version_list = self._choose_api_version() + + # setting to true, because for multiapi we always generate with a version file with version 0.1.0 + self.code_model.options["package_version"] = "0.1.0" + template = self.env.get_template("metadata.json.jinja2") + + return template.render( + code_model=self.code_model, + chosen_version=chosen_version, + total_api_version_list=total_api_version_list, + client=self.client, + global_parameters=self.client.parameters, + mixin_operations=mixin_operations, + any=any, + is_lro=_is_lro, + is_paging=_is_paging, + str=str, + sync_mixin_imports=sync_mixin_imports, + async_mixin_imports=async_mixin_imports, + sync_mixin_typing_definitions=sync_mixin_typing_definitions, + async_mixin_typing_definitions=async_mixin_typing_definitions, + sync_client_imports=_json_serialize_imports(self.client.imports_for_multiapi(async_mode=False).to_dict()), + async_client_imports=_json_serialize_imports(self.client.imports_for_multiapi(async_mode=True).to_dict()), + sync_config_imports=_json_serialize_imports( + self.client.config.imports_for_multiapi(async_mode=False).to_dict() + ), + async_config_imports=_json_serialize_imports( + self.client.config.imports_for_multiapi(async_mode=True).to_dict() + ), + get_async_operation_serializer=functools.partial( + get_operation_serializer, + code_model=self.client.code_model, + async_mode=True, + ), + get_sync_operation_serializer=functools.partial( + get_operation_serializer, + code_model=self.client.code_model, + async_mode=False, + ), + has_credential=bool(self.client.credential), + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/model_init_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/model_init_serializer.py new file mode 100644 index 0000000000..5df688adbb --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/model_init_serializer.py @@ -0,0 +1,33 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from jinja2 import Environment +from ..models import CodeModel + + +class ModelInitSerializer: + def __init__(self, code_model: CodeModel, env: Environment) -> None: + self.code_model = code_model + self.env = env + + def serialize(self) -> str: + schemas = [s.name for s in self.code_model.public_model_types] + schemas.sort() + enums = [e.name for e in self.code_model.enums if not e.internal] if self.code_model.enums else None + + if enums: + enums.sort() + + # check to see if we have any duplicate names between enum and object schemas + model_enum_name_intersection = set(schemas).intersection(set(enums)) + if model_enum_name_intersection: + raise ValueError( + "We have models and enums sharing the following names: {}".format( + ", ".join(model_enum_name_intersection) + ) + ) + + template = self.env.get_template("model_init.py.jinja2") + return template.render(code_model=self.code_model, schemas=schemas, enums=enums) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/model_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/model_serializer.py new file mode 100644 index 0000000000..af36e16a21 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/model_serializer.py @@ -0,0 +1,314 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import List +from abc import ABC, abstractmethod + +from ..models import ModelType, Property, ConstantType, EnumValue +from ..models.imports import FileImport, TypingSection, MsrestImportType, ImportType +from .import_serializer import FileImportSerializer +from .base_serializer import BaseSerializer + + +def _documentation_string(prop: Property, description_keyword: str, docstring_type_keyword: str) -> List[str]: + retval: List[str] = [] + sphinx_prefix = f":{description_keyword} {prop.client_name}:" + description = prop.description(is_operation_file=False).replace("\\", "\\\\") + retval.append(f"{sphinx_prefix} {description}" if description else sphinx_prefix) + retval.append(f":{docstring_type_keyword} {prop.client_name}: {prop.type.docstring_type()}") + return retval + + +class _ModelSerializer(BaseSerializer, ABC): + @abstractmethod + def imports(self) -> FileImport: ... + + def serialize(self) -> str: + # Generate the models + template = self.env.get_template("model_container.py.jinja2") + return template.render( + code_model=self.code_model, + imports=FileImportSerializer(self.imports()), + str=str, + serializer=self, + ) + + @abstractmethod + def declare_model(self, model: ModelType) -> str: ... 
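+    # Illustrative note (not part of the original sources): declare_model() is filled in
+    # by the two concrete serializers below. For example (the class name "Cat" is
+    # hypothetical), MsrestModelSerializer would render a declaration such as
+    #     class Cat(_serialization.Model):
+    # while DpgModelSerializer bases generated models on _model_base.Model and appends a
+    # discriminator argument when the model carries a discriminator value.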
+ + @staticmethod + def escape_dot(s: str): + return s.replace(".", "\\\\.") + + @staticmethod + def input_documentation_string(prop: Property) -> List[str]: + # building the param line of the property doc + return _documentation_string(prop, "keyword", "paramtype") + + @staticmethod + def variable_documentation_string(prop: Property) -> List[str]: + return _documentation_string(prop, "ivar", "vartype") + + def super_call(self, model: ModelType) -> List[str]: + return [f"super().__init__({self.properties_to_pass_to_super(model)})"] + + @staticmethod + def initialize_discriminator_property(model: ModelType, prop: Property) -> str: + discriminator_value = f"'{model.discriminator_value}'" if model.discriminator_value else None + if not discriminator_value: + typing = "Optional[str]" + else: + typing = "str" + return f"self.{prop.client_name}: {typing} = {discriminator_value}" + + @staticmethod + def initialize_standard_property(prop: Property): + if not (prop.optional or prop.client_default_value is not None): + return f"{prop.client_name}: {prop.type_annotation()},{prop.pylint_disable()}" + return ( + f"{prop.client_name}: {prop.type_annotation()} = " + f"{prop.client_default_value_declaration},{prop.pylint_disable()}" + ) + + @staticmethod + def discriminator_docstring(model: ModelType) -> str: + return ( + "You probably want to use the sub-classes and not this class directly. " + f"Known sub-classes are: {', '.join(v.name for v in model.discriminated_subtypes.values())}" + ) + + @staticmethod + def _init_line_parameters(model: ModelType): + return [p for p in model.properties if not p.readonly and not p.is_discriminator and not p.constant] + + def init_line(self, model: ModelType) -> List[str]: + init_properties_declaration = [] + init_line_parameters = self._init_line_parameters(model) + init_line_parameters.sort(key=lambda x: x.optional) + if init_line_parameters: + init_properties_declaration.append("*,") + for param in init_line_parameters: + init_properties_declaration.append(self.initialize_standard_property(param)) + + return init_properties_declaration + + @staticmethod + def properties_to_pass_to_super(model: ModelType) -> str: + properties_to_pass_to_super = [] + for parent in model.parents: + for prop in model.properties: + if prop in parent.properties and not prop.is_discriminator and not prop.constant and not prop.readonly: + properties_to_pass_to_super.append(f"{prop.client_name}={prop.client_name}") + properties_to_pass_to_super.append("**kwargs") + return ", ".join(properties_to_pass_to_super) + + @abstractmethod + def initialize_properties(self, model: ModelType) -> List[str]: ... 
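+    # Illustrative note (not part of the original sources): the concrete subclasses below
+    # implement this differently. MsrestModelSerializer emits plain __init__ assignments
+    # (e.g. "self.name = name", or "self.name = None" for read-only properties), whereas
+    # DpgModelSerializer only initializes constant properties, since the remaining
+    # properties are declared as rest_field()/rest_discriminator() descriptors on the
+    # class body.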
+ + def need_init(self, model: ModelType) -> bool: + return (not model.internal) and bool(self.init_line(model) or model.discriminator) + + def pylint_disable(self, model: ModelType) -> str: + if model.flattened_property or self.initialize_properties(model): + return "" + if any(p for p in model.properties if p.is_discriminator and model.discriminator_value): + return "" + if model.parents and any( + "=" in prop for parent in model.parents for prop in self.init_line(parent) if self.need_init(parent) + ): + return "" + return " # pylint: disable=useless-super-delegation" + + +class MsrestModelSerializer(_ModelSerializer): + def imports(self) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_msrest_import( + relative_path="..", + msrest_import_type=MsrestImportType.Module, + typing_section=TypingSection.REGULAR, + ) + for model in self.code_model.model_types: + file_import.merge(model.imports(is_operation_file=False)) + for param in self._init_line_parameters(model): + file_import.merge(param.imports()) + + return file_import + + def declare_model(self, model: ModelType) -> str: + basename = ( + "msrest.serialization.Model" + if self.code_model.options["client_side_validation"] + else "_serialization.Model" + ) + if model.parents: + basename = ", ".join([m.name for m in model.parents]) + return f"class {model.name}({basename}):{model.pylint_disable()}" + + @staticmethod + def get_properties_to_initialize(model: ModelType) -> List[Property]: + if model.parents: + properties_to_initialize = list( + { + p.client_name: p + for bm in model.parents + for p in model.properties + if p not in bm.properties or p.is_discriminator or p.constant + }.values() + ) + else: + properties_to_initialize = model.properties + return properties_to_initialize + + def initialize_properties(self, model: ModelType) -> List[str]: + init_args = [] + for prop in self.get_properties_to_initialize(model): + if prop.is_discriminator: + init_args.append(self.initialize_discriminator_property(model, prop)) + elif prop.readonly: + init_args.append(f"self.{prop.client_name} = None") + elif not prop.constant: + init_args.append(f"self.{prop.client_name} = {prop.client_name}") + return init_args + + @staticmethod + def declare_property(prop: Property) -> str: + if prop.flattened_names: + attribute_key = ".".join(_ModelSerializer.escape_dot(n) for n in prop.flattened_names) + else: + attribute_key = _ModelSerializer.escape_dot(prop.wire_name) + if prop.type.xml_serialization_ctxt: + xml_metadata = f", 'xml': {{{prop.type.xml_serialization_ctxt}}}" + else: + xml_metadata = "" + return ( + f'"{prop.client_name}": {{"key": "{attribute_key}",' + f' "type": "{prop.msrest_deserialization_key}"{xml_metadata}}},' + ) + + +class DpgModelSerializer(_ModelSerializer): + def super_call(self, model: ModelType) -> List[str]: + super_call = f"super().__init__({self.properties_to_pass_to_super(model)})" + if model.flattened_property: + return [ + "_flattened_input = {k: kwargs.pop(k) for k in kwargs.keys() & self.__flattened_items}", + super_call, + "for k, v in _flattened_input.items():", + " setattr(self, k, v)", + ] + return [super_call] + + def imports(self) -> FileImport: + file_import = FileImport(self.code_model) + file_import.add_submodule_import( + "..", + "_model_base", + ImportType.LOCAL, + TypingSection.REGULAR, + ) + + for model in self.code_model.model_types: + if model.base == "json": + continue + file_import.merge(model.imports(is_operation_file=False)) + for prop in model.properties: + 
file_import.merge(prop.imports()) + if model.is_polymorphic: + file_import.add_submodule_import("typing", "Dict", ImportType.STDLIB) + if not model.internal and self.init_line(model): + file_import.add_submodule_import("typing", "overload", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Mapping", ImportType.STDLIB) + file_import.add_submodule_import("typing", "Any", ImportType.STDLIB) + return file_import + + def declare_model(self, model: ModelType) -> str: + basename = "_model_base.Model" + if model.parents: + basename = ", ".join([m.name for m in model.parents]) + if model.discriminator_value: + basename += f", discriminator='{model.discriminator_value}'" + return f"class {model.name}({basename}):{model.pylint_disable()}" + + @staticmethod + def get_properties_to_declare(model: ModelType) -> List[Property]: + if model.parents: + parent_properties = [p for bm in model.parents for p in bm.properties] + properties_to_declare = [ + p + for p in model.properties + if not any( + p.client_name == pp.client_name + and p.type_annotation() == pp.type_annotation() + and not p.is_base_discriminator + for pp in parent_properties + ) + ] + else: + properties_to_declare = model.properties + if any(p for p in properties_to_declare if p.client_name == "_"): + raise ValueError("We do not generate anonymous properties") + return properties_to_declare + + @staticmethod + def declare_property(prop: Property) -> str: + args = [] + if prop.client_name != prop.wire_name or prop.is_discriminator: + args.append(f'name="{prop.wire_name}"') + if prop.visibility: + v_list = ", ".join(f'"{x}"' for x in prop.visibility) + args.append(f"visibility=[{v_list}]") + if prop.client_default_value is not None: + args.append(f"default={prop.client_default_value_declaration}") + + if prop.is_multipart_file_input: + args.append("is_multipart_file_input=True") + elif hasattr(prop.type, "encode") and prop.type.encode: # type: ignore + args.append(f'format="{prop.type.encode}"') # type: ignore + + if prop.xml_metadata: + args.append(f"xml={prop.xml_metadata}") + + field = "rest_discriminator" if prop.is_discriminator else "rest_field" + type_ignore = ( + " # type: ignore" + if prop.is_discriminator and isinstance(prop.type, (ConstantType, EnumValue)) and prop.type.value + else "" + ) + generated_code = f'{prop.client_name}: {prop.type_annotation()} = {field}({", ".join(args)})' + decl = f"{generated_code}{type_ignore}" + if len(decl) + 4 > 120: + decl += " # pylint: disable=line-too-long" + return decl + + def initialize_properties(self, model: ModelType) -> List[str]: + init_args = [] + for prop in self.get_properties_to_declare(model): + if prop.constant and not prop.is_base_discriminator: + init_args.append(f"self.{prop.client_name}: {prop.type_annotation()} = " f"{prop.get_declaration()}") + return init_args + + @staticmethod + def _init_line_parameters(model: ModelType): + return [ + p + for p in model.properties + if p.is_base_discriminator or not p.is_discriminator and not p.constant and p.visibility != ["read"] + ] + + @staticmethod + def properties_to_pass_to_super(model: ModelType) -> str: + properties_to_pass_to_super = ["*args"] + for parent in model.parents: + for prop in model.properties: + if ( + prop.client_name in [prop.client_name for prop in parent.properties if prop.is_base_discriminator] + and prop.is_discriminator + and not prop.constant + and not prop.readonly + ): + properties_to_pass_to_super.append(f"{prop.client_name}={prop.get_declaration()}") + 
properties_to_pass_to_super.append("**kwargs") + return ", ".join(properties_to_pass_to_super) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/operation_groups_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/operation_groups_serializer.py new file mode 100644 index 0000000000..4c04efbcd2 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/operation_groups_serializer.py @@ -0,0 +1,89 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Optional, List, Union +import functools +from jinja2 import Environment + +from .utils import get_all_operation_groups_recursively +from ..models import ( + CodeModel, + OperationGroup, + RequestBuilder, + OverloadedRequestBuilder, + Client, + FileImport, +) +from .import_serializer import FileImportSerializer +from .builder_serializer import ( + get_operation_serializer, + RequestBuilderSerializer, +) +from .base_serializer import BaseSerializer + + +class OperationGroupsSerializer(BaseSerializer): + def __init__( + self, + code_model: CodeModel, + clients: List[Client], + env: Environment, + async_mode: bool, + operation_group: Optional[OperationGroup] = None, + ): + super().__init__(code_model, env) + self.clients = clients + self.async_mode = async_mode + self.operation_group = operation_group + + def _get_request_builders( + self, operation_group: OperationGroup + ) -> List[Union[OverloadedRequestBuilder, RequestBuilder]]: + return [ + r + for client in self.clients + for r in client.request_builders + if r.client.name == operation_group.client.name + and r.group_name == operation_group.identify_name + and not r.is_overload + and not r.abstract + and not r.is_lro # lro has already initial builder + ] + + def serialize(self) -> str: + if self.operation_group: + operation_groups = [self.operation_group] + else: + operation_groups = get_all_operation_groups_recursively(self.clients) + + imports = FileImport(self.code_model) + for operation_group in operation_groups: + imports.merge( + operation_group.imports( + async_mode=self.async_mode, + ) + ) + + template = self.env.get_or_select_template("operation_groups_container.py.jinja2") + + return template.render( + code_model=self.code_model, + operation_groups=operation_groups, + imports=FileImportSerializer( + imports, + async_mode=self.async_mode, + ), + async_mode=self.async_mode, + get_operation_serializer=functools.partial( + get_operation_serializer, + code_model=self.code_model, + async_mode=self.async_mode, + ), + request_builder_serializer=RequestBuilderSerializer( + self.code_model, + async_mode=False, + ), + get_request_builders=self._get_request_builders, + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/operations_init_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/operations_init_serializer.py new file mode 100644 index 0000000000..02232c527c --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/operations_init_serializer.py @@ -0,0 +1,44 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import List +from jinja2 import Environment + +from ..models.operation_group import OperationGroup +from ..models import CodeModel, Client + + +class OperationsInitSerializer: + def __init__( + self, + code_model: CodeModel, + clients: List[Client], + env: Environment, + async_mode: bool, + ) -> None: + self.code_model = code_model + self.clients = clients + self.env = env + self.async_mode = async_mode + + def operation_group_imports(self) -> List[str]: + def _get_filename(operation_group: OperationGroup) -> str: + return "_operations" if self.code_model.options["combine_operation_files"] else operation_group.filename + + return [ + f"from .{_get_filename(og)} import {og.class_name}" + for client in self.clients + for og in client.operation_groups + ] + + def serialize(self) -> str: + operation_group_init_template = self.env.get_template("operations_folder_init.py.jinja2") + + return operation_group_init_template.render( + code_model=self.code_model, + async_mode=self.async_mode, + operation_group_imports=self.operation_group_imports, + clients=self.clients, + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/parameter_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/parameter_serializer.py new file mode 100644 index 0000000000..f18cd7e1ec --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/parameter_serializer.py @@ -0,0 +1,221 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import List, Sequence, Union, Optional, Dict +from enum import Enum, auto + +from ..models import ( + Parameter, + ParameterLocation, + ListType, + ParameterDelimeter, + RequestBuilderParameter, + ClientParameter, + ConfigParameter, + ParameterType, +) +from ..models.parameter import _ParameterBase + + +class PopKwargType(Enum): + NO = auto() + SIMPLE = auto() + CASE_INSENSITIVE = auto() + + +SPECIAL_HEADER_SERIALIZATION: Dict[str, List[str]] = { + "repeatability-request-id": [ + """if "Repeatability-Request-ID" not in _headers:""", + """ _headers["Repeatability-Request-ID"] = str(uuid.uuid4())""", + ], + "repeatability-first-sent": [ + """if "Repeatability-First-Sent" not in _headers:""", + """ _headers["Repeatability-First-Sent"] = _SERIALIZER.serialize_data(""", + """ datetime.datetime.now(datetime.timezone.utc), "rfc-1123")""", + ], + "client-request-id": [], + "x-ms-client-request-id": [], + "return-client-request-id": [], + "etag": [ + """if_match = prep_if_match(etag, match_condition)""", + """if if_match is not None:""", + """ _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")""", + ], + "match-condition": [ + """if_none_match = prep_if_none_match(etag, match_condition)""", + """if if_none_match is not None:""", + """ _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str")""", + ], +} + + +class ParameterSerializer: + @staticmethod + def serialize_parameter(parameter: ParameterType, serializer_name: str) -> str: + optional_parameters = [] + + if parameter.skip_url_encoding: + optional_parameters.append("skip_quote=True") + + if parameter.delimiter and not parameter.explode: + if parameter.delimiter == ParameterDelimeter.COMMA: + div_char = "," + elif parameter.delimiter == ParameterDelimeter.SPACE: + div_char = " " + elif parameter.delimiter == ParameterDelimeter.PIPE: + div_char = "|" + elif parameter.delimiter == ParameterDelimeter.TAB: + div_char = "\t" + else: + raise ValueError(f"We do not support {parameter.delimiter} yet") + optional_parameters.append(f"div='{div_char}'") + + if parameter.explode: + if not isinstance(parameter.type, ListType): + raise ValueError("Got a explode boolean on a non-array schema") + type = parameter.type.element_type + else: + type = parameter.type + + serialization_constraints = type.serialization_constraints + if serialization_constraints: + optional_parameters += serialization_constraints + + origin_name = parameter.full_client_name + + parameters = [ + f'"{origin_name.lstrip("_")}"', + "q" if parameter.explode else origin_name, + f"'{type.serialization_type}'", + *optional_parameters, + ] + parameters_line = ", ".join(parameters) + + msrest_function_name = { + ParameterLocation.PATH: "url", + ParameterLocation.ENDPOINT_PATH: "url", + ParameterLocation.HEADER: "header", + ParameterLocation.QUERY: "query", + }[parameter.location] + + serialize_line = f"{serializer_name}.{msrest_function_name}({parameters_line})" + + if parameter.explode: + return f"[{serialize_line} if q is not None else '' for q in {origin_name}]" + return serialize_line + + @staticmethod + def serialize_path( + parameters: Union[ + List[Parameter], + List[RequestBuilderParameter], + List[ClientParameter], + List[ConfigParameter], + ], + serializer_name: str, + ) -> List[str]: + retval = ["path_format_arguments = {"] + retval.extend( + [ + ' "{}": {},'.format( + path_parameter.wire_name, + 
ParameterSerializer.serialize_parameter(path_parameter, serializer_name), + ) + for path_parameter in parameters + ] + ) + retval.append("}") + return retval + + @staticmethod + def serialize_query_header( + param: Parameter, + kwarg_name: str, + serializer_name: str, + is_legacy: bool, + ) -> List[str]: + if ( + not is_legacy + and param.location == ParameterLocation.HEADER + and param.wire_name.lower() in SPECIAL_HEADER_SERIALIZATION + ): + return SPECIAL_HEADER_SERIALIZATION[param.wire_name.lower()] + + set_parameter = "_{}['{}'] = {}".format( + kwarg_name, + param.wire_name, + ParameterSerializer.serialize_parameter(param, serializer_name), + ) + if not param.optional and (param.in_method_signature or param.constant): + retval = [set_parameter] + else: + retval = [ + f"if {param.full_client_name} is not None:", + f" {set_parameter}", + ] + return retval + + @staticmethod + def pop_kwargs_from_signature( + parameters: Sequence[_ParameterBase], + check_kwarg_dict: bool, + pop_headers_kwarg: PopKwargType, + pop_params_kwarg: PopKwargType, + check_client_input: bool = False, + operation_name: Optional[str] = None, + ) -> List[str]: + retval = [] + + def append_pop_kwarg(key: str, pop_type: PopKwargType) -> None: + if PopKwargType.CASE_INSENSITIVE == pop_type: + retval.append(f'_{key} = case_insensitive_dict(kwargs.pop("{key}", {{}}) or {{}})') + elif PopKwargType.SIMPLE == pop_type: + retval.append(f'_{key} = kwargs.pop("{key}", {{}}) or {{}}') + + append_pop_kwarg("headers", pop_headers_kwarg) + append_pop_kwarg("params", pop_params_kwarg) + if pop_headers_kwarg != PopKwargType.NO or pop_params_kwarg != PopKwargType.NO: + retval.append("") + for kwarg in parameters: + type_annotation = kwarg.type_annotation() + if kwarg.client_default_value is not None or kwarg.optional: + if check_client_input and kwarg.check_client_input: + default_value = f"self._config.{kwarg.client_name}" + else: + default_value = kwarg.client_default_value_declaration + if check_kwarg_dict and (kwarg.location in [ParameterLocation.HEADER, ParameterLocation.QUERY]): + kwarg_dict = "headers" if kwarg.location == ParameterLocation.HEADER else "params" + if ( + kwarg.client_name == "api_version" + and kwarg.code_model.options["multiapi"] + and operation_name is not None + ): + default_value = f"self._api_version{operation_name} or {default_value}" + default_value = f"_{kwarg_dict}.pop('{kwarg.wire_name}', {default_value})" + + retval.append( + f"{kwarg.client_name}: {type_annotation} = kwargs.pop('{kwarg.client_name}', " + f"{default_value})" + ) + else: + retval.append(f"{kwarg.client_name}: {type_annotation} = kwargs.pop('{kwarg.client_name}')") + return retval + + @staticmethod + def serialize_method( + *, + function_def: str, + method_name: str, + need_self_param: bool, + method_param_signatures: List[str], + pylint_disable: str = "", + ): + lines: List[str] = [] + first_line = f"{function_def} {method_name}({pylint_disable}" + lines.append(first_line) + if need_self_param: + lines.append(" self,") + lines.extend([(" " + line) for line in method_param_signatures]) + lines.append(")") + return "\n".join(lines) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/patch_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/patch_serializer.py new file mode 100644 index 0000000000..8e9c8ef8e7 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/patch_serializer.py @@ -0,0 +1,19 @@ +# 
------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from .import_serializer import FileImportSerializer +from ..models import ImportType, FileImport +from .base_serializer import BaseSerializer + + +class PatchSerializer(BaseSerializer): + def serialize(self) -> str: + template = self.env.get_template("patch.py.jinja2") + imports = FileImport(self.code_model) + imports.add_submodule_import("typing", "List", ImportType.STDLIB) + return template.render( + code_model=self.code_model, + imports=FileImportSerializer(imports), + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/request_builders_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/request_builders_serializer.py new file mode 100644 index 0000000000..f982984364 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/request_builders_serializer.py @@ -0,0 +1,52 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import List +from jinja2 import Environment + +from ..models import FileImport +from .import_serializer import FileImportSerializer +from ..models import CodeModel, RequestBuilderType +from .builder_serializer import RequestBuilderSerializer +from .base_serializer import BaseSerializer + + +class RequestBuildersSerializer(BaseSerializer): + def __init__( + self, + code_model: CodeModel, + env: Environment, + request_builders: List[RequestBuilderType], + ) -> None: + super().__init__(code_model, env) + self.request_builders = request_builders + self.group_name = request_builders[0].group_name + + @property + def imports(self) -> FileImport: + file_import = FileImport(self.code_model) + for request_builder in self.request_builders: + if request_builder.group_name == self.group_name: + file_import.merge(request_builder.imports()) + return file_import + + def serialize_init(self) -> str: + template = self.env.get_template("rest_init.py.jinja2") + return template.render( + code_model=self.code_model, + request_builders=[r for r in self.request_builders if not r.is_overload], + ) + + def serialize_request_builders(self) -> str: + template = self.env.get_template("request_builders.py.jinja2") + + return template.render( + code_model=self.code_model, + request_builders=[rb for rb in self.request_builders if not rb.abstract], + imports=FileImportSerializer( + self.imports, + ), + request_builder_serializer=RequestBuilderSerializer(self.code_model, async_mode=False), + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/sample_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/sample_serializer.py new file mode 100644 index 0000000000..024c4d4034 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/sample_serializer.py @@ -0,0 +1,168 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import logging +from typing import Dict, Any, Union, Tuple +from jinja2 import Environment + +from ..models.operation import OperationBase +from .import_serializer import FileImportSerializer +from .base_serializer import BaseSerializer +from ..models import ( + CodeModel, + KeyCredentialType, + TokenCredentialType, + ImportType, + OperationGroup, + Parameter, + BodyParameter, + FileImport, +) +from .utils import get_namespace_config, get_namespace_from_package_name + +_LOGGER = logging.getLogger(__name__) + + +class SampleSerializer(BaseSerializer): + def __init__( + self, + code_model: CodeModel, + env: Environment, + operation_group: OperationGroup, + operation: OperationBase[Any], + sample: Dict[str, Any], + file_name: str, + ) -> None: + super().__init__(code_model, env) + self.operation_group = operation_group + self.operation = operation + self.sample = sample + self.file_name = file_name + self.sample_params = sample.get("parameters", {}) + + def _imports(self) -> FileImportSerializer: + imports = FileImport(self.code_model) + namespace_from_package_name = get_namespace_from_package_name(self.code_model.options["package_name"]) + namespace_config = get_namespace_config(self.code_model.namespace, self.code_model.options["multiapi"]) + namespace = namespace_from_package_name or namespace_config + # mainly for "azure-mgmt-rdbms" + if not self.code_model.options["multiapi"] and namespace_config.count(".") > namespace_from_package_name.count( + "." + ): + namespace = namespace_config + client = self.code_model.clients[0] + imports.add_submodule_import(namespace, client.name, ImportType.LOCAL) + credential_type = getattr(client.credential, "type", None) + if isinstance(credential_type, TokenCredentialType): + imports.add_submodule_import("azure.identity", "DefaultAzureCredential", ImportType.SDKCORE) + elif isinstance(credential_type, KeyCredentialType): + imports.add_import("os", ImportType.STDLIB) + imports.add_submodule_import( + "credentials", + "AzureKeyCredential", + ImportType.SDKCORE, + ) + for param in self.operation.parameters.positional + self.operation.parameters.keyword_only: + if not param.client_default_value and not param.optional and param.wire_name in self.sample_params: + imports.merge(param.type.imports_for_sample()) + return FileImportSerializer(imports, True) + + def _client_params(self) -> Dict[str, Any]: + # client params + special_param = {} + credential_type = getattr(self.code_model.clients[0].credential, "type", None) + if isinstance(credential_type, TokenCredentialType): + special_param.update({"credential": "DefaultAzureCredential()"}) + elif isinstance(credential_type, KeyCredentialType): + special_param.update({"credential": 'AzureKeyCredential(key=os.getenv("AZURE_KEY"))'}) + + params = [ + p + for p in ( + self.code_model.clients[0].parameters.positional + self.code_model.clients[0].parameters.keyword_only + ) + if not (p.optional or p.client_default_value) + ] + client_params = { + p.client_name: special_param.get( + p.client_name, + f'"{self.sample_params.get(p.wire_name) or p.client_name.upper()}"', + ) + for p in params + } + + return client_params + + @staticmethod + def handle_param(param: Union[Parameter, BodyParameter], param_value: Any) -> str: + if isinstance(param_value, str): + if any(i in param_value for i in '\r\n"'): + return f'"""{param_value}"""' + + return 
param.type.serialize_sample_value(param_value) + + # prepare operation parameters + def _operation_params(self) -> Dict[str, Any]: + params = [ + p + for p in (self.operation.parameters.positional + self.operation.parameters.keyword_only) + if not p.client_default_value + ] + failure_info = "fail to find required param named {}" + operation_params = {} + for param in params: + if not param.optional: + param_value = self.sample_params.get(param.wire_name) + if not param_value: + raise Exception(failure_info.format(param.client_name)) # pylint: disable=broad-exception-raised + operation_params[param.client_name] = self.handle_param(param, param_value) + return operation_params + + def _operation_group_name(self) -> str: + if self.operation_group.is_mixin: + return "" + return f".{self.operation_group.property_name}" + + def _operation_result(self) -> Tuple[str, str]: + is_response_none = "None" in self.operation.response_type_annotation(async_mode=False) + lro = ".result()" + if is_response_none: + paging, normal_print, return_var = "", "", "" + else: + paging = "\n for item in response:\n print(item)" + normal_print = "\n print(response)" + return_var = "response = " + + if self.operation.operation_type == "paging": + return paging, return_var + if self.operation.operation_type == "lro": + return lro + normal_print, return_var + if self.operation.operation_type == "lropaging": + return lro + paging, return_var + return normal_print, return_var + + def _operation_name(self) -> str: + return f".{self.operation.name}" + + def _origin_file(self) -> str: + name = self.sample.get("x-ms-original-file", "") + if "specification" in name: + return "specification" + name.split("specification")[-1] + return name if self.code_model.options["from_typespec"] else "" + + def serialize(self) -> str: + operation_result, return_var = self._operation_result() + return self.env.get_template("sample.py.jinja2").render( + code_model=self.code_model, + file_name=self.file_name, + operation_result=operation_result, + operation_params=self._operation_params(), + operation_group_name=self._operation_group_name(), + operation_name=self._operation_name(), + imports=self._imports(), + client_params=self._client_params(), + origin_file=self._origin_file(), + return_var=return_var, + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/test_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/test_serializer.py new file mode 100644 index 0000000000..074d124a5b --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/test_serializer.py @@ -0,0 +1,292 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Dict, Any, List, Optional +from jinja2 import Environment + +from .import_serializer import FileImportSerializer +from .base_serializer import BaseSerializer +from ..models import ( + CodeModel, + ImportType, + OperationGroup, + Client, + OperationType, + ModelType, + BaseType, + CombinedType, + FileImport, +) +from .utils import get_namespace_from_package_name, json_dumps_template + + +def is_lro(operation_type: str) -> bool: + return operation_type in ("lro", "lropaging") + + +def is_paging(operation_type: str) -> bool: + return operation_type in ("paging", "lropaging") + + +def is_common_operation(operation_type: str) -> bool: + return operation_type == "operation" + + +class TestName: + def __init__(self, code_model: CodeModel, client_name: str, *, is_async: bool = False) -> None: + self.code_model = code_model + self.client_name = client_name + self.is_async = is_async + + @property + def async_suffix_capt(self) -> str: + return "Async" if self.is_async else "" + + @property + def create_client_name(self) -> str: + return "create_async_client" if self.is_async else "create_client" + + @property + def prefix(self) -> str: + return self.client_name.replace("Client", "") + + @property + def preparer_name(self) -> str: + if self.code_model.options["azure_arm"]: + return "RandomNameResourceGroupPreparer" + return self.prefix + "Preparer" + + @property + def base_test_class_name(self) -> str: + if self.code_model.options["azure_arm"]: + return "AzureMgmtRecordedTestCase" + return f"{self.client_name}TestBase{self.async_suffix_capt}" + + +class TestCase: + def __init__( + self, + operation_groups: List[OperationGroup], + params: Dict[str, Any], + operation: OperationType, + *, + is_async: bool = False, + ) -> None: + self.operation_groups = operation_groups + self.params = params + self.operation = operation + self.is_async = is_async + + @property + def name(self) -> str: + if self.operation_groups[-1].is_mixin: + return self.operation.name + return "_".join([og.property_name for og in self.operation_groups] + [self.operation.name]) + + @property + def operation_group_prefix(self) -> str: + if self.operation_groups[-1].is_mixin: + return "" + return "." 
+ ".".join([og.property_name for og in self.operation_groups]) + + @property + def response(self) -> str: + if self.is_async: + if is_lro(self.operation.operation_type): + return "response = await (await " + if is_common_operation(self.operation.operation_type): + return "response = await " + return "response = " + + @property + def lro_comment(self) -> str: + return " # call '.result()' to poll until service return final result" + + @property + def operation_suffix(self) -> str: + if is_lro(self.operation.operation_type): + extra = ")" if self.is_async else "" + return f"{extra}.result(){self.lro_comment}" + return "" + + @property + def extra_operation(self) -> str: + if is_paging(self.operation.operation_type): + async_str = "async " if self.is_async else "" + return f"result = [r {async_str}for r in response]" + return "" + + +class Test(TestName): + def __init__( + self, + code_model: CodeModel, + client_name: str, + operation_group: OperationGroup, + testcases: List[TestCase], + test_class_name: str, + *, + is_async: bool = False, + ) -> None: + super().__init__(code_model, client_name, is_async=is_async) + self.operation_group = operation_group + self.testcases = testcases + self.test_class_name = test_class_name + + +class TestGeneralSerializer(BaseSerializer): + def __init__(self, code_model: CodeModel, env: Environment, *, is_async: bool = False) -> None: + super().__init__(code_model, env) + self.is_async = is_async + + @property + def aio_str(self) -> str: + return ".aio" if self.is_async else "" + + @property + def test_names(self) -> List[TestName]: + return [TestName(self.code_model, c.name, is_async=self.is_async) for c in self.code_model.clients] + + def add_import_client(self, imports: FileImport) -> None: + namespace = get_namespace_from_package_name(self.code_model.options["package_name"]) + for client in self.code_model.clients: + imports.add_submodule_import(namespace + self.aio_str, client.name, ImportType.STDLIB) + + @property + def import_clients(self) -> FileImportSerializer: + imports = self.init_file_import() + + imports.add_submodule_import("devtools_testutils", "AzureRecordedTestCase", ImportType.STDLIB) + if not self.is_async: + imports.add_import("functools", ImportType.STDLIB) + imports.add_submodule_import("devtools_testutils", "PowerShellPreparer", ImportType.STDLIB) + self.add_import_client(imports) + + return FileImportSerializer(imports, self.is_async) + + def serialize_conftest(self) -> str: + return self.env.get_template("conftest.py.jinja2").render( + test_names=self.test_names, + code_model=self.code_model, + ) + + def serialize_testpreparer(self) -> str: + return self.env.get_template("testpreparer.py.jinja2").render( + test_names=self.test_names, + imports=self.import_clients, + code_model=self.code_model, + ) + + +class TestSerializer(TestGeneralSerializer): + def __init__( + self, + code_model: CodeModel, + env: Environment, + *, + client: Client, + operation_group: OperationGroup, + is_async: bool = False, + ) -> None: + super().__init__(code_model, env, is_async=is_async) + self.client = client + self.operation_group = operation_group + + @property + def import_test(self) -> FileImportSerializer: + imports = self.init_file_import() + test_name = TestName(self.code_model, self.client.name, is_async=self.is_async) + async_suffix = "_async" if self.is_async else "" + imports.add_submodule_import( + ("devtools_testutils" if self.code_model.options["azure_arm"] else "testpreparer" + async_suffix), + test_name.base_test_class_name, + 
ImportType.LOCAL, + ) + imports.add_submodule_import( + ("devtools_testutils" if self.code_model.options["azure_arm"] else "testpreparer"), + test_name.preparer_name, + ImportType.LOCAL, + ) + imports.add_submodule_import( + "devtools_testutils" + self.aio_str, + "recorded_by_proxy" + async_suffix, + ImportType.LOCAL, + ) + if self.code_model.options["azure_arm"]: + self.add_import_client(imports) + return FileImportSerializer(imports, self.is_async) + + @property + def breadth_search_operation_group(self) -> List[List[OperationGroup]]: + result = [] + queue = [[self.operation_group]] + while queue: + current = queue.pop(0) + if current[-1].operations: + result.append(current) + if current[-1].operation_groups: + queue.extend([current + [og] for og in current[-1].operation_groups]) + return result + + def get_sub_type(self, param_type: ModelType) -> ModelType: + if param_type.discriminated_subtypes: + for item in param_type.discriminated_subtypes.values(): + return self.get_sub_type(item) + return param_type + + def get_model_type(self, param_type: BaseType) -> Optional[ModelType]: + if isinstance(param_type, ModelType): + return param_type + if isinstance(param_type, CombinedType): + return param_type.target_model_subtype((ModelType,)) + return None + + def get_operation_params(self, operation: OperationType) -> Dict[str, Any]: + operation_params = {} + required_params = [p for p in operation.parameters.method if not p.optional] + for param in required_params: + model_type = self.get_model_type(param.type) + param_type = self.get_sub_type(model_type) if model_type else param.type + operation_params[param.client_name] = json_dumps_template(param_type.get_json_template_representation()) + return operation_params + + def get_test(self) -> Test: + testcases = [] + for operation_groups in self.breadth_search_operation_group: + for operation in operation_groups[-1].operations: + if operation.internal or operation.is_lro_initial_operation: + continue + operation_params = self.get_operation_params(operation) + testcase = TestCase( + operation_groups=operation_groups, + params=operation_params, + operation=operation, + is_async=self.is_async, + ) + testcases.append(testcase) + if not testcases: + raise Exception("no public operation to test") # pylint: disable=broad-exception-raised + + return Test( + code_model=self.code_model, + client_name=self.client.name, + operation_group=self.operation_group, + testcases=testcases, + test_class_name=self.test_class_name, + is_async=self.is_async, + ) + + @property + def test_class_name(self) -> str: + test_name = TestName(self.code_model, self.client.name, is_async=self.is_async) + class_name = "" if self.operation_group.is_mixin else self.operation_group.class_name + return f"Test{test_name.prefix}{class_name}{test_name.async_suffix_capt}" + + def serialize_test(self) -> str: + return self.env.get_template("test.py.jinja2").render( + imports=self.import_test, + code_model=self.code_model, + test=self.get_test(), + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/types_serializer.py b/packages/http-client-python/generator/pygen/codegen/serializers/types_serializer.py new file mode 100644 index 0000000000..3e143f5209 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/types_serializer.py @@ -0,0 +1,31 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from ..models.imports import FileImport, ImportType +from .import_serializer import FileImportSerializer +from .base_serializer import BaseSerializer + + +class TypesSerializer(BaseSerializer): + def imports(self) -> FileImport: + file_import = FileImport(self.code_model) + if self.code_model.named_unions: + file_import.add_submodule_import( + "typing", + "Union", + ImportType.STDLIB, + ) + for nu in self.code_model.named_unions: + file_import.merge(nu.imports(relative_path=".", model_typing=True, is_types_file=True)) + return file_import + + def serialize(self) -> str: + # Generate the models + template = self.env.get_template("types.py.jinja2") + return template.render( + code_model=self.code_model, + imports=FileImportSerializer(self.imports()), + serializer=self, + ) diff --git a/packages/http-client-python/generator/pygen/codegen/serializers/utils.py b/packages/http-client-python/generator/pygen/codegen/serializers/utils.py new file mode 100644 index 0000000000..7cd6f7c926 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/serializers/utils.py @@ -0,0 +1,68 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import json +from typing import Optional, List, Any +from pathlib import Path + +from ..models import Client, OperationGroup + + +def method_signature_and_response_type_annotation_template( + *, + method_signature: str, + response_type_annotation: str, +) -> str: + return f"{method_signature} -> {response_type_annotation}:" + + +def extract_sample_name(file_path: str) -> str: + file = file_path.split("specification")[-1] + return Path(file).parts[-1].replace(".json", "") + + +def strip_end(namespace: str) -> str: + return ".".join(namespace.split(".")[:-1]) + + +def get_namespace_config(namespace: str, multiapi: bool) -> str: + return strip_end(namespace) if multiapi else namespace + + +def get_namespace_from_package_name(package_name: Optional[str]) -> str: + return (package_name or "").replace("-", ".") + + +def get_all_operation_groups_recursively(clients: List[Client]) -> List[OperationGroup]: + operation_groups = [] + queue = [] + for client in clients: + queue.extend(client.operation_groups) + while queue: + operation_groups.append(queue.pop(0)) + if operation_groups[-1].operation_groups: + queue.extend(operation_groups[-1].operation_groups) + return operation_groups + + +def _improve_json_string(template_representation: str) -> Any: + origin = template_representation.split("\n") + final = [] + for line in origin: + idx0 = line.find("#") + idx1 = line.rfind('"') + modified_line = "" + if idx0 > -1 and idx1 > -1: + modified_line = line[:idx0] + line[idx1:] + " " + line[idx0:idx1] + "\n" + else: + modified_line = line + "\n" + modified_line = modified_line.replace('"', "").replace("\\", '"') + final.append(modified_line) + return "".join(final) + + +def json_dumps_template(template_representation: Any) -> Any: + # only for template use, since it wraps everything in strings + return _improve_json_string(json.dumps(template_representation, indent=4)) diff --git a/packages/http-client-python/generator/pygen/codegen/templates/client.py.jinja2 
b/packages/http-client-python/generator/pygen/codegen/templates/client.py.jinja2 new file mode 100644 index 0000000000..1b5d3c0982 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/client.py.jinja2 @@ -0,0 +1,37 @@ +{{ serializer.class_definition() }} + """{{ op_tools.wrap_string(client.description, "\n") | indent }} + + {{ op_tools.serialize_with_wrap(serializer.property_descriptions(async_mode), "\n ") | indent }} + {{ serializer.init_signature_and_response_type_annotation(async_mode) | indent }} + {% if serializer.should_init_super %} + super().__init__() + {% endif %} + {% if client.has_parameterized_host %} + {{ serializer.host_variable_name }} = {{ keywords.escape_str(client.url) }}{{ client.url_pylint_disable }} + {% endif %} + {{ serializer.initialize_config() }} + {{ op_tools.serialize(serializer.initialize_pipeline_client(async_mode)) | indent(8) }} + + {{ op_tools.serialize(serializer.serializers_and_operation_groups_properties()) | indent(8) }} + + {% set http_response = keywords.async_class + "HttpResponse" %} + {{ serializer.send_request_signature_and_response_type_annotation(async_mode) | indent }} + {{ op_tools.serialize(serializer.send_request_description(async_mode)) | indent(8) }} + request_copy = deepcopy(request) + {% if client.parameters.path %} + {{ op_tools.serialize(serializer.serialize_path()) | indent(8) }} + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + {% else %} + request_copy.url = self._client.format_url(request_copy.url) + {% endif %} + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + {{ keywords.def }} close(self) -> None: + {{ keywords.await }}self._client.close() + + {{ keywords.def }} __{{ keywords.async_prefix }}enter__(self) -> Self: + {{ keywords.await }}self._client.__{{ keywords.async_prefix }}enter__() + return self + + {{ keywords.def }} __{{ keywords.async_prefix }}exit__(self, *exc_details: Any) -> None: + {{ keywords.await }}self._client.__{{ keywords.async_prefix }}exit__(*exc_details) diff --git a/packages/http-client-python/generator/pygen/codegen/templates/client_container.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/client_container.py.jinja2 new file mode 100644 index 0000000000..33de339aff --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/client_container.py.jinja2 @@ -0,0 +1,12 @@ +{% import 'keywords.jinja2' as keywords with context %} +{% import 'operation_tools.jinja2' as op_tools %} +{# actual template starts here #} +# coding=utf-8 +{{ code_model.options['license_header'] }} + +{{ imports }} + +{% for client in clients %} + {% set serializer = get_serializer(client) %} +{% include "client.py.jinja2" %} +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/config.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/config.py.jinja2 new file mode 100644 index 0000000000..57e8daa314 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/config.py.jinja2 @@ -0,0 +1,73 @@ +class {{ client.name }}Configuration: {{ client.config.pylint_disable() }} + """Configuration for {{ client.name }}. + + Note that all parameters used to create this instance are saved as instance + attributes. 
+{% if client.config.parameters.method | first %} + +{% endif %} + {{ op_tools.serialize_with_wrap(serializer.property_descriptions(async_mode), "\n ") | indent }} + {{ serializer.init_signature_and_response_type_annotation(async_mode) | indent }} + {% if client.config.parameters.kwargs_to_pop %} + {{ op_tools.serialize(serializer.pop_kwargs_from_signature()) | indent(8) }} + {% endif %} +{% if serializer.check_required_parameters() %} + {{ op_tools.serialize(serializer.check_required_parameters()) | indent(8) -}} +{% endif %} + +{% for parameter in client.config.parameters.method %} + self.{{ parameter.client_name }} = {{ parameter.client_name }} +{% endfor %} +{% if serializer.set_constants() %} + {{ op_tools.serialize(serializer.set_constants()) | indent(8) -}} +{% endif %} +{% if client.credential %} + {% set cred_scopes = client.credential.type if client.credential.type.policy is defined and client.credential.type.policy.credential_scopes is defined %} + {% if not cred_scopes %} + {% set cred_scopes = client.credential.type.types | selectattr("policy.credential_scopes") | first if client.credential.type.types is defined %} + {% endif %} + {% if cred_scopes %} + self.credential_scopes = kwargs.pop('credential_scopes', {{ cred_scopes.policy.credential_scopes }}) + {% endif %} +{% endif %} + kwargs.setdefault('sdk_moniker', '{{ client.config.sdk_moniker }}/{}'.format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + +{% if client.credential and client.credential.type.types is defined %} + def _infer_policy(self, **kwargs): + {% for cred_type in client.credential.type.types %} + if {{ cred_type.instance_check_template.format("self.credential") }}: + return {{ cred_type.policy.call(async_mode) }} + {% endfor %} + raise TypeError(f"Unsupported credential: {self.credential}") +{% endif %} + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + {% if code_model.is_azure_flavor %} + self.http_logging_policy = kwargs.get('http_logging_policy') or {{ "ARM" if client.code_model.options['azure_arm'] else "policies." 
}}HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.{{ keywords.async_class }}RedirectPolicy(**kwargs) + {% endif %} + self.retry_policy = kwargs.get('retry_policy') or policies.{{ keywords.async_class }}RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + {% if client.credential and client.credential.type.policy is defined %} + {# only adding this if credential_scopes is not passed during code generation #} + {% if client.credential.type.policy.credential_scopes is defined and client.credential.type.policy.credential_scopes | length == 0 %} + if not self.credential_scopes and not self.authentication_policy: + raise ValueError("You must provide either credential_scopes or authentication_policy as kwargs") + {% endif %} + if self.credential and not self.authentication_policy: + self.authentication_policy = {{ client.credential.type.policy.call(async_mode) }} + {% endif %} + {% if client.credential and client.credential.type.types is defined %} + if self.credential and not self.authentication_policy: + self.authentication_policy = self._infer_policy(**kwargs) + {% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/config_container.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/config_container.py.jinja2 new file mode 100644 index 0000000000..9a3c263e2f --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/config_container.py.jinja2 @@ -0,0 +1,16 @@ +{% import 'keywords.jinja2' as keywords with context %} +{% import 'operation_tools.jinja2' as op_tools %} +{# actual template starts here #} +# coding=utf-8 +{{ code_model.options['license_header'] }} + +{{ imports }} + +{% if not code_model.options['package_version'] %} +VERSION = "unknown" +{% endif %} + +{% for client in clients %} + {% set serializer = get_serializer(client) %} +{% include "config.py.jinja2" %} +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/conftest.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/conftest.py.jinja2 new file mode 100644 index 0000000000..70932734f3 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/conftest.py.jinja2 @@ -0,0 +1,28 @@ +# coding=utf-8 +{{ code_model.options['license_header'] }} +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import test_proxy, add_general_regex_sanitizer, add_body_key_sanitizer, add_header_regex_sanitizer + +load_dotenv() + +# avoid recording sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + {% for test_name in test_names %} + {% set prefix_upper = "AZURE" if code_model.options["azure_arm"] else test_name.prefix|upper %} + {% set prefix_lower = test_name.prefix|lower %} + {{ prefix_lower }}_subscription_id = os.environ.get("{{ prefix_upper }}_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + {{ prefix_lower }}_tenant_id = os.environ.get("{{ prefix_upper }}_TENANT_ID", "00000000-0000-0000-0000-000000000000") + {{ prefix_lower }}_client_id = os.environ.get("{{ prefix_upper }}_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + {{ prefix_lower }}_client_secret = os.environ.get("{{ prefix_upper }}_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex={{
prefix_lower }}_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex={{ prefix_lower }}_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex={{ prefix_lower }}_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex={{ prefix_lower }}_client_secret, value="00000000-0000-0000-0000-000000000000") + + {% endfor %} + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/packages/http-client-python/generator/pygen/codegen/templates/enum.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/enum.py.jinja2 new file mode 100644 index 0000000000..047d64c30d --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/enum.py.jinja2 @@ -0,0 +1,13 @@ + +class {{ enum.name }}({{ enum.value_type.type_annotation(is_operation_file=False) }}, Enum, metaclass=CaseInsensitiveEnumMeta): + {% if enum.yaml_data.get("description") %} + """{{ op_tools.wrap_string(enum.yaml_data["description"], "\n ") }} + """ + {% endif %} + + {% for value in enum.values %} + {{ value.name }} = {{ enum.value_type.get_declaration(value.value) }} + {% if value.description(is_operation_file=False) %} + """{{ op_tools.wrap_string(value.description(is_operation_file=False), "\n ") }}""" + {% endif %} + {% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/enum_container.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/enum_container.py.jinja2 new file mode 100644 index 0000000000..099fbb072a --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/enum_container.py.jinja2 @@ -0,0 +1,10 @@ +{% import 'operation_tools.jinja2' as op_tools %} +# coding=utf-8 +{{ code_model.options['license_header'] }} + +from enum import Enum +from {{ code_model.core_library }}{{ "" if code_model.is_azure_flavor else ".utils" }} import CaseInsensitiveEnumMeta + +{% for enum in code_model.enums | sort %} +{% include "enum.py.jinja2" %} +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/init.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/init.py.jinja2 new file mode 100644 index 0000000000..9f70f40bd6 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/init.py.jinja2 @@ -0,0 +1,24 @@ +{% import 'keywords.jinja2' as keywords %} +# coding=utf-8 +{{ code_model.options['license_header'] }} + +{% if clients %} + {% for client in clients %} +from .{{ client.filename }} import {{ client.name }} + {% endfor %} +{% endif %} +{% if not async_mode and code_model.options['package_version']%} +from ._version import VERSION + +__version__ = VERSION +{% endif %} + +{{ keywords.patch_imports(try_except=True) }} +__all__ = [ + {% for client in clients %} + {{ keywords.escape_str(client.name) }}, + {% endfor %} +] +{{ keywords.extend_all }} + +_patch_sdk() diff --git a/packages/http-client-python/generator/pygen/codegen/templates/keywords.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/keywords.jinja2 new file mode 100644 index 0000000000..6ee92a4141 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/keywords.jinja2 @@ -0,0 +1,19 @@ +{% set def = "async def" if async_mode else "def" %} +{% set async_prefix = "a" if 
async_mode else "" %} +{% set await = "await " if async_mode else "" %} +{% set async_class = "Async" if async_mode else "" %} +{% macro escape_str(s) %}'{{ s|replace("'", "\\'") }}'{% endmacro %} +{% set kwargs_declaration = "**kwargs: Any" %} +{% set extend_all = "__all__.extend([p for p in _patch_all if p not in __all__])" %} +{% macro patch_imports(try_except=False) %} +{% set indentation = " " if try_except else "" %} +{% if try_except %} +try: +{% endif %} +{{ indentation }}from ._patch import __all__ as _patch_all +{{ indentation }}from ._patch import * # pylint: disable=unused-wildcard-import +{% if try_except %} +except ImportError: + _patch_all = [] +{% endif %} +from ._patch import patch_sdk as _patch_sdk{% endmacro %} \ No newline at end of file diff --git a/packages/http-client-python/generator/pygen/codegen/templates/lro_operation.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/lro_operation.py.jinja2 new file mode 100644 index 0000000000..bca9dcf2e4 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/lro_operation.py.jinja2 @@ -0,0 +1,16 @@ +{% import 'operation_tools.jinja2' as op_tools with context %} +{# actual template starts here #} +{% if operation.overloads and operation.include_documentation %} +{{ op_tools.generate_overloads(operation_serializer, operation) }} +{% endif %} +{{ operation_serializer.method_signature_and_response_type_annotation(operation) }} + {{ op_tools.description(operation, operation_serializer) | indent -}} + {% if not operation.abstract %} + {% if operation_serializer.pop_kwargs_from_signature(operation) %} + {{ op_tools.serialize(operation_serializer.pop_kwargs_from_signature(operation)) | indent }} + {%- endif %} + {{ op_tools.serialize(operation_serializer.initial_call(operation)) | indent }} + {{ op_tools.serialize(operation_serializer.get_long_running_output(operation)) | indent }} + + {{ op_tools.serialize(operation_serializer.return_lro_poller(operation)) | indent }} + {% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/lro_paging_operation.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/lro_paging_operation.py.jinja2 new file mode 100644 index 0000000000..09b50a00d4 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/lro_paging_operation.py.jinja2 @@ -0,0 +1,18 @@ +{% import 'operation_tools.jinja2' as op_tools with context %} +{% import 'keywords.jinja2' as keywords with context %} +{# actual template starts here #} +{% if operation.overloads and operation.include_documentation %} +{{ op_tools.generate_overloads(operation_serializer, operation) }} +{% endif %} +{{ operation_serializer.method_signature_and_response_type_annotation(operation) }} + {{ op_tools.description(operation, operation_serializer) | indent }} + {% if not operation.abstract %} + {% if operation_serializer.pop_kwargs_from_signature(operation) %} + {{ op_tools.serialize(operation_serializer.pop_kwargs_from_signature(operation)) | indent }} + {% endif %} + {{ op_tools.serialize(operation_serializer.set_up_params_for_pager(operation)) | indent }} + + {{ op_tools.serialize(operation_serializer.initial_call(operation)) | indent }} + {{ op_tools.serialize(operation_serializer.get_long_running_output(operation)) | indent }} + {{ op_tools.serialize(operation_serializer.return_lro_poller(operation)) | indent }} + {% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/macros.jinja2 
b/packages/http-client-python/generator/pygen/codegen/templates/macros.jinja2 new file mode 100644 index 0000000000..e1f30e2731 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/macros.jinja2 @@ -0,0 +1,12 @@ +{% macro wrap_model_string(doc_string, wrap_string, suffix_string="") %} +{% set original_result = doc_string | wordwrap(width=95, break_long_words=False, break_on_hyphens=False, wrapstring=wrap_string) %} +{% set list_result = original_result.split('\n') %} +{% for line in list_result %} + {% set suffix = suffix_string if list_result | length == loop.index %} + {% if line | length > 120 %} +{{ line + " # pylint: disable=line-too-long" }}{{ suffix }} + {% else %} +{{ line }}{{ suffix }} + {% endif %} +{% endfor %} +{% endmacro %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/metadata.json.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/metadata.json.jinja2 new file mode 100644 index 0000000000..946baa7917 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/metadata.json.jinja2 @@ -0,0 +1,167 @@ +{% import 'operation_tools.jinja2' as op_tools %} +{% import 'keywords.jinja2' as keywords %} +{ + "chosen_version": {{ chosen_version | tojson }}, + "total_api_version_list": {{ total_api_version_list | tojson }}, + "client": { + "name": {{ client.name | tojson }}, + "filename": {{ ("_" + client.legacy_filename) | tojson }}, + "description": {{ client.description | tojson }}, + "host_value": {{ (client.parameters.host.client_default_value_declaration if not client.has_parameterized_host else None) | tojson }}, + "parameterized_host_template": {{ (keywords.escape_str(client.url) if client.has_parameterized_host else None) | tojson }}, + "azure_arm": {{ client.code_model.options["azure_arm"] | tojson }}, + "has_public_lro_operations": {{ client.has_public_lro_operations | tojson }}, + "client_side_validation": {{ client.code_model.options["client_side_validation"] | tojson }}, + "sync_imports": {{ sync_client_imports | tojson }}, + "async_imports": {{ async_client_imports | tojson }} + }, + "global_parameters": { + "sync": { + {% for gp in global_parameters.method | rejectattr("client_name", "equalto", "api_version") | rejectattr("is_host") %} + {{ gp.client_name | tojson }}: { + "signature": {{ gp.method_signature(async_mode=False) | tojson }}, + "description": {{ gp.description | tojson }}, + "docstring_type": {{ gp.docstring_type(async_mode=False) | tojson }}, + "required": {{ (not gp.optional) | tojson }}, + "method_location": {{ gp.method_location | tojson }} + }{{ "," if not loop.last else "" }} + {% endfor %} + }, + "async": { + {% for gp in global_parameters.method | rejectattr("client_name", "equalto", "api_version") | rejectattr("is_host") %} + {{ gp.client_name | tojson }}: { + "signature": {{ (gp.method_signature(async_mode=True)) | tojson }}, + "description": {{ gp.description | tojson }}, + "docstring_type": {{ gp.docstring_type(async_mode=True) | tojson }}, + "required": {{ (not gp.optional) | tojson }} + }{{ "," if not loop.last else "" }} + {% endfor %} + }, + "constant": { + {% for gp in client.parameters.constant | rejectattr("client_name", "equalto", "api_version") %} + {{ gp.client_name | tojson }}: {{ gp.constant_declaration | tojson }}{{ "," if not loop.last else "" }} + {% endfor %} + }, + "call": {{ client.parameters.method | rejectattr("client_name", "equalto", "api_version") | rejectattr("is_host") | map(attribute="client_name") | join(', ') | tojson }}, 
+ "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version: Optional[str]=None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false, + "method_location": "positional" + }, + {% if not client.has_parameterized_host %} + "base_url": { + "signature": {{ client.parameters.host.method_signature(async_mode=False) | tojson }}, + "description": "Service URL", + "docstring_type": "str", + "required": false, + "method_location": "positional" + }, + {% endif %} + "profile": { + "signature": "profile: KnownProfiles=KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false, + "method_location": "positional" + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false, + "method_location": "positional" + }, + {% if not client.has_parameterized_host %} + "base_url": { + "signature": {{ client.parameters.host.method_signature(async_mode=True) | tojson }}, + "description": "Service URL", + "docstring_type": "str", + "required": false, + "method_location": "positional" + }, + {% endif %} + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false, + "method_location": "positional" + } + } + } + }, + "config": { + "credential": {{ has_credential | tojson }}, + "credential_scopes": {{ (client.credential.type.policy.credential_scopes if has_credential and client.credential.type.policy.credential_scopes is defined else None)| tojson}}, + "credential_call_sync": {{ (client.credential.type.policy.call(async_mode=False) if has_credential else None) | tojson }}, + "credential_call_async": {{ (client.credential.type.policy.call(async_mode=True) if has_credential else None) | tojson }}, + "sync_imports": {{ sync_config_imports | tojson }}, + "async_imports": {{ async_config_imports | tojson }} + }, + "operation_groups": { + {% for operation_group in client.operation_groups | rejectattr('is_mixin') %} + {{ operation_group.property_name | tojson }}: {{ operation_group.class_name | tojson }}{{ "," if not loop.last else "" }} + {% endfor %} + }{{ "," if mixin_operations }} + {% if mixin_operations %} + "operation_mixins": { + "sync_imports": {{ str(sync_mixin_imports) | tojson }}, + "async_imports": {{ str(async_mixin_imports) | tojson }}, + "sync_mixin_typing_definitions": {{ str(sync_mixin_typing_definitions) | tojson }}, + "async_mixin_typing_definitions": {{ str(async_mixin_typing_definitions) | tojson }}, + "operations": { + {% for operation in mixin_operations %} + {{ operation.name | tojson }} : { + {% set request_builder = operation.request_builder %} + "sync": { + {% set operation_serializer = get_sync_operation_serializer(operation) %} + {% if is_lro(operation) and is_paging(operation) %} + {% from "lro_paging_operation.py.jinja2" import operation_docstring with context %} + {% set sync_return_type_wrapper = [operation.get_poller(async_mode=False), operation.get_pager(async_mode=False)] %} + {% elif is_lro(operation) %} + {% from "lro_operation.py.jinja2" import operation_docstring with context %} + {% set sync_return_type_wrapper = 
[operation.get_poller(async_mode=False)] %} + {% elif is_paging(operation) %} + {% from "paging_operation.py.jinja2" import operation_docstring with context %} + {% set sync_return_type_wrapper = [operation.get_pager(async_mode=False)] %} + {% else %} + {% from "operation.py.jinja2" import operation_docstring with context %} + {% set sync_return_type_wrapper = "" %} + {% endif %} + "signature": {{ (operation_serializer.method_signature_and_response_type_annotation(operation, want_decorators=False) + "\n") | tojson }}, + "doc": {{ op_tools.description(operation, operation_serializer).rstrip("\n") | tojson }}, + "call": {{ operation.parameters.call | join(', ') | tojson }} + }, + "async": { + {% set coroutine = False if (is_paging(operation) and not is_lro(operation)) else True %} + {% set operation_serializer = get_async_operation_serializer(operation) %} + "coroutine": {{ coroutine | tojson }}, + {% if is_lro(operation) and is_paging(operation) %} + {% from "lro_paging_operation.py.jinja2" import operation_docstring with context %} + {% set async_return_type_wrapper = [operation.get_poller(async_mode=True), operation.get_pager(async_mode=True)] %} + {% elif is_lro(operation) %} + {% from "lro_operation.py.jinja2" import operation_docstring with context %} + {% set async_return_type_wrapper = [operation.get_poller(async_mode=True)] %} + {% elif is_paging(operation) %} + {% from "paging_operation.py.jinja2" import operation_docstring with context %} + {% set async_return_type_wrapper = [operation.get_pager(async_mode=True)] %} + {% else %} + {% from "operation.py.jinja2" import operation_docstring with context %} + {% set async_return_type_wrapper = "" %} + {% endif %} + "signature": {{ (operation_serializer.method_signature_and_response_type_annotation(operation, want_decorators=False) + "\n") | tojson }}, + "doc": {{ op_tools.description(operation, operation_serializer).rstrip("\n") | tojson }}, + "call": {{ operation.parameters.call | join(', ') | tojson }} + } + }{{ "," if not loop.last else "" }} + {% endfor %} + } + } + {% endif %} +} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/model_base.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/model_base.py.jinja2 new file mode 100644 index 0000000000..abe48c407d --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/model_base.py.jinja2 @@ -0,0 +1,1157 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) {{ code_model.options["company_name"] }} Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, arguments-differ, signature-differs, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from typing_extensions import Self +import isodate +from {{ code_model.core_library }}.exceptions import DeserializationError +from {{ code_model.core_library }}{{ "" if code_model.is_azure_flavor else ".utils" }} import CaseInsensitiveEnumMeta +from {{ code_model.core_library }}.{{ "" if code_model.is_azure_flavor else "runtime." 
}}pipeline import PipelineResponse +from {{ code_model.core_library }}.serialization import _Null + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return
dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + return self._data.items() + + def 
get(self, key: str, default: typing.Any = None) -> typing.Any: + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> typing.Tuple[str, typing.Any]: + return self._data.popitem() + + def clear(self) -> None: + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + 
rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) # pylint: disable=no-value-for-parameter + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? 
+ if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? + if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often the most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + if annotation._name == "Dict": # pyright: ignore + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if
deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + 
xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + 
"prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/packages/http-client-python/generator/pygen/codegen/templates/model_container.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/model_container.py.jinja2 new file mode 100644 index 0000000000..2e273974a7 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/model_container.py.jinja2 @@ -0,0 +1,12 @@ +{% import 'operation_tools.jinja2' as op_tools %} +# coding=utf-8 +{{ code_model.options['license_header'] }} + +{{ imports }} +{% for model in code_model.model_types %} +{% if model.base == "dpg" %} +{% include "model_dpg.py.jinja2" %} +{% elif model.base == "msrest" %} +{% include "model_msrest.py.jinja2" %} +{% endif %} +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/model_dpg.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/model_dpg.py.jinja2 new file mode 100644 index 0000000000..3640d213d8 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/model_dpg.py.jinja2 @@ -0,0 +1,97 @@ +{# actual template starts here #} +{% import "macros.jinja2" as macros %} + + +{{ serializer.declare_model(model) }} + """{{ 
op_tools.wrap_string(model.description(is_operation_file=False), "\n ") }} + {% if model.discriminated_subtypes %} + + {{ serializer.discriminator_docstring(model) | wordwrap(width=95, break_long_words=False, break_on_hyphens=False, wrapstring='\n ') }} + {% endif %} + {% if model.has_readonly_or_constant_property %} + + Readonly variables are only populated by the server, and will be ignored when sending a request. + {% endif %} + {% if (model.properties | selectattr('optional', "equalto", false) | first) is defined %} + + {% if not model.is_usage_output %} + All required parameters must be populated in order to send to server. + {% endif %} + {% endif %} + + {% if model.properties != None %} + {% for p in model.properties %} + {% for line in serializer.variable_documentation_string(p) %} + {% for doc_string in line.replace('\n', '\n ').split('\n') %} + {{ macros.wrap_model_string(doc_string, '\n ') -}} + {% endfor %} + {% endfor %} + {% endfor %} + {% endif %} + """ + + {% if model.is_polymorphic %} + __mapping__: Dict[str, _model_base.Model] = {} + {% endif %} + {% for p in serializer.get_properties_to_declare(model)%} + {{ serializer.declare_property(p) }} + {% set prop_description = p.description(is_operation_file=False).replace('"', '\\"') %} + {% if prop_description %} + """{{ macros.wrap_model_string(prop_description, '\n ', '\"\"\"') -}} + {% endif %} + {% endfor %} + + {% if code_model.options["models_mode"] == "dpg" and model.flattened_property %} + __flattened_items = ["{{ model.flattened_items|join('\", \"') }}"] + {% endif %} + + {% if model.xml_metadata %} + _xml = {{model.xml_metadata}} + {% endif %} + + + {% if serializer.need_init(model) %} + @overload + def __init__({{ model.init_pylint_disable }} + self, + {% for param_signature in serializer.init_line(model) %} + {{ param_signature }} + {% endfor %} + ): + ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + {% endif %} + {% set initialize_properties = serializer.initialize_properties(model) %} + {% if serializer.need_init(model) or initialize_properties %} + def __init__(self, *args: Any, **kwargs: Any) -> None:{{ serializer.pylint_disable(model) }} + {% for line in serializer.super_call(model) %} + {{ line }} + {% endfor %} + {% for initialize_property in initialize_properties %} + {{ initialize_property }} + {% endfor %} + {% if code_model.options["models_mode"] == "dpg" and model.flattened_property %} + {% set flattened_property_attr = model.flattened_property.client_name %} + + def __getattr__(self, name: str) -> Any: + if name in self.__flattened_items: + if self.{{ flattened_property_attr }} is None: return None + return getattr(self.{{ flattened_property_attr }}, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") + + def __setattr__(self, key: str, value: Any) -> None: + if key in self.__flattened_items: + if self.{{ flattened_property_attr }} is None: + self.{{ flattened_property_attr }} = self._attr_to_rest_field["{{ flattened_property_attr }}"]._class_type() + setattr(self.properties, key, value) + else: + super().__setattr__(key, value) + {% endif %} + {% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/model_init.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/model_init.py.jinja2 new file mode 100644 index 0000000000..f471068084 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/model_init.py.jinja2 @@ -0,0 +1,28 @@ +{% import 'keywords.jinja2' as keywords %} +# coding=utf-8 +{{ code_model.options['license_header'] }} +{% if schemas %} + + {% for schema in schemas %} +from .{{ code_model.models_filename }} import {{ schema }} + {% endfor %} +{% endif %} +{% if enums %} + +{% for enum in enums %} +from .{{ code_model.enums_filename }} import {{ enum }} +{% endfor %} +{% endif %} +{{ keywords.patch_imports() }} +__all__ = [ + {% for schema in schemas %} + '{{ schema }}', + {% endfor %} + {% if enums %} + {% for enum in enums %} + '{{ enum }}', + {% endfor %} +{% endif %} +] +{{ keywords.extend_all }} +_patch_sdk() diff --git a/packages/http-client-python/generator/pygen/codegen/templates/model_msrest.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/model_msrest.py.jinja2 new file mode 100644 index 0000000000..15ccc8a2d9 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/model_msrest.py.jinja2 @@ -0,0 +1,92 @@ +{# actual template starts here #} +{% import "macros.jinja2" as macros %} +{% set initialize_properties = serializer.initialize_properties(model) %} +{% set exist_constant = (model.properties | selectattr('constant') | first) is defined %} + +{{ serializer.declare_model(model) }} + """{{ op_tools.wrap_string(model.description(is_operation_file=False), "\n ") }} + {% if model.discriminated_subtypes %} + + {{ serializer.discriminator_docstring(model) | wordwrap(width=95, break_long_words=False, break_on_hyphens=False, wrapstring='\n ') }} + {% endif %} + {% if model.has_readonly_or_constant_property %} + + Variables are only populated by the server, and will be ignored when sending a request. + {% endif %} + {% if (model.properties | selectattr('optional', "equalto", false) | first) is defined %} + + All required parameters must be populated in order to send to server. 
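For models with a flattened property, the DPG model template above emits `__getattr__`/`__setattr__` overrides that forward the flattened attribute names to the nested model. A minimal, self-contained sketch of that delegation pattern written with plain classes (the `Widget`/`WidgetProperties` names are illustrative, not types generated by this PR):

```python
from typing import Any, Optional


class WidgetProperties:
    """Hypothetical nested model that actually stores the flattened attribute."""

    def __init__(self, color: Optional[str] = None) -> None:
        self.color = color


class Widget:
    """Hypothetical top-level model that flattens `properties.color` onto itself."""

    __flattened_items = ["color"]

    def __init__(self, properties: Optional[WidgetProperties] = None) -> None:
        self.properties = properties

    def __getattr__(self, name: str) -> Any:
        # Only reached when normal lookup fails, i.e. for flattened names.
        if name in self.__flattened_items:
            if self.properties is None:
                return None
            return getattr(self.properties, name)
        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")

    def __setattr__(self, key: str, value: Any) -> None:
        if key in self.__flattened_items:
            if self.properties is None:
                self.properties = WidgetProperties()
            setattr(self.properties, key, value)
        else:
            super().__setattr__(key, value)


w = Widget()
w.color = "red"                        # transparently stored on the nested model
assert w.properties.color == "red"
assert w.color == "red"                # and read back through __getattr__
```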
+ {% endif %} + + {% if model.properties != None %} + {% for p in model.properties %} + {% for line in serializer.variable_documentation_string(p) %} + {% for doc_string in line.replace('\n', '\n ').split('\n') %} + {{ macros.wrap_model_string(doc_string, '\n ') -}} + {% endfor %} + {% endfor %} + {% endfor %} + {% endif %} + """ +{% if initialize_properties or exist_constant %} + {% if (model.properties | selectattr('validation') ) | first %} + + _validation = { + {% for p in model.properties | selectattr('validation')%} + '{{ p.client_name }}': {{ str(p.validation) }}, + {% endfor %} + } + {% endif %} + + _attribute_map = { + {% if model.properties %} + {% for p in model.properties %} + {{ serializer.declare_property(p) }} + {% endfor %} + {% endif %} + } + {% if model.discriminated_subtypes %} + + _subtype_map = { + '{{ model.discriminator.client_name }}': {{ str(model.discriminated_subtypes_name_mapping) }} + } + {% endif %} + {% if model.xml_map_content %} + _xml_map = { + {{ model.xml_map_content }} + } + {% endif %} + {% if exist_constant %} + + {% for p in model.properties | selectattr('constant')%} + {{ p.client_name }} = {{ p.type.get_declaration() }} + {% endfor %} + {% endif %} + + def __init__({{ model.init_pylint_disable }} + self, + {% for param_signature in serializer.init_line(model) %} + {{ param_signature }} + {% endfor %} + **kwargs: Any + ) -> None: + """ + {% if model.properties %} + {% for p in model.properties %} + {% if p.is_input %} + {% for line in serializer.input_documentation_string(p) %} + {% for doc_string in line.replace('\n', '\n ').split('\n') %} + {{ macros.wrap_model_string(doc_string, '\n ') -}} + {% endfor %} + {% endfor %} + {% endif %} + {% endfor %} + {% endif %} + """ + {% for line in serializer.super_call(model) %} + {{ line }} + {% endfor %} + {% for initialize_property in initialize_properties %} + {{ initialize_property }} + {% endfor %} +{% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/operation.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/operation.py.jinja2 new file mode 100644 index 0000000000..aec6199880 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/operation.py.jinja2 @@ -0,0 +1,21 @@ +{% import 'keywords.jinja2' as keywords with context %} +{% import 'operation_tools.jinja2' as op_tools %} +{# actual template starts here #} +{% if operation.overloads and operation.include_documentation %} +{{ op_tools.generate_overloads(operation_serializer, operation) }} +{% endif %} +{{ operation_serializer.method_signature_and_response_type_annotation(operation) }} +{% if operation.include_documentation %} + {{ op_tools.description(operation, operation_serializer) | indent }}{% endif %} + {% if not operation.abstract %} + {% if operation.deprecated %} + warnings.warn('Method {{operation.name}} is deprecated', DeprecationWarning) + {% endif %} + {{ op_tools.serialize(operation_serializer.error_map(operation)) | indent }} + {% if operation_serializer.pop_kwargs_from_signature(operation) %} + {{ op_tools.serialize(operation_serializer.pop_kwargs_from_signature(operation)) | indent }} + {% endif %} + {{ op_tools.serialize(operation_serializer.call_request_builder(operation)) | indent }} + {{ op_tools.serialize(operation_serializer.make_pipeline_call(operation)) | indent }} + {{ op_tools.serialize(operation_serializer.handle_response(operation)) | indent }} + {% endif %} diff --git 
a/packages/http-client-python/generator/pygen/codegen/templates/operation_group.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/operation_group.py.jinja2 new file mode 100644 index 0000000000..1ee2846535 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/operation_group.py.jinja2 @@ -0,0 +1,75 @@ +{% set base_class = ("(" + operation_group.base_class + ")") if operation_group.base_class else "" %} +{% macro check_abstract_methods() %} +{% if operation_group.has_abstract_operations %} + raise_if_not_implemented(self.__class__, [ + {% for operation in operation_group.operations if operation.abstract %} + '{{operation.name}}', + {% endfor %} + ]) +{% endif %} +{% endmacro %} +{% if operation_group.base_class %} +class {{ operation_group.class_name }}( {{ operation_group.pylint_disable() }} + {{ operation_group.base_class }} +): +{% else %} +class {{ operation_group.class_name }}: {{ operation_group.pylint_disable() }} +{% endif %} +{% if not operation_group.is_mixin %} + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`{{ "~" + code_model.namespace + (".aio." if async_mode else ".") + operation_group.client.name }}`'s + :attr:`{{ operation_group.property_name }}` attribute. + """ + +{% if code_model.public_model_types and code_model.options["models_mode"] == "msrest" %} + models = _models + +{% endif %} + def __init__(self, *args, **kwargs){{ return_none_type_annotation }}: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + {% if code_model.options["multiapi"] %} + self._api_version = input_args.pop(0) if input_args else kwargs.pop("api_version") + {% endif %} + + {% for og in operation_group.operation_groups %} + self.{{ og.property_name }} = {{ og.class_name }}( + self._client, self._config, self._serialize, self._deserialize{{ ", self._api_version" if code_model.options["multiapi"] else "" }} + ) + {% endfor %} + +{{ check_abstract_methods() }} +{% elif operation_group.has_abstract_operations %} + + def __init__(self){{ return_none_type_annotation }}: +{{ check_abstract_methods() }} +{% endif %} +{% if operation_group.is_mixin and code_model.options["multiapi"] %} + def _api_version(self, op_name: str) -> str: # pylint: disable=unused-argument + try: + return self._config.api_version + except: # pylint: disable=bare-except + return "" +{% endif %} +{% for operation in operation_group.operations if not operation.abstract %} + +{% set request_builder = operation.request_builder %} +{% set operation_serializer = get_operation_serializer(operation) %} + {% if operation.operation_type == "lropaging" %} + {%- macro some_op() %}{% include "lro_paging_operation.py.jinja2" %}{% endmacro %} + {% elif operation.operation_type == "lro" %} + {%- macro some_op() %}{% include "lro_operation.py.jinja2" %}{% endmacro %} + {% elif operation.operation_type == "paging" %} + {% macro some_op() %}{% include "paging_operation.py.jinja2" %}{% endmacro %} + {% else %} + {% macro some_op() %}{% include "operation.py.jinja2" %}{% endmacro %} + {% endif %} + {{ some_op()|indent }} +{% endfor %} diff --git 
a/packages/http-client-python/generator/pygen/codegen/templates/operation_groups_container.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/operation_groups_container.py.jinja2 new file mode 100644 index 0000000000..6fdaf41371 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/operation_groups_container.py.jinja2 @@ -0,0 +1,19 @@ +{% import 'operation_tools.jinja2' as op_tools %} +{% set operations_description = "async operations" if async_mode else "operations" %} +{% set return_none_type_annotation = " -> None" if async_mode else "" %} +# coding=utf-8 +{{ code_model.options['license_header'] }} +{{ imports }} +{{ unset }} +{% if code_model.options["builders_visibility"] == "embedded" and not async_mode %} +{{ op_tools.declare_serializer(code_model) }} + {% for operation_group in operation_groups %} + {% for request_builder in get_request_builders(operation_group) %} + +{% include "request_builder.py.jinja2" %} + {% endfor %} + {% endfor %} +{% endif %} +{% for operation_group in operation_groups %} + {% include "operation_group.py.jinja2" %} +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/operation_tools.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/operation_tools.jinja2 new file mode 100644 index 0000000000..598da57e4d --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/operation_tools.jinja2 @@ -0,0 +1,74 @@ +{% macro wrap_string(string, wrapstring, width=95) %}{{ string | replace("\\", "\\\\") | wordwrap(width=width, break_long_words=False, break_on_hyphens=False, wrapstring=wrapstring)}}{% endmacro %} + +{% macro description(builder, serializer) %} +{% set example_template = serializer.example_template(builder) %} + {% for description in serializer.description_and_summary(builder) %} + {% if description %} +{% set description = wrap_string(description, wrapstring='\n') %} + {% if serializer.line_too_long(example_template) and loop.first %} +# pylint: disable=line-too-long + {% endif %} +{{ '"""' + description if loop.first else description }} + {% else %} + + {% endif %} + {% endfor %} + {% for description in serializer.param_description_and_response_docstring(builder) %} + {% if description %} +{{ wrap_string(description, wrapstring='\n ') }} + {% else %} + + {% endif %} +{% endfor %} +{% if example_template %} + +Example: + .. 
code-block:: python + {% for template_line in example_template %} + {% if template_line %} + {% set wrap_amount = (template_line | length) - (template_line.lstrip() | length) + 10 %} + {{ wrap_string(template_line, wrapstring='\n' + " " * wrap_amount, width=(95 - wrap_amount)) }} + {% else %} + + {% endif %} + {% endfor %} +{% endif %} +""" +{% endmacro %} + +{% macro serialize(lines) %} +{% for line in lines %} + {% if line %} +{{ line }} + {% else %} + + {% endif %} +{% endfor %}{% endmacro %} + +{% macro serialize_with_wrap(lines, wrapstring) %} +{% for line in lines %} + {% if line %} +{{ wrap_string(line, wrapstring=wrapstring) }} + {% else %} + + {% endif %} +{% endfor %}{% endmacro %} + +{% macro declare_serializer(code_model) %} +{% if code_model.has_non_abstract_operations %} +_SERIALIZER = Serializer() + {% if not code_model.options["client_side_validation"] %} +_SERIALIZER.client_side_validation = False + {% endif %} +{% endif %} +{% endmacro %} + +{% macro generate_overloads(operation_serializer, operation) %} +{% for overload in operation.overloads %} +{{ operation_serializer.method_signature_and_response_type_annotation(overload) }} +{% if not operation.internal %} + {{ description(overload, operation_serializer) | indent }} +{% else %} + ... +{% endif %} +{% endfor %}{% endmacro %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/operations_folder_init.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/operations_folder_init.py.jinja2 new file mode 100644 index 0000000000..bb38196f4a --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/operations_folder_init.py.jinja2 @@ -0,0 +1,17 @@ +{% import 'operation_tools.jinja2' as op_tools %} +{% import 'keywords.jinja2' as keywords %} +{# actual template starts here #} +# coding=utf-8 +{{ code_model.options['license_header'] }} + +{{ op_tools.serialize(operation_group_imports()) }} +{{ keywords.patch_imports() }} +__all__ = [ + {% for client in clients %} + {% for operation_group in client.operation_groups %} + '{{ operation_group.class_name }}', + {% endfor %} + {% endfor %} +] +{{ keywords.extend_all }} +_patch_sdk() diff --git a/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/CHANGELOG.md.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/CHANGELOG.md.jinja2 new file mode 100644 index 0000000000..bddf9a22c7 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/CHANGELOG.md.jinja2 @@ -0,0 +1,6 @@ +# Release History + +## 1.0.0b1 (1970-01-01) + +- Initial version + diff --git a/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/LICENSE.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/LICENSE.jinja2 new file mode 100644 index 0000000000..3499e00ae5 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/LICENSE.jinja2 @@ -0,0 +1,21 @@ +Copyright (c) {{ code_model.options["company_name"] }} Corporation. 
+ +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/MANIFEST.in.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/MANIFEST.in.jinja2 new file mode 100644 index 0000000000..454a9ad271 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/MANIFEST.in.jinja2 @@ -0,0 +1,8 @@ +include *.md +include LICENSE +include {{ package_name.replace('-', '/') }}/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +{%- for init_name in init_names %} +include {{ init_name }} +{%- endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/README.md.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/README.md.jinja2 new file mode 100644 index 0000000000..fbea591360 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/README.md.jinja2 @@ -0,0 +1,107 @@ +{% if code_model.is_azure_flavor %} +{% if package_mode == "mgmtplane" -%} +# Microsoft Azure SDK for Python + +This is the Microsoft {{package_pprint_name}} Client Library. +This package has been tested with Python 3.8+. +For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). + +# Usage + +To learn how to use this package, see the [quickstart guide](https://aka.ms/azsdk/python/mgmt) + +For docs and references, see [Python SDK References](https://docs.microsoft.com/python/api/overview/azure) +Code samples for this package can be found at [{{package_pprint_name}}](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com. +Additional code samples for different Azure services are available at [Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) + +# Provide Feedback + +If you encounter any bugs or have suggestions, please file an issue in the +[Issues](https://github.com/Azure/azure-sdk-for-python/issues) +section of the project. 
+ + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2F{{package_name}}%2FREADME.png) +{% else %} +# {{ package_pprint_name }} client library for Python + + +## Getting started + +### Install the package + +```bash +python -m pip install {{ package_name }} +``` + +#### Prequisites + +- Python 3.8 or later is required to use this package. +- You need an [Azure subscription][azure_sub] to use this package. +- An existing {{ package_pprint_name }} instance. + +{%- if token_credential %} +#### Create with an Azure Active Directory Credential +To use an [Azure Active Directory (AAD) token credential][authenticate_with_token], +provide an instance of the desired credential type obtained from the +[azure-identity][azure_identity_credentials] library. + +To authenticate with AAD, you must first [pip][pip] install [`azure-identity`][azure_identity_pip] + +After setup, you can choose which type of [credential][azure_identity_credentials] from azure.identity to use. +As an example, [DefaultAzureCredential][default_azure_credential] can be used to authenticate the client: + +Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables: +`AZURE_CLIENT_ID`, `AZURE_TENANT_ID`, `AZURE_CLIENT_SECRET` + +Use the returned token credential to authenticate the client: + +```python +>>> from {{ namespace }} import {{ client_name }} +>>> from azure.identity import DefaultAzureCredential +>>> client = {{ client_name }}(endpoint='', credential=DefaultAzureCredential()) +``` + +## Examples + +```python +>>> from {{ namespace }} import {{ client_name }} +>>> from azure.identity import DefaultAzureCredential +>>> from {{ code_model.core_library }}.exceptions import HttpResponseError + +>>> client = {{ client_name }}(endpoint='', credential=DefaultAzureCredential()) +>>> try: + + except HttpResponseError as e: + print('service responds error: {}'.format(e.response.json())) + +``` +{%- endif %} + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. +For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. 
+ + +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ +[authenticate_with_token]: https://docs.microsoft.com/azure/cognitive-services/authentication?tabs=powershell#authenticate-with-an-authentication-token +[azure_identity_credentials]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#credentials +[azure_identity_pip]: https://pypi.org/project/azure-identity/ +[default_azure_credential]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential +[pip]: https://pypi.org/project/pip/ +[azure_sub]: https://azure.microsoft.com/free/ +{% endif %} +{% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/dev_requirements.txt.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/dev_requirements.txt.jinja2 new file mode 100644 index 0000000000..a9782cabd5 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/dev_requirements.txt.jinja2 @@ -0,0 +1,9 @@ +-e ../../../tools/azure-sdk-tools +../../core/azure-core +{% if token_credential -%} +../../identity/azure-identity +{% endif -%} +{% if azure_arm -%} +../../core/azure-mgmt-core +{% endif -%} +aiohttp diff --git a/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/setup.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/setup.py.jinja2 new file mode 100644 index 0000000000..45239f0dac --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/packaging_templates/setup.py.jinja2 @@ -0,0 +1,108 @@ +# coding=utf-8 +{{ license_header }} +# coding: utf-8 +{% if package_mode %} +import os +import re +{% endif -%} +from setuptools import setup, find_packages + +{% set package_name = package_name or code_model.clients[0].name %} + +PACKAGE_NAME = "{{ package_name|lower }}" +{% if package_mode -%} +PACKAGE_PPRINT_NAME = "{{ package_pprint_name }}" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace("-", "/") + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: + version = re.search( + r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE + ).group(1) + +if not version: + raise RuntimeError("Cannot find version information") +{% set description = "\"" + code_model.options["company_name"] + " {} Client Library for Python\".format(PACKAGE_PPRINT_NAME)" %} +{% set author_email = "azpysdkhelp@microsoft.com" %} +{% set url = "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk" %} +{% else %} +version = "{{ package_version }}" +{% set description = "\"%s\""|format(package_name) %} +{% set long_description = code_model.description %} +{% set author_email = "" %} +{% set url = "" %} +{% endif -%} + +setup( + name=PACKAGE_NAME, + version=version, + description={{ description }}, + {% if package_mode %} + long_description=open("README.md", "r").read(), + long_description_content_type="text/markdown", + license="MIT License", + author="{{ code_model.options["company_name"] }} Corporation", + {% endif %} + {% if code_model.is_azure_flavor %} + author_email="{{ author_email }}", + url="{{ url }}", + keywords="azure, azure sdk", + {% endif %} + {% if package_mode %} + classifiers=[ + "Development Status :: {{ dev_status }}", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 
3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: MIT License", + ], + zip_safe=False, + packages=find_packages( + exclude=[ + "tests", + {% if pkgutil_names %} + # Exclude packages that will be covered by PEP420 or nspkg + {% endif %} + {%- for pkgutil_name in pkgutil_names %} + "{{ pkgutil_name }}", + {%- endfor %} + ] + ), + include_package_data=True, + package_data={ + '{{ code_model.namespace }}': ['py.typed'], + }, + {% else %} + packages=find_packages(), + include_package_data=True, + {% endif %} + install_requires=[ + {% if code_model.is_legacy %} + "msrest>=0.7.1", + {% else %} + "isodate>=0.6.1", + {% endif %} + {% if azure_arm %} + "azure-mgmt-core>=1.3.2", + {% elif code_model.is_azure_flavor %} + "azure-core>=1.30.0", + {% else %} + "corehttp[requests]", + {% endif %} + "typing-extensions>=4.6.0", + ], + {% if package_mode %} + python_requires=">=3.8", + {% else %} + long_description="""\ + {{ code_model.description }} + """ + {% endif %} +) diff --git a/packages/http-client-python/generator/pygen/codegen/templates/paging_operation.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/paging_operation.py.jinja2 new file mode 100644 index 0000000000..d6f0d6f14d --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/paging_operation.py.jinja2 @@ -0,0 +1,21 @@ +{% import 'operation_tools.jinja2' as op_tools with context %} +{# actual template starts here #} +{% if operation.overloads and operation.include_documentation %} +{{ op_tools.generate_overloads(operation_serializer, operation) }} +{% endif %} +{{ operation_serializer.method_signature_and_response_type_annotation(operation) }} +{% if operation.include_documentation %} + {{ op_tools.description(operation, operation_serializer) | indent }}{% endif %} + {% if not operation.abstract %} + {% if operation.deprecated %} + warnings.warn('Method {{operation.name}} is deprecated', DeprecationWarning) + {% endif %} + {% if operation_serializer.pop_kwargs_from_signature(operation) %} + {{ op_tools.serialize(operation_serializer.pop_kwargs_from_signature(operation)) | indent }} + {% endif %} + {{ op_tools.serialize(operation_serializer.set_up_params_for_pager(operation)) | indent }} + + return {{ operation.get_pager(async_mode) }}( + get_next, extract_data + ) + {% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/patch.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/patch.py.jinja2 new file mode 100644 index 0000000000..87c0954871 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/patch.py.jinja2 @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) {{ code_model.options["company_name"] }} Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +{{ imports }} + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/packages/http-client-python/generator/pygen/codegen/templates/pkgutil_init.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/pkgutil_init.py.jinja2 new file mode 100644 index 0000000000..5960c353a8 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/pkgutil_init.py.jinja2 @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore \ No newline at end of file diff --git a/packages/http-client-python/generator/pygen/codegen/templates/request_builder.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/request_builder.py.jinja2 new file mode 100644 index 0000000000..ed9af24814 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/request_builder.py.jinja2 @@ -0,0 +1,28 @@ +{% import 'keywords.jinja2' as keywords with context %} +{% import 'operation_tools.jinja2' as op_tools with context %} +{{ request_builder_serializer.method_signature_and_response_type_annotation(request_builder) }} +{% if code_model.options["builders_visibility"] == "public" %} + {{ op_tools.description(request_builder, request_builder_serializer) | indent }} +{% endif %} +{% if not request_builder.is_overload %} + {% if request_builder_serializer.pop_kwargs_from_signature(request_builder) %} + {{ op_tools.serialize(request_builder_serializer.pop_kwargs_from_signature(request_builder)) | indent }} + {%- endif -%} + {% if request_builder_serializer.declare_non_inputtable_headers_queries(request_builder) %} + {{ op_tools.serialize(request_builder_serializer.declare_non_inputtable_headers_queries(request_builder)) | indent }} + {% endif %} + # Construct URL + {{ request_builder_serializer.construct_url(request_builder) }} + {% if request_builder.parameters.path %} + {{ op_tools.serialize(request_builder_serializer.serialize_path(request_builder)) | indent }} + _url: str = _url.format(**path_format_arguments) # type: ignore + {% endif %} + + {% if request_builder.parameters.query %} + {{ op_tools.serialize(request_builder_serializer.serialize_query(request_builder)) | indent }} + {% endif %} + {% if request_builder.parameters.headers %} + {{ op_tools.serialize(request_builder_serializer.serialize_headers(request_builder)) | indent }} + {% endif %} + {{ op_tools.serialize(request_builder_serializer.create_http_request(request_builder)) | indent }} +{% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/request_builders.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/request_builders.py.jinja2 new file mode 100644 index 0000000000..3c72ec2ac4 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/request_builders.py.jinja2 @@ -0,0 +1,10 @@ +{% import 'operation_tools.jinja2' as op_tools %} +# coding=utf-8 +{{ code_model.options['license_header'] }} +{{ imports }} + +{{ op_tools.declare_serializer(code_model) }} +{% for request_builder in request_builders %} + +{% include "request_builder.py.jinja2" %} +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/rest_init.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/rest_init.py.jinja2 new file mode 100644 index 0000000000..9833f3fdc4 --- /dev/null +++ 
b/packages/http-client-python/generator/pygen/codegen/templates/rest_init.py.jinja2 @@ -0,0 +1,12 @@ +# coding=utf-8 +{{ code_model.options['license_header'] }} + +{% for request_builder in request_builders %} +from ._request_builders import {{ request_builder.name }} +{% endfor %} + +__all__ = [ + {% for request_builder in request_builders %} + '{{ request_builder.name }}', + {% endfor %} +] diff --git a/packages/http-client-python/generator/pygen/codegen/templates/sample.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/sample.py.jinja2 new file mode 100644 index 0000000000..421a3a7c55 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/sample.py.jinja2 @@ -0,0 +1,44 @@ +# coding=utf-8 +{% set aad_token = "DefaultAzureCredential" %} +{% set azure_key = "AzureKeyCredential" %} +{{ code_model.options['license_header'] }} + +{{ imports }} +""" +# PREREQUISITES +{% if "credential" in client_params and aad_token in client_params["credential"] %} + pip install azure-identity +{% endif %} + pip install {{ (code_model.options["package_name"] or code_model.clients[0].name)|lower }} +# USAGE + python {{ file_name }} + {% if "credential" in client_params and aad_token in client_params["credential"] %} + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal + {% elif "credential" in client_params and azure_key in client_params["credential"] %} + + Before run the sample, please set environment variables AZURE_KEY with real value + which can access your service + {% endif %} +""" +def main(): + client = {{ code_model.clients[0].name }}( + {% for key,value in client_params.items() %} + {{ key }}={{ value }}, + {% endfor %} + ) + + {{ return_var }}client{{ operation_group_name }}{{ operation_name }}( + {% for key, value in operation_params.items() %} + {{ key }}={{ value|indent(8) }}, + {% endfor %} + ){{ operation_result }} + +{% if origin_file %} +# x-ms-original-file: {{ origin_file }} +{% endif %} +if __name__ == "__main__": + main() diff --git a/packages/http-client-python/generator/pygen/codegen/templates/serialization.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/serialization.py.jinja2 new file mode 100644 index 0000000000..97a0f5a5a7 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/serialization.py.jinja2 @@ -0,0 +1,2114 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) {{ code_model.options["company_name"] }} Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + TypeVar, + MutableMapping, + Type, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore + +from {{ code_model.core_library }}.exceptions import DeserializationError, SerializationError +from {{ code_model.core_library }}.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... 
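The `JSON_REGEXP` used by `RawDeserializer.deserialize_from_text` above decides from the lower-cased, parameter-stripped content type whether a payload is parsed as JSON; anything else falls through to the XML attempt or the `text/` passthrough. A quick standalone sketch of which media types that pattern accepts:

```python
import re

# Same pattern as RawDeserializer.JSON_REGEXP above.
JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")

for ct in ("application/json", "text/json", "application/hal+json",
           "application/xml", "text/plain"):
    print(ct, "->", bool(JSON_REGEXP.match(ct)))
# application/json -> True
# text/json -> True
# application/hal+json -> True
# application/xml -> False
# text/plain -> False
```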
+ # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0. + + :param datetime.datetime dt: The datetime + :returns: The offset + :rtype: datetime.timedelta + """ + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation. + + :param datetime.datetime dt: The datetime + :returns: The timestamp representation + :rtype: str + """ + return "Z" + + def dst(self, dt): + """No daylight saving for UTC. + + :param datetime.datetime dt: The datetime + :returns: The daylight saving time + :rtype: datetime.timedelta + """ + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset # type: ignore +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. + Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? 
None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + :rtype: ModelType + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + :rtype: ModelType + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + Remove the polymorphic key from the initial data. 
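The `as_dict` docstring above describes the key-transformer callback; judging from how `_serialize` later unpacks the result (`keys, orig_attr = key_transformer(...)`), a transformer returns the output key (or list of keys) together with the value. A small hypothetical transformer sketched along those lines, emitting the wire name recorded in `_attribute_map` instead of the Python attribute name:

```python
# Hypothetical key transformer; `attr_desc` is the metadata dict from
# _attribute_map ("key" holds the RestAPI/wire name, "type" the msrest type).
def wire_key_transformer(key, attr_desc, value):
    return attr_desc["key"], value

# Possible usage on a generated msrest model instance (illustrative only):
#   payload = my_model.as_dict(key_transformer=wire_key_transformer)
```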
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. + + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer(object): # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + 
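`Serializer` keeps a table of client-side validation predicates in which each entry returns True when a value violates the named constraint. A runnable sketch using a few of the predicates copied verbatim from the table above:

```python
import re

# Each predicate answers: "does this value violate the constraint?"
validation = {
    "min_length": lambda x, y: len(x) < y,
    "max_length": lambda x, y: len(x) > y,
    "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
    "multiple": lambda x, y: x % y != 0,
}

print(validation["min_length"]("abc", 5))                  # True  -> too short
print(validation["max_length"]("abc", 5))                  # False -> within limit
print(validation["pattern"]("abc-123", r"^[a-z]+-\d+$"))   # False -> matches pattern
print(validation["multiple"](10, 4))                       # True  -> not a multiple of 4
```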
def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{% raw %}{{{}}}{}{% endraw %}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. + :returns: The serialized data. 
+ :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. 
+ :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. 
+ :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." 
in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. 
+ + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{% raw %}{{{}}}{}{% endraw %}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{% raw %}{{{}}}{}{% endraw %}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. 
+ """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. 
+ :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k for k, v in response._validation.items() if v.get("readonly") # pylint: disable=protected-access + ] + const = [ + k for k, v in response._validation.items() if v.get("constant") # pylint: disable=protected-access + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. 
+ + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises: DeserializationError if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. 
+ + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." 
+ raise DeserializationError(msg) from err + return date_obj diff --git a/packages/http-client-python/generator/pygen/codegen/templates/test.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/test.py.jinja2 new file mode 100644 index 0000000000..25f8da7b53 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/test.py.jinja2 @@ -0,0 +1,50 @@ +{% set prefix_lower = test.prefix|lower %} +{% set client_var = "self.client" if code_model.options["azure_arm"] else "client" %} +{% set async = "async " if test.is_async else "" %} +{% set async_suffix = "_async" if test.is_async else "" %} +# coding=utf-8 +{{ code_model.options['license_header'] }} +import pytest +{{ imports }} + +{% if code_model.options["azure_arm"] %} +AZURE_LOCATION = "eastus" +{% endif %} + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class {{ test.test_class_name }}({{ test.base_test_class_name }}): +{% if code_model.options["azure_arm"] %} + def setup_method(self, method): + {% if test.is_async %} + self.client = self.create_mgmt_client({{ test.client_name }}, is_async=True) + {% else %} + self.client = self.create_mgmt_client({{ test.client_name }}) + {% endif %} +{% endif %} +{% for testcase in test.testcases %} + {% if code_model.options["azure_arm"] %} + @{{ test.preparer_name }}(location=AZURE_LOCATION) + {% else %} + @{{ test.preparer_name }}() + {% endif %} + @recorded_by_proxy{{ async_suffix }} + {% if code_model.options["azure_arm"] %} + {{ async }}def test_{{ testcase.name }}(self, resource_group): + {% else %} + {{ async }}def test_{{ testcase.name }}(self, {{ prefix_lower }}_endpoint): + {{ client_var }} = self.{{ test.create_client_name }}(endpoint={{ prefix_lower }}_endpoint) + {% endif %} + {{testcase.response }}{{ client_var }}{{ testcase.operation_group_prefix }}.{{ testcase.operation.name }}( + {% for key, value in testcase.params.items() %} + {% if code_model.options["azure_arm"] and key == "resource_group_name" %} + {{ key }}=resource_group.name, + {% else %} + {{ key }}={{ value|indent(12) }}, + {% endif %} + {% endfor %} + ){{ testcase.operation_suffix }} + {{ testcase.extra_operation }} + # please add some check logic here by yourself + # ... 
+ +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/testpreparer.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/testpreparer.py.jinja2 new file mode 100644 index 0000000000..b3b15f3727 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/testpreparer.py.jinja2 @@ -0,0 +1,26 @@ +# coding=utf-8 +{{ code_model.options['license_header'] }} +{{ imports }} + +{% for test_name in test_names %} +{% set extra_async = ", is_async=True" if test_name.is_async else ""%} +{% set prefix_lower = test_name.prefix|lower %} +class {{ test_name.base_test_class_name }}(AzureRecordedTestCase): + + def {{ test_name.create_client_name }}(self, endpoint): + credential = self.get_credential({{ test_name.client_name }}{{ extra_async }}) + return self.create_client_from_credential( + {{ test_name.client_name }}, + credential=credential, + endpoint=endpoint, + ) + +{% if not test_name.is_async %} +{{ test_name.preparer_name }} = functools.partial( + PowerShellPreparer, + "{{ prefix_lower }}", + {{ prefix_lower }}_endpoint="https://fake_{{ prefix_lower }}_endpoint.com" +) +{% endif %} + +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/types.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/types.py.jinja2 new file mode 100644 index 0000000000..26e461d235 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/types.py.jinja2 @@ -0,0 +1,7 @@ +# coding=utf-8 +{{ code_model.options['license_header'] }} + +{{ imports }} +{% for nu in code_model.named_unions %} +{{nu.name}} = {{nu.type_definition()}} +{% endfor %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/validation.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/validation.py.jinja2 new file mode 100644 index 0000000000..ebc4b24388 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/validation.py.jinja2 @@ -0,0 +1,38 @@ +{{ code_model.options['license_header'] }} +import functools + +def api_version_validation(**kwargs): + params_added_on = kwargs.pop("params_added_on", {}) + method_added_on = kwargs.pop("method_added_on", "") + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + # this assumes the client has an _api_version attribute + client = args[0] + client_api_version = client._config.api_version # pylint: disable=protected-access + except AttributeError: + return func(*args, **kwargs) + + if method_added_on > client_api_version: + raise ValueError( + f"'{func.__name__}' is not available in API version " + f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and api_version > client_api_version + } + if unsupported: + raise ValueError("".join([ + f"'{param}' is not available in API version {client_api_version}. 
" + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ])) + return func(*args, **kwargs) + return wrapper + return decorator diff --git a/packages/http-client-python/generator/pygen/codegen/templates/vendor.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/vendor.py.jinja2 new file mode 100644 index 0000000000..58c77ec932 --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/vendor.py.jinja2 @@ -0,0 +1,95 @@ +{% import 'keywords.jinja2' as keywords with context %} +{{ code_model.options['license_header'] }} + +{{ imports }} + +{% if code_model.need_mixin_abc %} + {% for client in clients | selectattr("has_mixin") %} +class {{ client.name }}MixinABC( + ABC +): + """DO NOT use this class. It is for internal typing use only.""" + _client: "{{ keywords.async_class }}PipelineClient" + _config: {{ client.name }}Configuration + _serialize: "Serializer" + _deserialize: "Deserializer" + {% endfor %} +{% endif %} +{% if code_model.has_abstract_operations %} + +def raise_if_not_implemented(cls, abstract_methods): + not_implemented = [f for f in abstract_methods if not callable(getattr(cls, f, None))] + if not_implemented: + raise NotImplementedError("The following methods on operation group '{}' are not implemented: '{}'." + " Please refer to https://aka.ms/azsdk/python/dpcodegen/python/customize to learn how to customize.".format( + cls.__name__, '\', \''.join(not_implemented)) + ) +{% endif %} + +{% if code_model.has_etag %} +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None +{% endif %} +{% if code_model.has_form_data and code_model.options["models_mode"] == "dpg" and not async_mode %} +# file-like tuple could be `(filename, IO (or bytes))` or `(filename, IO (or bytes), content_type)` +FileContent = Union[str, bytes, IO[str], IO[bytes]] + +FileType = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], +] + +def serialize_multipart_data_entry(data_entry: Any) -> Any: + if isinstance(data_entry, (list, tuple, dict, Model)): + return json.dumps(data_entry, cls=SdkJSONEncoder, exclude_readonly=True) + return data_entry + +def prepare_multipart_form_data( + body: Mapping[str, Any], multipart_fields: List[str], data_fields: List[str] +) -> Tuple[List[FileType], Dict[str, Any]]: + files: List[FileType] = [] + data: Dict[str, Any] = {} + for multipart_field in multipart_fields: + multipart_entry = body.get(multipart_field) + if isinstance(multipart_entry, list): + files.extend([(multipart_field, e) for e in 
multipart_entry ]) + elif multipart_entry: + files.append((multipart_field, multipart_entry)) + + for data_field in data_fields: + data_entry = body.get(data_field) + if data_entry: + data[data_field] = serialize_multipart_data_entry(data_entry) + + return files, data +{% endif %} diff --git a/packages/http-client-python/generator/pygen/codegen/templates/version.py.jinja2 b/packages/http-client-python/generator/pygen/codegen/templates/version.py.jinja2 new file mode 100644 index 0000000000..2ea06fccbe --- /dev/null +++ b/packages/http-client-python/generator/pygen/codegen/templates/version.py.jinja2 @@ -0,0 +1,4 @@ +# coding=utf-8 +{{ code_model.options['license_header'] }} + +VERSION = "{{ code_model.options['package_version'] }}" diff --git a/packages/http-client-python/generator/pygen/m2r.py b/packages/http-client-python/generator/pygen/m2r.py new file mode 100644 index 0000000000..a61fd4cb0a --- /dev/null +++ b/packages/http-client-python/generator/pygen/m2r.py @@ -0,0 +1,65 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +"""An MD to RST plugin. +""" +import logging +from typing import Any, Dict, Set, Union + +import m2r2 + +from . import YamlUpdatePlugin +from .utils import parse_args + + +_LOGGER = logging.getLogger(__name__) + + +class GeneratorRenderer(m2r2.RestRenderer): + """Redefine the concept of inline HTML in the renderer, we don't want to define a new format + in the description/summary. + """ + + def inline_html(self, html: str) -> str: + """Do not render inline HTML with a role definition.""" + return f":code:`{html}`" + + +class M2R(YamlUpdatePlugin): + """A plugin to convert any description and summary from MD to RST.""" + + def update_yaml(self, yaml_data: Dict[str, Any]) -> None: + """Convert in place the YAML str.""" + self._convert_docstring_no_cycles(yaml_data, set()) + + def _convert_docstring_no_cycles(self, yaml_data: Union[Dict[str, Any], str], node_list: Set[int]) -> None: + """Walk the YAML tree to convert MD to RST.""" + if id(yaml_data) in node_list: + return + node_list.add(id(yaml_data)) + + if isinstance(yaml_data, list): + for elt in yaml_data: + self._convert_docstring_no_cycles(elt, node_list) + elif isinstance(yaml_data, dict): + for key, value in yaml_data.items(): + if key in ["description", "summary"]: + yaml_data[key] = self.convert_to_rst(value) + continue + self._convert_docstring_no_cycles(value, node_list) + + @staticmethod + def convert_to_rst(string_to_convert: str) -> str: + """Convert that string from MD to RST.""" + try: + return m2r2.convert(string_to_convert, renderer=GeneratorRenderer()).strip() + except Exception: # pylint: disable=broad-except + return string_to_convert + + +if __name__ == "__main__": + # CADL pipeline will call this + args, unknown_args = parse_args() + M2R(output_folder=args.output_folder, cadl_file=args.cadl_file, **unknown_args).process() diff --git a/packages/http-client-python/generator/pygen/postprocess/__init__.py b/packages/http-client-python/generator/pygen/postprocess/__init__.py new file mode 100644 index 0000000000..8e9439de64 --- /dev/null +++ b/packages/http-client-python/generator/pygen/postprocess/__init__.py @@ -0,0 +1,183 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Tuple, Any +from pathlib import Path +import os +import shutil +from venv import EnvBuilder +import black +from black.report import NothingChanged +from .venvtools import ExtendedEnvBuilder, python_run + +from .. import Plugin + +_BLACK_MODE = black.Mode() # pyright: ignore [reportPrivateImportUsage] +_BLACK_MODE.line_length = 120 + + +def format_file(file: Path, file_content: str) -> str: + if not file.suffix == ".py": + return file_content + try: + file_content = black.format_file_contents(file_content, fast=True, mode=_BLACK_MODE) + except NothingChanged: + pass + return file_content + + +class PostProcessPlugin(Plugin): + def __init__(self, **kwargs: Any): + super().__init__(**kwargs) + output_folder_uri = self.options["outputFolderUri"] + if output_folder_uri.startswith("file:"): + output_folder_uri = output_folder_uri[5:] + if os.name == "nt" and output_folder_uri.startswith("///"): + output_folder_uri = output_folder_uri[3:] + self.output_folder = Path(output_folder_uri) # path to where the setup.py is + self.setup_venv() + + # set up the venv + # base folder is where the code starts, i.e. where we + self.base_folder, self.namespace = self.get_namespace(self.output_folder, "") + + def setup_venv(self): + venv_path = self.output_folder / Path(".temp_folder") / Path("temp_venv") + + if venv_path.exists(): + env_builder = EnvBuilder(with_pip=True) + self.venv_context = env_builder.ensure_directories(venv_path) + else: + env_builder = ExtendedEnvBuilder(with_pip=True, upgrade_deps=True) + env_builder.create(venv_path) + self.venv_context = env_builder.context + python_run( + self.venv_context, + "pip", + ["install", "-e", str(self.output_folder)], + directory=self.output_folder, + ) + + def get_namespace(self, dir: Path, namespace: str) -> Tuple[Path, str]: + try: + init_file = next(d for d in dir.iterdir() if d.name == "__init__.py") + # we don't care about pkgutil inits, we skip over them + file_content = self.read_file(init_file.relative_to(self.output_folder)) + if "pkgutil" not in file_content: + return dir, namespace + except StopIteration: + pass + + try: + # first, see if we can get a folder that has the same name as the current output folder + start = self.output_folder.stem.split("-")[0] + next_dir = next(d for d in dir.iterdir() if d.is_dir() and d.name == start) + except StopIteration: + invalid_start_chars = [".", "_"] + invalid_dirs = [ + "swagger", + "out", + "tests", + "samples", + ] + + next_dir = next( + d + for d in dir.iterdir() + if d.is_dir() + and not str(d).endswith("egg-info") + and d.name[0] not in invalid_start_chars + and d.name not in invalid_dirs + ) + + namespace = f"{namespace}.{next_dir.name}" if namespace else next_dir.name + return self.get_namespace(next_dir, namespace) + + def process(self) -> bool: + folders = [f for f in self.base_folder.glob("**/*") if f.is_dir() and not f.stem.startswith("__")] + # will always have the root + self.fix_imports_in_init( + generated_file_name="_client", + folder_path=self.base_folder, + namespace=self.namespace, + ) + try: + aio_folder = next(f for f in folders if f.stem == "aio") + self.fix_imports_in_init( + generated_file_name="_client", + folder_path=aio_folder, + namespace=f"{self.namespace}.aio", + ) + except StopIteration: + pass + + try: + models_folder = next(f for f in folders if 
f.stem == "models") + self.fix_imports_in_init( + generated_file_name="_models", + folder_path=models_folder, + namespace=f"{self.namespace}.models", + ) + except StopIteration: + pass + operations_folders = [f for f in folders if f.stem in ["operations", "_operations"]] + for operations_folder in operations_folders: + sub_namespace = ".".join(str(operations_folder.relative_to(self.base_folder)).split(os.sep)) + self.fix_imports_in_init( + generated_file_name="_operations", + folder_path=operations_folder, + namespace=f"{self.namespace}.{sub_namespace}", + ) + shutil.rmtree(f"{str(self.output_folder)}/.temp_folder") + return True + + def fix_imports_in_init(self, generated_file_name: str, folder_path: Path, namespace: str) -> None: + customized_objects_str = python_run( + self.venv_context, + command=[namespace, str(self.output_folder)], + module="get_all", + ) + + if not customized_objects_str: + return + customized_objects = {k: None for k in customized_objects_str.split(",")}.keys() # filter out duplicates + file = (folder_path / "__init__.py").relative_to(self.output_folder) + file_content = self.read_file(file).replace("\r\n", "\n") + added_objs = [] + for obj in customized_objects: + if f" import {obj}\n" in file_content: + # means we're overriding a generated model + file_content = file_content.replace( + f"from .{generated_file_name} import {obj}\n", + f"from ._patch import {obj}\n", + ) + else: + added_objs.append(obj) + file_content = file_content.replace( + "try:\n from ._patch import __all__ as _patch_all\n " + "from ._patch import * # pylint: disable=unused-wildcard-import" + "\nexcept ImportError:\n _patch_all = []", + "", + ) + file_content = file_content.replace("from ._patch import __all__ as _patch_all", "") + file_content = file_content.replace( + "from ._patch import * # pylint: disable=unused-wildcard-import\n", + "", + ) + file_content = file_content.replace("__all__.extend([p for p in _patch_all if p not in __all__])", "") + if added_objs: + # add import + patch_sdk_import = "from ._patch import patch_sdk as _patch_sdk" + imports = "\n".join([f"from ._patch import {obj}" for obj in added_objs]) + if imports: + replacement = f"{imports}\n{patch_sdk_import}" + else: + replacement = patch_sdk_import + file_content = file_content.replace(patch_sdk_import, replacement) + # add to __all__ + added_objs_all = "\n".join([f' "{obj}",' for obj in added_objs]) + "\n" + file_content = file_content.replace("__all__ = [", f"__all__ = [\n{added_objs_all}", 1) + formatted_file = format_file(file, file_content) + self.write_file(file, formatted_file) diff --git a/packages/http-client-python/generator/pygen/postprocess/get_all.py b/packages/http-client-python/generator/pygen/postprocess/get_all.py new file mode 100644 index 0000000000..4206e36a9c --- /dev/null +++ b/packages/http-client-python/generator/pygen/postprocess/get_all.py @@ -0,0 +1,19 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import sys +import importlib + + +def main(namespace): + sdk = importlib.import_module(namespace) + return sdk._patch.__all__ # pylint: disable=protected-access + + +if __name__ == "__main__": + patched = ",".join(main(sys.argv[1])) + output_folder = sys.argv[2] + with open(f"{output_folder}/.temp_folder/patched.txt", "w", encoding="utf-8-sig") as f: + f.write(patched) diff --git a/packages/http-client-python/generator/pygen/postprocess/venvtools.py b/packages/http-client-python/generator/pygen/postprocess/venvtools.py new file mode 100644 index 0000000000..ef286bf085 --- /dev/null +++ b/packages/http-client-python/generator/pygen/postprocess/venvtools.py @@ -0,0 +1,75 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Optional +import subprocess +import venv +import sys +from pathlib import Path + + +_ROOT_DIR = Path(__file__).parent + + +class ExtendedEnvBuilder(venv.EnvBuilder): + """An extended env builder which saves the context, to have access + easily to bin path and such. + """ + + def __init__(self, *args, **kwargs): + self.context = None + if sys.version_info < (3, 9, 0): + # Not supported on Python 3.8, and we don't need it + kwargs.pop("upgrade_deps", None) + super().__init__(*args, **kwargs) + + def ensure_directories(self, env_dir): + self.context = super(ExtendedEnvBuilder, self).ensure_directories(env_dir) + return self.context + + +def create( + env_dir, + system_site_packages=False, + clear=False, + symlinks=False, + with_pip=False, + prompt=None, + upgrade_deps=False, +): + """Create a virtual environment in a directory.""" + builder = ExtendedEnvBuilder( + system_site_packages=system_site_packages, + clear=clear, + symlinks=symlinks, + with_pip=with_pip, + prompt=prompt, + upgrade_deps=upgrade_deps, + ) + builder.create(env_dir) + return builder.context + + +def python_run(venv_context, module, command, directory=_ROOT_DIR) -> Optional[str]: + try: + cmd_line = [ + venv_context.env_exe, + "-m", + module, + ] + command + print("Executing: {}".format(" ".join(cmd_line))) + subprocess.run( + cmd_line, + cwd=directory, + check=True, + stdout=False, + ) + if module == "get_all": + with open(f"{command[1]}/.temp_folder/patched.txt", "r", encoding="utf-8-sig") as f: + return f.read() + except subprocess.CalledProcessError as err: + print(err) + sys.exit(1) + return None diff --git a/packages/http-client-python/generator/pygen/preprocess/__init__.py b/packages/http-client-python/generator/pygen/preprocess/__init__.py new file mode 100644 index 0000000000..cfddddb860 --- /dev/null +++ b/packages/http-client-python/generator/pygen/preprocess/__init__.py @@ -0,0 +1,515 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +"""The preprocessing autorest plugin. 
+""" +import copy +from typing import Callable, Dict, Any, List, Optional + +from ..utils import to_snake_case, extract_original_name +from .helpers import ( + add_redefined_builtin_info, + pad_builtin_namespaces, + pad_special_chars, +) +from .python_mappings import CADL_RESERVED_WORDS, RESERVED_WORDS, PadType + +from .. import YamlUpdatePlugin +from ..utils import ( + parse_args, + get_body_type_for_description, + JSON_REGEXP, + KNOWN_TYPES, + update_enum_value, +) + + +def update_overload_section( + overload: Dict[str, Any], + yaml_data: Dict[str, Any], + section: str, +): + try: + for overload_s, original_s in zip(overload[section], yaml_data[section]): + if overload_s.get("type"): + overload_s["type"] = original_s["type"] + if overload_s.get("headers"): + for overload_h, original_h in zip(overload_s["headers"], original_s["headers"]): + if overload_h.get("type"): + overload_h["type"] = original_h["type"] + except KeyError as exc: + raise ValueError(overload["name"]) from exc + + +def add_overload(yaml_data: Dict[str, Any], body_type: Dict[str, Any], for_flatten_params=False): + overload = copy.deepcopy(yaml_data) + overload["isOverload"] = True + overload["bodyParameter"]["type"] = body_type + overload["bodyParameter"]["defaultToUnsetSentinel"] = False + overload["overloads"] = [] + if yaml_data.get("initialOperation"): + overload["initialOperation"] = yaml_data["initialOperation"] + + if for_flatten_params: + overload["bodyParameter"]["flattened"] = True + else: + overload["parameters"] = [p for p in overload["parameters"] if not p.get("inFlattenedBody")] + # for yaml sync, we need to make sure all of the responses, parameters, and exceptions' types have the same yaml id + for overload_p, original_p in zip(overload["parameters"], yaml_data["parameters"]): + overload_p["type"] = original_p["type"] + update_overload_section(overload, yaml_data, "responses") + update_overload_section(overload, yaml_data, "exceptions") + + # update content type to be an overloads content type + content_type_param = next(p for p in overload["parameters"] if p["wireName"].lower() == "content-type") + content_type_param["inOverload"] = True + content_type_param["inDocstring"] = True + body_type_description = get_body_type_for_description(overload["bodyParameter"]) + content_type_param["description"] = ( + f"Body Parameter content-type. Content type parameter for {body_type_description} body." + ) + content_types = yaml_data["bodyParameter"]["contentTypes"] + if body_type["type"] == "binary" and len(content_types) > 1: + content_types = "'" + "', '".join(content_types) + "'" + content_type_param["description"] += f" Known values are: {content_types}." 
+ overload["bodyParameter"]["inOverload"] = True + for parameter in overload["parameters"]: + parameter["inOverload"] = True + parameter["defaultToUnsetSentinel"] = False + return overload + + +def add_overloads_for_body_param(yaml_data: Dict[str, Any]) -> None: + """If we added a body parameter type, add overloads for that type""" + body_parameter = yaml_data["bodyParameter"] + if not ( + body_parameter["type"]["type"] == "combined" + and len(yaml_data["bodyParameter"]["type"]["types"]) > len(yaml_data["overloads"]) + ): + return + for body_type in body_parameter["type"]["types"]: + if any(o for o in yaml_data["overloads"] if id(o["bodyParameter"]["type"]) == id(body_type)): + continue + yaml_data["overloads"].append(add_overload(yaml_data, body_type)) + if body_type.get("type") == "model" and body_type.get("base") == "json": + yaml_data["overloads"].append(add_overload(yaml_data, body_type, for_flatten_params=True)) + content_type_param = next(p for p in yaml_data["parameters"] if p["wireName"].lower() == "content-type") + content_type_param["inOverload"] = False + content_type_param["inOverridden"] = True + content_type_param["inDocstring"] = True + content_type_param["clientDefaultValue"] = ( + None # make it none bc it will be overridden, we depend on default of overloads + ) + content_type_param["optional"] = True + + +def update_description(description: Optional[str], default_description: str = "") -> str: + if not description: + description = default_description + description.rstrip(" ") + if description and description[-1] != ".": + description += "." + return description + + +def update_operation_group_class_name(prefix: str, class_name: str) -> str: + if class_name == "": + return prefix + "OperationsMixin" + if class_name == "Operations": + return "Operations" + return class_name + "Operations" + + +def update_paging_response(yaml_data: Dict[str, Any]) -> None: + yaml_data["discriminator"] = "paging" + + +HEADERS_HIDE_IN_METHOD = ( + "repeatability-request-id", + "repeatability-first-sent", + "x-ms-client-request-id", + "client-request-id", + "return-client-request-id", +) +HEADERS_CONVERT_IN_METHOD = { + "if-match": { + "clientName": "etag", + "wireName": "etag", + "description": "check if resource is changed. 
Set None to skip checking etag.", + }, + "if-none-match": { + "clientName": "match_condition", + "wireName": "match-condition", + "description": "The match condition to use upon the etag.", + "type": { + "type": "sdkcore", + "name": "MatchConditions", + }, + }, +} + + +def get_wire_name_lower(parameter: Dict[str, Any]) -> str: + return (parameter.get("wireName") or "").lower() + + +def headers_convert(yaml_data: Dict[str, Any], replace_data: Any) -> None: + if isinstance(replace_data, dict): + for k, v in replace_data.items(): + yaml_data[k] = v + + +def has_json_content_type(yaml_data: Dict[str, Any]) -> bool: + return any(ct for ct in yaml_data.get("contentTypes", []) if JSON_REGEXP.match(ct)) + + +def has_multi_part_content_type(yaml_data: Dict[str, Any]) -> bool: + return any(ct for ct in yaml_data.get("contentTypes", []) if ct == "multipart/form-data") + + +class PreProcessPlugin(YamlUpdatePlugin): + """Add Python naming information.""" + + @property + def azure_arm(self) -> bool: + return self.options.get("azure-arm", False) + + @property + def version_tolerant(self) -> bool: + return self.options.get("version-tolerant", True) + + @property + def models_mode(self) -> Optional[str]: + return self.options.get("models-mode", "dpg" if self.is_cadl else None) + + @property + def is_cadl(self) -> bool: + return self.options.get("cadl_file", False) + + def add_body_param_type( + self, + code_model: Dict[str, Any], + body_parameter: Dict[str, Any], + ): + # only add overload for special content type + if ( # pylint: disable=too-many-boolean-expressions + body_parameter + and body_parameter["type"]["type"] in ("model", "dict", "list") + and ( + has_json_content_type(body_parameter) or (self.is_cadl and has_multi_part_content_type(body_parameter)) + ) + and not body_parameter["type"].get("xmlMetadata") + and not any(t for t in ["flattened", "groupedBy"] if body_parameter.get(t)) + ): + origin_type = body_parameter["type"]["type"] + is_dpg_model = body_parameter["type"].get("base") == "dpg" + body_parameter["type"] = { + "type": "combined", + "types": [body_parameter["type"]], + } + # don't add binary overload for multipart content type + if not (self.is_cadl and has_multi_part_content_type(body_parameter)): + body_parameter["type"]["types"].append(KNOWN_TYPES["binary"]) + + if origin_type == "model" and is_dpg_model and self.models_mode == "dpg": + body_parameter["type"]["types"].insert(1, KNOWN_TYPES["any-object"]) + code_model["types"].append(body_parameter["type"]) + + def pad_reserved_words(self, name: str, pad_type: PadType): + # we want to pad hidden variables as well + if not name: + # we'll pass in empty operation groups sometime etc. 
+ return name + + if self.is_cadl: + reserved_words = {k: (v + CADL_RESERVED_WORDS.get(k, [])) for k, v in RESERVED_WORDS.items()} + else: + reserved_words = RESERVED_WORDS + name = pad_special_chars(name) + name_prefix = "_" if name[0] == "_" else "" + name = name[1:] if name[0] == "_" else name + if name.lower() in reserved_words[pad_type]: + return name_prefix + name + pad_type + return name_prefix + name + + def update_types(self, yaml_data: List[Dict[str, Any]]) -> None: + for type in yaml_data: + for property in type.get("properties", []): + property["description"] = update_description(property.get("description", "")) + property["clientName"] = self.pad_reserved_words(property["clientName"].lower(), PadType.PROPERTY) + add_redefined_builtin_info(property["clientName"], property) + if type.get("name"): + pad_type = PadType.MODEL if type["type"] == "model" else PadType.ENUM_CLASS + name = self.pad_reserved_words(type["name"], pad_type) + type["name"] = name[0].upper() + name[1:] + type["description"] = update_description(type.get("description", ""), type["name"]) + type["snakeCaseName"] = to_snake_case(type["name"]) + if type.get("values"): + # we're enums + values_to_add = [] + for value in type["values"]: + padded_name = self.pad_reserved_words(value["name"].lower(), PadType.ENUM_VALUE).upper() + if self.version_tolerant: + if padded_name[0] in "0123456789": + padded_name = "ENUM_" + padded_name + value["name"] = padded_name + else: + if value["name"] != padded_name: + values_to_add.append( + update_enum_value( + name=padded_name, + value=value["value"], + description=value["description"], + enum_type=value["enumType"], + ) + ) + type["values"].extend(values_to_add) + + # add type for reference + for v in HEADERS_CONVERT_IN_METHOD.values(): + if isinstance(v, dict) and "type" in v: + yaml_data.append(v["type"]) + + def update_client(self, yaml_data: Dict[str, Any]) -> None: + yaml_data["description"] = update_description(yaml_data["description"], default_description=yaml_data["name"]) + yaml_data["legacyFilename"] = to_snake_case(yaml_data["name"].replace(" ", "_")) + parameters = yaml_data["parameters"] + for parameter in parameters: + self.update_parameter(parameter) + if parameter["clientName"] == "credential": + policy = parameter["type"].get("policy") + if policy and policy["type"] == "BearerTokenCredentialPolicy" and self.azure_arm: + policy["type"] = "ARMChallengeAuthenticationPolicy" + policy["credentialScopes"] = ["https://management.azure.com/.default"] + if ( + (not self.version_tolerant or self.azure_arm) + and parameters + and parameters[-1]["clientName"] == "credential" + ): + # we need to move credential to the front in mgmt mode for backcompat reasons + yaml_data["parameters"] = [parameters[-1]] + parameters[:-1] + prop_name = yaml_data["name"] + if prop_name.endswith("Client"): + prop_name = prop_name[: len(prop_name) - len("Client")] + yaml_data["builderPadName"] = to_snake_case(prop_name) + for og in yaml_data.get("operationGroups", []): + for o in og["operations"]: + property_if_match = None + property_if_none_match = None + for p in o["parameters"]: + wire_name_lower = get_wire_name_lower(p) + if p["location"] == "header" and wire_name_lower == "client-request-id": + yaml_data["requestIdHeaderName"] = wire_name_lower + if self.version_tolerant and p["location"] == "header": + if wire_name_lower == "if-match": + property_if_match = p + elif wire_name_lower == "if-none-match": + property_if_none_match = p + # pylint: disable=line-too-long + # some service(e.g. 
https://github.com/Azure/azure-rest-api-specs/blob/main/specification/cosmos-db/data-plane/Microsoft.Tables/preview/2019-02-02/table.json) + # only has one, so we need to add "if-none-match" or "if-match" if it's missing + if not property_if_match and property_if_none_match: + property_if_match = property_if_none_match.copy() + property_if_match["wireName"] = "if-match" + if not property_if_none_match and property_if_match: + property_if_none_match = property_if_match.copy() + property_if_none_match["wireName"] = "if-none-match" + + if property_if_match and property_if_none_match: + # arrange if-match and if-none-match to the end of parameters + o["parameters"] = [ + item + for item in o["parameters"] + if get_wire_name_lower(item) not in ("if-match", "if-none-match") + ] + [property_if_match, property_if_none_match] + + o["hasEtag"] = True + yaml_data["hasEtag"] = True + + def get_operation_updater(self, yaml_data: Dict[str, Any]) -> Callable[[Dict[str, Any], Dict[str, Any]], None]: + if yaml_data["discriminator"] == "lropaging": + return self.update_lro_paging_operation + if yaml_data["discriminator"] == "lro": + return self.update_lro_operation + if yaml_data["discriminator"] == "paging": + return self.update_paging_operation + return self.update_operation + + def update_parameter(self, yaml_data: Dict[str, Any]) -> None: + yaml_data["description"] = update_description(yaml_data.get("description", "")) + if not (yaml_data["location"] == "header" and yaml_data["clientName"] in ("content_type", "accept")): + yaml_data["clientName"] = self.pad_reserved_words(yaml_data["clientName"].lower(), PadType.PARAMETER) + if yaml_data.get("propertyToParameterName"): + # need to create a new one with padded keys and values + yaml_data["propertyToParameterName"] = { + self.pad_reserved_words(prop, PadType.PROPERTY): self.pad_reserved_words( + param_name, PadType.PARAMETER + ).lower() + for prop, param_name in yaml_data["propertyToParameterName"].items() + } + wire_name_lower = (yaml_data.get("wireName") or "").lower() + if yaml_data["location"] == "header" and ( + wire_name_lower in HEADERS_HIDE_IN_METHOD or yaml_data.get("clientDefaultValue") == "multipart/form-data" + ): + yaml_data["hideInMethod"] = True + if self.version_tolerant and yaml_data["location"] == "header" and wire_name_lower in HEADERS_CONVERT_IN_METHOD: + headers_convert(yaml_data, HEADERS_CONVERT_IN_METHOD[wire_name_lower]) + if wire_name_lower in ["$host", "content-type", "accept"] and yaml_data["type"]["type"] == "constant": + yaml_data["clientDefaultValue"] = yaml_data["type"]["value"] + + def update_operation( + self, + code_model: Dict[str, Any], + yaml_data: Dict[str, Any], + *, + is_overload: bool = False, + ) -> None: + yaml_data["groupName"] = self.pad_reserved_words(yaml_data["groupName"], PadType.OPERATION_GROUP) + yaml_data["groupName"] = to_snake_case(yaml_data["groupName"]) + yaml_data["name"] = yaml_data["name"].lower() + if yaml_data.get("isLroInitialOperation") is True: + yaml_data["name"] = ( + "_" + self.pad_reserved_words(extract_original_name(yaml_data["name"]), PadType.METHOD) + "_initial" + ) + else: + yaml_data["name"] = self.pad_reserved_words(yaml_data["name"], PadType.METHOD) + yaml_data["description"] = update_description(yaml_data["description"], yaml_data["name"]) + yaml_data["summary"] = update_description(yaml_data.get("summary", "")) + body_parameter = yaml_data.get("bodyParameter") + for parameter in yaml_data["parameters"]: + self.update_parameter(parameter) + if yaml_data.get("bodyParameter"): + 
self.update_parameter(yaml_data["bodyParameter"]) + for entry in yaml_data["bodyParameter"].get("entries", []): + self.update_parameter(entry) + for overload in yaml_data.get("overloads", []): + self.update_operation(code_model, overload, is_overload=True) + for response in yaml_data.get("responses", []): + response["discriminator"] = "operation" + if body_parameter and not is_overload: + # if we have a JSON body, we add a binary overload + self.add_body_param_type(code_model, body_parameter) + add_overloads_for_body_param(yaml_data) + + def _update_lro_operation_helper(self, yaml_data: Dict[str, Any]) -> None: + for response in yaml_data.get("responses", []): + response["discriminator"] = "lro" + response["pollerSync"] = response.get("pollerSync") or "azure.core.polling.LROPoller" + response["pollerAsync"] = response.get("pollerAsync") or "azure.core.polling.AsyncLROPoller" + if not response.get("pollingMethodSync"): + response["pollingMethodSync"] = ( + "azure.mgmt.core.polling.arm_polling.ARMPolling" + if self.azure_arm + else "azure.core.polling.base_polling.LROBasePolling" + ) + if not response.get("pollingMethodAsync"): + response["pollingMethodAsync"] = ( + "azure.mgmt.core.polling.async_arm_polling.AsyncARMPolling" + if self.azure_arm + else "azure.core.polling.async_base_polling.AsyncLROBasePolling" + ) + + def update_lro_paging_operation( + self, + code_model: Dict[str, Any], + yaml_data: Dict[str, Any], + is_overload: bool = False, + item_type: Optional[Dict[str, Any]] = None, + ) -> None: + self.update_lro_operation(code_model, yaml_data, is_overload=is_overload) + self.update_paging_operation(code_model, yaml_data, is_overload=is_overload, item_type=item_type) + yaml_data["discriminator"] = "lropaging" + for response in yaml_data.get("responses", []): + response["discriminator"] = "lropaging" + for overload in yaml_data.get("overloads", []): + self.update_lro_paging_operation( + code_model, + overload, + is_overload=True, + item_type=yaml_data["responses"][0]["itemType"], + ) + + def update_lro_operation( + self, + code_model: Dict[str, Any], + yaml_data: Dict[str, Any], + is_overload: bool = False, + ) -> None: + def convert_initial_operation_response_type(data: Dict[str, Any]) -> None: + for response in data.get("responses", []): + response["type"] = KNOWN_TYPES["binary"] + + self.update_operation(code_model, yaml_data, is_overload=is_overload) + self.update_operation(code_model, yaml_data["initialOperation"], is_overload=is_overload) + convert_initial_operation_response_type(yaml_data["initialOperation"]) + self._update_lro_operation_helper(yaml_data) + for overload in yaml_data.get("overloads", []): + self._update_lro_operation_helper(overload) + self.update_operation(code_model, overload["initialOperation"], is_overload=True) + convert_initial_operation_response_type(overload["initialOperation"]) + + def update_paging_operation( + self, + code_model: Dict[str, Any], + yaml_data: Dict[str, Any], + is_overload: bool = False, + item_type: Optional[Dict[str, Any]] = None, + ) -> None: + self.update_operation(code_model, yaml_data, is_overload=is_overload) + item_type = item_type or yaml_data["itemType"]["elementType"] + if yaml_data.get("nextOperation"): + yaml_data["nextOperation"]["groupName"] = self.pad_reserved_words( + yaml_data["nextOperation"]["groupName"], PadType.OPERATION_GROUP + ) + yaml_data["nextOperation"]["groupName"] = to_snake_case(yaml_data["nextOperation"]["groupName"]) + for response in yaml_data["nextOperation"].get("responses", []): + 
update_paging_response(response) + response["itemType"] = item_type + for response in yaml_data.get("responses", []): + update_paging_response(response) + response["itemType"] = item_type + for overload in yaml_data.get("overloads", []): + self.update_paging_operation(code_model, overload, is_overload=True, item_type=item_type) + + def update_operation_groups(self, code_model: Dict[str, Any], client: Dict[str, Any]) -> None: + operation_groups_yaml_data = client.get("operationGroups", []) + for operation_group in operation_groups_yaml_data: + operation_group["identifyName"] = self.pad_reserved_words( + operation_group.get("name", operation_group["propertyName"]), + PadType.OPERATION_GROUP, + ) + operation_group["identifyName"] = to_snake_case(operation_group["identifyName"]) + operation_group["propertyName"] = self.pad_reserved_words( + operation_group["propertyName"], PadType.OPERATION_GROUP + ) + operation_group["propertyName"] = to_snake_case(operation_group["propertyName"]) + operation_group["className"] = update_operation_group_class_name( + client["name"], operation_group["className"] + ) + for operation in operation_group["operations"]: + self.get_operation_updater(operation)(code_model, operation) + + if operation_group.get("operationGroups"): + self.update_operation_groups(code_model, operation_group) + + def update_yaml(self, yaml_data: Dict[str, Any]) -> None: + """Convert in place the YAML str.""" + self.update_types(yaml_data["types"]) + yaml_data["types"] += KNOWN_TYPES.values() + for client in yaml_data["clients"]: + self.update_client(client) + self.update_operation_groups(yaml_data, client) + for clients in yaml_data["subnamespaceToClients"].values(): + for client in clients: + self.update_client(client) + self.update_operation_groups(yaml_data, client) + if yaml_data.get("namespace"): + yaml_data["namespace"] = pad_builtin_namespaces(yaml_data["namespace"]) + + +if __name__ == "__main__": + # CADL pipeline will call this + args, unknown_args = parse_args() + PreProcessPlugin(output_folder=args.output_folder, cadl_file=args.cadl_file, **unknown_args).process() diff --git a/packages/http-client-python/generator/pygen/preprocess/helpers.py b/packages/http-client-python/generator/pygen/preprocess/helpers.py new file mode 100644 index 0000000000..cb9a664a77 --- /dev/null +++ b/packages/http-client-python/generator/pygen/preprocess/helpers.py @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import re +from typing import Any, Dict +from .python_mappings import ( + REDEFINED_BUILTINS, + BUILTIN_PACKAGES, +) + + +def add_redefined_builtin_info(name: str, yaml_data: Dict[str, Any]) -> None: + if name in REDEFINED_BUILTINS: + yaml_data["pylintDisable"] = "redefined-builtin" + + +def pad_builtin_namespaces(namespace: str) -> str: + items = namespace.split(".") + if items[0] in BUILTIN_PACKAGES: + items[0] = items[0] + "_" + return ".".join(items) + + +def pad_special_chars(name: str) -> str: + return re.sub(r"[^A-z0-9_]", "_", name) diff --git a/packages/http-client-python/generator/pygen/preprocess/python_mappings.py b/packages/http-client-python/generator/pygen/preprocess/python_mappings.py new file mode 100644 index 0000000000..b2beaf2378 --- /dev/null +++ b/packages/http-client-python/generator/pygen/preprocess/python_mappings.py @@ -0,0 +1,224 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from enum import Enum + +basic_latin_chars = { + " ": "Space", + "!": "ExclamationMark", + '"': "QuotationMark", + "#": "NumberSign", + "$": "DollarSign", + "%": "PercentSign", + "&": "Ampersand", + "'": "Apostrophe", + "(": "LeftParenthesis", + ")": "RightParenthesis", + "*": "Asterisk", + "+": "PlusSign", + ",": "Comma", + "-": "HyphenMinus", + ".": "FullStop", + "/": "Slash", + "0": "Zero", + "1": "One", + "2": "Two", + "3": "Three", + "4": "Four", + "5": "Five", + "6": "Six", + "7": "Seven", + "8": "Eight", + "9": "Nine", + ":": "Colon", + ";": "Semicolon", + "<": "LessThanSign", + "=": "EqualSign", + ">": "GreaterThanSign", + "?": "QuestionMark", + "@": "AtSign", + "[": "LeftSquareBracket", + "\\": "Backslash", + "]": "RightSquareBracket", + "^": "CircumflexAccent", + "`": "GraveAccent", + "{": "LeftCurlyBracket", + "|": "VerticalBar", + "}": "RightCurlyBracket", + "~": "Tilde", +} + + +class PadType(str, Enum): + MODEL = "Model" + ENUM_CLASS = "Enum" + METHOD = "_method" + PARAMETER = "_parameter" + ENUM_VALUE = "_enum" + PROPERTY = "_property" + OPERATION_GROUP = "Operations" + + +_always_reserved = [ + "and", + "as", + "assert", + "break", + "class", + "continue", + "def", + "del", + "elif", + "else", + "except", + "exec", + "finally", + "for", + "from", + "global", + "if", + "import", + "in", + "is", + "lambda", + "not", + "or", + "pass", + "raise", + "return", + "try", + "while", + "with", + "yield", + "async", + "await", + "int", +] + +RESERVED_MODEL_PROPERTIES = [ + "keys", + "items", + "values", + "popitem", + "clear", + "update", + "setdefault", + "pop", + "get", +] + +RESERVED_WORDS = { + PadType.METHOD: [*_always_reserved], + PadType.PARAMETER: [ + "self", + # these are kwargs we've reserved for our generated operations + "content_type", + "accept", + "cls", + "polling", + "continuation_token", # for LRO calls + # these are transport kwargs + # https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md#transport + "connection_timeout", + "connection_verify", + "connection_cert", + "connection_data_block_size", + "use_env_settings", + # the following aren't in the readme, but @xiangyan99 said these are also transport kwargs + "read_timeout", + "proxies", + "cookies", + # these are 
policy kwargs + # https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/core/azure-core/CLIENT_LIBRARY_DEVELOPER.md#available-policies + "base_headers", + "headers", + "request_id", + "auto_request_id", + "base_user_agent", + "user_agent", + "user_agent_overwrite", + "user_agent_use_env", + "user_agent", + "sdk_moniker", + "logging_enable", + "logger", + "response_encoding", + "proxies", + "raw_request_hook", + "raw_response_hook", + "network_span_namer", + "tracing_attributes", + "permit_redirects", + "redirect_max", + "redirect_remove_headers", + "redirect_on_status_codes", + "permit_redirects", + "redirect_max", + "redirect_remove_headers", + "redirect_on_status_codes", + "retry_total", + "retry_connect", + "retry_read", + "retry_status", + "retry_backoff_factor", + "retry_backoff_max", + "retry_mode", + "retry_on_status_codes", + "retry_total", + "retry_connect", + "retry_read", + "retry_status", + "retry_backoff_factor", + "retry_backoff_max", + "retry_mode", + "retry_on_status_codes", + *_always_reserved, + ], + PadType.MODEL: ["enum", *_always_reserved], + PadType.PROPERTY: ["self", *_always_reserved], + PadType.ENUM_CLASS: ["enum", *_always_reserved], + PadType.ENUM_VALUE: ["mro", *_always_reserved], + PadType.OPERATION_GROUP: [*_always_reserved], +} + +CADL_RESERVED_WORDS = { + PadType.PARAMETER: ["stream"], + PadType.PROPERTY: RESERVED_MODEL_PROPERTIES, +} + +REDEFINED_BUILTINS = [ # we don't pad, but we need to do lint ignores + "id", + "min", + "max", + "filter", + "property", +] + +BUILTIN_PACKAGES = [ + "array", + "atexit", + "binascii", + "builtins", + "cmath", + "errno", + "faulthandler", + "fcntl", + "gc", + "grp", + "itertools", + "marshal", + "math", + "posix", + "pwd", + "pyexpat", + "select", + "spwd", + "sys", + "syslog", + "time", + "unicodedata", + "xxsubtype", + "zlib", +] diff --git a/packages/http-client-python/generator/pygen/utils.py b/packages/http-client-python/generator/pygen/utils.py new file mode 100644 index 0000000000..6588b99df2 --- /dev/null +++ b/packages/http-client-python/generator/pygen/utils.py @@ -0,0 +1,163 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Any, Dict, Tuple, List +import re +import argparse + + +def update_enum_value(name: str, value: Any, description: str, enum_type: Dict[str, Any]) -> Dict[str, Any]: + return { + "name": name, + "type": "enumvalue", + "value": value, + "description": description, + "enumType": enum_type, + "valueType": enum_type["valueType"], + } + + +def to_snake_case(name: str) -> str: + def replace_upper_characters(m) -> str: + match_str = m.group().lower() + if m.start() > 0 and name[m.start() - 1] == "_": + # we are good if a '_' already exists + return match_str + # the first letter should not have _ + prefix = "_" if m.start() > 0 else "" + + # we will add an extra _ if there are multiple upper case chars together + next_non_upper_case_char_location = m.start() + len(match_str) + if ( + len(match_str) > 2 + and len(name) - next_non_upper_case_char_location > 1 + and name[next_non_upper_case_char_location].isalpha() + ): + return prefix + match_str[: len(match_str) - 1] + "_" + match_str[len(match_str) - 1] + + return prefix + match_str + + result = re.sub("[A-Z]+", replace_upper_characters, name) + return result.replace(" ", "_").replace("__", "_").replace("-", "") + + +def parse_args( + need_cadl_file: bool = True, +) -> Tuple[argparse.Namespace, Dict[str, Any]]: + parser = argparse.ArgumentParser( + description="Run mypy against target folder. Add a local custom plugin to the path prior to execution. " + ) + parser.add_argument( + "--output-folder", + dest="output_folder", + help="Output folder for generated SDK", + required=True, + ) + parser.add_argument( + "--cadl-file", + dest="cadl_file", + help="Serialized cadl file", + required=need_cadl_file, + ) + parser.add_argument( + "--debug", + dest="debug", + help="Debug mode", + required=False, + action="store", + ) + args, unknown_args = parser.parse_known_args() + + def _get_value(value: Any) -> Any: + if value == "true": + return True + if value == "false": + return False + try: + return int(value) + except ValueError: + pass + return value + + unknown_args_ret = { + ua.strip("--").split("=", maxsplit=1)[0]: _get_value(ua.strip("--").split("=", maxsplit=1)[1]) + for ua in unknown_args + } + return args, unknown_args_ret + + +def get_body_type_for_description(body_parameter: Dict[str, Any]) -> str: + if body_parameter["type"]["type"] == "binary": + return "binary" + if body_parameter["type"]["type"] == "string": + return "string" + return "JSON" + + +# used if we want to get a string / binary type etc +KNOWN_TYPES: Dict[str, Dict[str, Any]] = { + "string": {"type": "string"}, + "binary": {"type": "binary"}, + "anydict": {"type": "dict", "elementType": {"type": "any"}}, + "any-object": {"type": "any-object"}, +} + +JSON_REGEXP = re.compile(r"^(application|text)/(.+\+)?json$") +XML_REGEXP = re.compile(r"^(application|text)/(.+\+)?xml$") + + +def build_policies( + is_arm: bool, + async_mode: bool, + *, + is_azure_flavor: bool = False, + tracing: bool = True, +) -> List[str]: + if is_azure_flavor: + # for Azure + async_prefix = "Async" if async_mode else "" + policies = [ + "policies.RequestIdPolicy(**kwargs)", + "self._config.headers_policy", + "self._config.user_agent_policy", + "self._config.proxy_policy", + "policies.ContentDecodePolicy(**kwargs)", + (f"{async_prefix}ARMAutoResourceProviderRegistrationPolicy()" if is_arm else None), + "self._config.redirect_policy", + "self._config.retry_policy", + "self._config.authentication_policy", + 
"self._config.custom_hook_policy", + "self._config.logging_policy", + "policies.DistributedTracingPolicy(**kwargs)" if tracing else None, + "policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None", + "self._config.http_logging_policy", + ] + else: + # for non-Azure + policies = [ + "self._config.headers_policy", + "self._config.user_agent_policy", + "self._config.proxy_policy", + "policies.ContentDecodePolicy(**kwargs)", + "self._config.retry_policy", + "self._config.authentication_policy", + "self._config.logging_policy", + ] + return [p for p in policies if p] + + +def extract_original_name(name: str) -> str: + return name[1 : -len("_initial")] + + +def json_serializable(content_type: str) -> bool: + return bool(JSON_REGEXP.match(content_type.split(";")[0].strip().lower())) + + +def xml_serializable(content_type: str) -> bool: + return bool(XML_REGEXP.match(content_type.split(";")[0].strip().lower())) + + +NAME_LENGTH_LIMIT = 40 diff --git a/packages/http-client-python/generator/requirements.txt b/packages/http-client-python/generator/requirements.txt new file mode 100644 index 0000000000..bebbbb5a64 --- /dev/null +++ b/packages/http-client-python/generator/requirements.txt @@ -0,0 +1,12 @@ +black==24.4.0 +click==8.1.3 +docutils==0.19 +Jinja2==3.1.4 +m2r2==0.3.3 +MarkupSafe==2.1.2 +mistune==0.8.4 +pathspec==0.11.1 +platformdirs==3.2.0 +PyYAML==6.0.1 +tomli==2.0.1 +setuptools==69.2.0 diff --git a/packages/http-client-python/generator/setup.py b/packages/http-client-python/generator/setup.py new file mode 100644 index 0000000000..92c28c3a57 --- /dev/null +++ b/packages/http-client-python/generator/setup.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + + +import os +import re + +from setuptools import setup, find_packages + + +# Version extraction inspired from 'requests' +with open(os.path.join("pygen", "_version.py"), "r") as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) # type: ignore + +if not version: + raise RuntimeError("Cannot find version information") + +setup( + name="pygen", + version=version, + description="Core Library for Python Generation", + long_description=open("README.md", "r").read(), + long_description_content_type="text/markdown", + license="MIT License", + author="Microsoft Corporation", + author_email="azpysdkhelp@microsoft.com", + url="https://github.com/Azure/autorest.python/packages/core", + classifiers=[ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: MIT License", + ], + packages=find_packages( + exclude=[ + "test", + ] + ), + install_requires=[ + "Jinja2 >= 2.11", # I need "include" and auto-context + blank line are not indented by default + "pyyaml", + "m2r2", + "black", + ], +) diff --git a/packages/http-client-python/package-lock.json b/packages/http-client-python/package-lock.json new file mode 100644 index 0000000000..19272042a4 --- /dev/null +++ b/packages/http-client-python/package-lock.json @@ -0,0 +1,9011 @@ +{ + "name": "@typespec/http-client-python", + "version": "0.27.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@typespec/http-client-python", + "version": "0.27.1", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@typespec/openapi3": "~0.59.0", + "js-yaml": "~4.1.0", + "semver": "~7.6.2", + "tsx": "4.17.0" + }, + "devDependencies": { + "@azure-tools/cadl-ranch-expect": "0.15.3", + "@azure-tools/cadl-ranch-specs": "0.37.1", + "@azure-tools/typespec-autorest": "~0.45.0", + "@azure-tools/typespec-azure-core": "~0.45.0", + "@azure-tools/typespec-azure-resource-manager": "~0.45.0", + "@azure-tools/typespec-azure-rulesets": "0.45.0", + "@azure-tools/typespec-client-generator-core": "0.45.4", + "@types/js-yaml": "~4.0.5", + "@types/node": "^18.16.3", + "@types/semver": "7.5.8", + "@typespec/compiler": "~0.59.1", + "@typespec/eslint-config-typespec": "~0.55.0", + "@typespec/http": "~0.59.0", + "@typespec/openapi": "~0.59.0", + "@typespec/rest": "~0.59.0", + "@typespec/versioning": "~0.59.0", + "c8": "~7.13.0", + "chalk": "5.3.0", + "rimraf": "~5.0.0", + "typescript": "~5.5.4", + "vitest": "^1.4.0" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@azure-tools/typespec-autorest": ">=0.45.0 <1.0.0", + "@azure-tools/typespec-azure-core": ">=0.45.0 <1.0.0", + "@azure-tools/typespec-azure-resource-manager": ">=0.45.0 <1.0.0", + "@azure-tools/typespec-azure-rulesets": ">=0.45.0 <3.0.0", + "@azure-tools/typespec-client-generator-core": ">=0.45.4 <1.0.0", + "@typespec/compiler": ">=0.59.1 <1.0.0", + "@typespec/http": ">=0.59.0 <1.0.0", + "@typespec/openapi": ">=0.59.0 <1.0.0", + "@typespec/rest": ">=0.59.0 <1.0.0", + "@typespec/versioning": ">=0.59.0 <1.0.0" + } + }, + "node_modules/@apidevtools/swagger-methods": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/@apidevtools/swagger-methods/-/swagger-methods-3.0.2.tgz", + "integrity": "sha512-QAkD5kK2b1WfjDS/UQn/qQkbwF31uqRjPTrsCs5ZG9BQGAkjwvqGFjjPqAuzac/IYzpPtRzjCP1WrTuAIjMrXg==" + }, + "node_modules/@azure-tools/cadl-ranch": { + "version": "0.14.6", + "resolved": "https://registry.npmjs.org/@azure-tools/cadl-ranch/-/cadl-ranch-0.14.6.tgz", + "integrity": "sha512-FSI0REbSzLEqkvoes/SoAHw4TTYKNkigghwVqPWF6kTlOJW1bwYzVnRDZJFS4A3jSUxXkCpVSM2MD00nwI1IKw==", + "dev": true, + "dependencies": { + "@azure-tools/cadl-ranch-api": "~0.4.6", + "@azure-tools/cadl-ranch-coverage-sdk": "~0.8.4", + "@azure-tools/cadl-ranch-expect": "~0.15.4", + "@azure/identity": "^4.4.1", + "@types/js-yaml": "^4.0.5", + "@typespec/compiler": "~0.60.0", + "@typespec/http": "~0.60.0", + "@typespec/rest": "~0.60.0", + "ajv": "8.17.1", + "body-parser": "^1.20.2", + "deep-equal": "^2.2.0", + "express": "^4.19.2", + "express-promise-router": "^4.1.1", + "glob": "^11.0.0", + "jackspeak": "4.0.1", + "js-yaml": "^4.1.0", + "morgan": "^1.10.0", + "multer": "^1.4.5-lts.1", + "node-fetch": "^3.3.1", + "picocolors": "^1.0.0", + "source-map-support": "^0.5.21", + "winston": "^3.14.0", + "xml2js": "^0.6.2", + "yargs": "^17.7.1" + }, + "bin": { + "cadl-ranch": "cmd/cli.mjs" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@azure-tools/cadl-ranch-api": { + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/@azure-tools/cadl-ranch-api/-/cadl-ranch-api-0.4.6.tgz", + "integrity": "sha512-IwIpl+wZYXWdDuY3hoI81n7rkm90CcjMWxQLhUYjBhppvc4o1YYgkV9jfxMBaclrDgS1R2TrAq2Xul/+kY99lg==", + "dev": true, + "dependencies": { + "body-parser": "^1.20.2", + "deep-equal": "^2.2.0", + "express": "^4.19.2", + "express-promise-router": "^4.1.1", + "glob": "^11.0.0", + "morgan": "^1.10.0", + "multer": "^1.4.5-lts.1", + "picocolors": "^1.0.0", + "winston": "^3.14.0", + "xml-formatter": "^3.6.3", + "xml2js": "^0.6.2", + "yargs": "^17.7.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@azure-tools/cadl-ranch-coverage-sdk": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/@azure-tools/cadl-ranch-coverage-sdk/-/cadl-ranch-coverage-sdk-0.8.4.tgz", + "integrity": "sha512-N207EZEdJrXDKUVmi5Cnw/4y+/Ou9dTbdhMPDoLaalUxZp8T/YK+Y057/M88G0dY76PEAwWPPDolLchW62LZNQ==", + "dev": true, + "dependencies": { + "@azure/identity": "^4.4.1", + "@azure/storage-blob": "^12.24.0", + "@types/node": "^22.1.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@azure-tools/cadl-ranch-coverage-sdk/node_modules/@types/node": { + "version": "22.5.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.5.tgz", + "integrity": "sha512-Xjs4y5UPO/CLdzpgR6GirZJx36yScjh73+2NlLlkFRSoQN8B0DpfXPdZGnvVmLRLOsqDpOfTNv7D9trgGhmOIA==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@azure-tools/cadl-ranch-coverage-sdk/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true + }, + "node_modules/@azure-tools/cadl-ranch-expect": { + "version": "0.15.3", + "resolved": "https://registry.npmjs.org/@azure-tools/cadl-ranch-expect/-/cadl-ranch-expect-0.15.3.tgz", + "integrity": "sha512-ulUf2aN9UznF71NMwqVjcvEOw3F5BlL1HqeTwHZl3ZgRs8x2+HRLE+lwIEjfQi6h1ISn9u3kr+wslB03uOaoIQ==", + "dev": true, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + 
"@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0", + "@typespec/rest": "~0.59.0", + "@typespec/versioning": "~0.59.0" + } + }, + "node_modules/@azure-tools/cadl-ranch-specs": { + "version": "0.37.1", + "resolved": "https://registry.npmjs.org/@azure-tools/cadl-ranch-specs/-/cadl-ranch-specs-0.37.1.tgz", + "integrity": "sha512-XR8UxsbTQTSYbgyObcytRP0PLNWrU6cA8dTwQYh+VA/92HrSQYaJ8cQZZ/EyIFjFuSEVGQ74Rx6hpGvfKUrh2w==", + "dev": true, + "dependencies": { + "@azure-tools/cadl-ranch": "~0.14.5", + "@azure-tools/cadl-ranch-api": "~0.4.6" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@azure-tools/cadl-ranch-expect": "~0.15.3", + "@azure-tools/typespec-azure-core": "~0.45.0", + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0", + "@typespec/rest": "~0.59.0", + "@typespec/versioning": "~0.59.0", + "@typespec/xml": "~0.59.0" + } + }, + "node_modules/@azure-tools/cadl-ranch/node_modules/@azure-tools/cadl-ranch-expect": { + "version": "0.15.4", + "resolved": "https://registry.npmjs.org/@azure-tools/cadl-ranch-expect/-/cadl-ranch-expect-0.15.4.tgz", + "integrity": "sha512-dluMUSFgANVyNhFT/uMst+lpxeh0DUcw0IiLmy8ZCgps+xJ5UEGCV0XIDzACbZb4JUJXgDxlLpsCUWXdL/ARlg==", + "dev": true, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.60.0", + "@typespec/http": "~0.60.0", + "@typespec/rest": "~0.60.0", + "@typespec/versioning": "~0.60.0" + } + }, + "node_modules/@azure-tools/cadl-ranch/node_modules/@typespec/compiler": { + "version": "0.60.1", + "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-0.60.1.tgz", + "integrity": "sha512-I6Vcpvd7mBP7SI5vCBh9rZGXAtVy95BKhAd33Enw32psswiSzRpA7zdyZhOMekTOGVXNS/+E5l2PGGCzQddB4w==", + "dev": true, + "dependencies": { + "@babel/code-frame": "~7.24.7", + "ajv": "~8.17.1", + "change-case": "~5.4.4", + "globby": "~14.0.2", + "mustache": "~4.2.0", + "picocolors": "~1.0.1", + "prettier": "~3.3.3", + "prompts": "~2.4.2", + "semver": "^7.6.3", + "temporal-polyfill": "^0.2.5", + "vscode-languageserver": "~9.0.1", + "vscode-languageserver-textdocument": "~1.0.11", + "yaml": "~2.4.5", + "yargs": "~17.7.2" + }, + "bin": { + "tsp": "cmd/tsp.js", + "tsp-server": "cmd/tsp-server.js" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure-tools/cadl-ranch/node_modules/@typespec/http": { + "version": "0.60.0", + "resolved": "https://registry.npmjs.org/@typespec/http/-/http-0.60.0.tgz", + "integrity": "sha512-ktfS9vpHfltyeAaQLNAZdqrn6Per3vmB/HDH/iyudYLA5wWblT1siKvpFCMWq53CJorRO7yeOKv+Q/M26zwEtg==", + "dev": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.60.0" + } + }, + "node_modules/@azure-tools/cadl-ranch/node_modules/@typespec/rest": { + "version": "0.60.0", + "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.60.0.tgz", + "integrity": "sha512-mHYubyuBvwdV2xkHrJfPwV7b/Ksyb9lA1Q/AQwpVFa7Qu1X075TBVALmH+hK3V0EdUG1CGJZ5Sw4BWgl8ZS0BA==", + "dev": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.60.0", + "@typespec/http": "~0.60.0" + } + }, + "node_modules/@azure-tools/cadl-ranch/node_modules/@typespec/versioning": { + "version": "0.60.1", + "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.60.1.tgz", + "integrity": "sha512-HogYL7P9uOPoSvkLLDjF22S6E9td6EY3c6TcIHhCzDTAQoi54csikD0gNrtcCkFG0UeQk29HgQymV397j+vp4g==", + "dev": true, + "peer": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": 
"~0.60.0" + } + }, + "node_modules/@azure-tools/cadl-ranch/node_modules/picocolors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "dev": true + }, + "node_modules/@azure-tools/typespec-autorest": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-autorest/-/typespec-autorest-0.45.0.tgz", + "integrity": "sha512-6ycZ0bEfXC0U26FHHEt9smAhxh78SACIDY+u7zLAopRzmxjTuthDdGgYSShuRDu3J+vEBi1fOKpz4cYQkgRkBQ==", + "dev": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@azure-tools/typespec-azure-core": "~0.45.0", + "@azure-tools/typespec-azure-resource-manager": "~0.45.0", + "@azure-tools/typespec-client-generator-core": "~0.45.0", + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0", + "@typespec/openapi": "~0.59.0", + "@typespec/rest": "~0.59.0", + "@typespec/versioning": "~0.59.0" + } + }, + "node_modules/@azure-tools/typespec-azure-core": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-core/-/typespec-azure-core-0.45.0.tgz", + "integrity": "sha512-GycGMCmaIVSN+TftPtlPJLyeOrglbLmH08ZiZaVMjSih/TQEJM21RGR6d8QdjlkQWN61ntNDRD+RP2uv9tHmqw==", + "dev": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0", + "@typespec/rest": "~0.59.0" + } + }, + "node_modules/@azure-tools/typespec-azure-resource-manager": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-resource-manager/-/typespec-azure-resource-manager-0.45.0.tgz", + "integrity": "sha512-PdhB03P8PoOlUoUWd+CF5WipGzu2Q3ZjT0EAzgQe878DmXvxMq+zYaPJQtvkq9R6jCxFauDSr5gG7Yd4NINAuA==", + "dev": true, + "dependencies": { + "change-case": "~5.4.4", + "pluralize": "^8.0.0" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@azure-tools/typespec-azure-core": "~0.45.0", + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0", + "@typespec/openapi": "~0.59.0", + "@typespec/rest": "~0.59.0", + "@typespec/versioning": "~0.59.0" + } + }, + "node_modules/@azure-tools/typespec-azure-rulesets": { + "version": "0.45.0", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-rulesets/-/typespec-azure-rulesets-0.45.0.tgz", + "integrity": "sha512-OpMYYc0ElxnswABud22GSqE24ZoJCRGh9fwSA8SoqsJr0uXRX7D6D5pA1FHFT3b5uBVHy0l+FFHvjz9wxfsbUw==", + "dev": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@azure-tools/typespec-azure-core": "~0.45.0", + "@azure-tools/typespec-azure-resource-manager": "~0.45.0", + "@azure-tools/typespec-client-generator-core": "~0.45.0", + "@typespec/compiler": "~0.59.0" + } + }, + "node_modules/@azure-tools/typespec-client-generator-core": { + "version": "0.45.4", + "resolved": "https://registry.npmjs.org/@azure-tools/typespec-client-generator-core/-/typespec-client-generator-core-0.45.4.tgz", + "integrity": "sha512-QJygwMqhEtBi2tPYs/HAfs0QTowXAwp6QpP/Vd2pHnJAncTV1BN17n/9LLAlMu2CnLimqvTuIN+FfliM28AX9w==", + "dev": true, + "dependencies": { + "change-case": "~5.4.4", + "pluralize": "^8.0.0" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@azure-tools/typespec-azure-core": "~0.45.0", + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0", + "@typespec/openapi": "~0.59.0", + "@typespec/rest": "~0.59.0", + "@typespec/versioning": "~0.59.0" + 
} + }, + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dev": true, + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-auth": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.8.0.tgz", + "integrity": "sha512-YvFMowkXzLbXNM11yZtVLhUCmuG0ex7JKOH366ipjmHBhL3vpDcPAeWF+jf0X+jVXwFqo3UhsWUq4kH0ZPdu/g==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.1.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-client": { + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.9.2.tgz", + "integrity": "sha512-kRdry/rav3fUKHl/aDLd/pDLcB+4pOFwPPTVEExuMyaI5r+JBbMWqRbCY1pn5BniDaU3lRxO9eaQ1AmSMehl/w==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-rest-pipeline": "^1.9.1", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.6.1", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-client/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-http-compat": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.1.2.tgz", + "integrity": "sha512-5MnV1yqzZwgNLLjlizsU3QqOeQChkIXw781Fwh1xdAqJR5AA32IUaq6xv1BICJvfbHoa+JYcaij2HFkhLbNTJQ==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-client": "^1.3.0", + "@azure/core-rest-pipeline": "^1.3.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-http-compat/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz", + "integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": 
">=18.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-paging": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz", + "integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.17.0.tgz", + "integrity": "sha512-62Vv8nC+uPId3j86XJ0WI+sBf0jlqTqPUFCBNrGtlaUeQUIXWV/D8GE5A1d+Qx8H7OQojn2WguC8kChD6v0shA==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.8.0", + "@azure/core-tracing": "^1.0.1", + "@azure/core-util": "^1.9.0", + "@azure/logger": "^1.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-tracing": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.1.2.tgz", + "integrity": "sha512-dawW9ifvWAWmUm9/h+/UQ2jrdvjCJ7VJEuCJ6XVNudzcOwm53BFZH4Q845vjfgoUAM8ZxokvVNxNxAITc502YA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.10.0.tgz", + "integrity": "sha512-dqLWQsh9Nro1YQU+405POVtXnwrIVqPyfUzc4zXCbThTg7+vNNaiMkwbX9AMXKyoFYFClxmB3s25ZFr3+jZkww==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-xml": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@azure/core-xml/-/core-xml-1.4.3.tgz", + "integrity": "sha512-D6G7FEmDiTctPKuWegX2WTrS1enKZwqYwdKTO6ZN6JMigcCehlT0/CYl+zWpI9vQ9frwwp7GQT3/owaEXgnOsA==", + "dev": true, + "dependencies": { + "fast-xml-parser": "^4.3.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/identity": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@azure/identity/-/identity-4.4.1.tgz", + 
"integrity": "sha512-DwnG4cKFEM7S3T+9u05NstXU/HN0dk45kPOinUyNKsn5VWwpXd9sbPKEg6kgJzGbm1lMuhx9o31PVbCtM5sfBA==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.5.0", + "@azure/core-client": "^1.9.2", + "@azure/core-rest-pipeline": "^1.1.0", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.3.0", + "@azure/logger": "^1.0.0", + "@azure/msal-browser": "^3.14.0", + "@azure/msal-node": "^2.9.2", + "events": "^3.0.0", + "jws": "^4.0.0", + "open": "^8.0.0", + "stoppable": "^1.1.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/logger": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.4.tgz", + "integrity": "sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==", + "dev": true, + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/msal-browser": { + "version": "3.23.0", + "resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-3.23.0.tgz", + "integrity": "sha512-+QgdMvaeEpdtgRTD7AHHq9aw8uga7mXVHV1KshO1RQ2uI5B55xJ4aEpGlg/ga3H+0arEVcRfT4ZVmX7QLXiCVw==", + "dev": true, + "dependencies": { + "@azure/msal-common": "14.14.2" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-common": { + "version": "14.14.2", + "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-14.14.2.tgz", + "integrity": "sha512-XV0P5kSNwDwCA/SjIxTe9mEAsKB0NqGNSuaVrkCCE2lAyBr/D6YtD80Vkdp4tjWnPFwjzkwldjr1xU/facOJog==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-node": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/@azure/msal-node/-/msal-node-2.13.1.tgz", + "integrity": "sha512-sijfzPNorKt6+9g1/miHwhj6Iapff4mPQx1azmmZExgzUROqWTM1o3ACyxDja0g47VpowFy/sxTM/WsuCyXTiw==", + "dev": true, + "dependencies": { + "@azure/msal-common": "14.14.2", + "jsonwebtoken": "^9.0.0", + "uuid": "^8.3.0" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/@azure/storage-blob": { + "version": "12.24.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.24.0.tgz", + "integrity": "sha512-l8cmWM4C7RoNCBOImoFMxhTXe1Lr+8uQ/IgnhRNMpfoA9bAFWoLG4XrWm6O5rKXortreVQuD+fc1hbzWklOZbw==", + "dev": true, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-client": "^1.6.2", + "@azure/core-http-compat": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-rest-pipeline": "^1.10.1", + "@azure/core-tracing": "^1.1.2", + "@azure/core-util": "^1.6.1", + "@azure/core-xml": "^1.3.2", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", + "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", + "dependencies": { + "@babel/highlight": "^7.24.7", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": 
"sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", + "dependencies": { + "@babel/helper-validator-identifier": "^7.24.7", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/runtime": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@colors/colors": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", + "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@dabh/diagnostics": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", + "integrity": 
"sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", + "dev": true, + "dependencies": { + "colorspace": "1.1.x", + "enabled": "2.0.x", + "kuler": "^2.0.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", + "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz", + "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz", + "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz", + "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz", + "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz", + "integrity": "sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz", + "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz", + "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz", + "integrity": "sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==", + "cpu": [ + "arm" + ], + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz", + "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz", + "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz", + "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz", + "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==", + "cpu": [ + "mips64el" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz", + "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz", + "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz", + "integrity": "sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz", + "integrity": "sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", + "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/openbsd-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz", + "integrity": "sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz", + "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", + "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz", + "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz", + "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz", + "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz", + "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + 
"js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.3", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/momoa": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@humanwhocodes/momoa/-/momoa-2.0.4.tgz", + "integrity": "sha512-RE815I4arJFtt+FVeU1Tgp9/Xvecacji8w/V6XtXsWWH/wz/eNkNbhb+ny/+PlVZjV0rxQpRSQKNKE3lcktHEA==", + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", + "dev": true + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": 
"https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@jsdevtools/ono": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz", + "integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@pkgr/core": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", + "integrity": "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/@readme/better-ajv-errors": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@readme/better-ajv-errors/-/better-ajv-errors-1.6.0.tgz", + "integrity": "sha512-9gO9rld84Jgu13kcbKRU+WHseNhaVt76wYMeRDGsUGYxwJtI3RmEJ9LY9dZCYQGI8eUZLuxb5qDja0nqklpFjQ==", + "dependencies": { + "@babel/code-frame": "^7.16.0", + "@babel/runtime": "^7.21.0", + "@humanwhocodes/momoa": "^2.0.3", + "chalk": "^4.1.2", + "json-to-ast": "^2.0.3", + "jsonpointer": "^5.0.0", + "leven": "^3.1.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "ajv": "4.11.8 - 8" + } + }, + "node_modules/@readme/better-ajv-errors/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { 
+ "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@readme/better-ajv-errors/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@readme/better-ajv-errors/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@readme/better-ajv-errors/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/@readme/json-schema-ref-parser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@readme/json-schema-ref-parser/-/json-schema-ref-parser-1.2.0.tgz", + "integrity": "sha512-Bt3QVovFSua4QmHa65EHUmh2xS0XJ3rgTEUPH998f4OW4VVJke3BuS16f+kM0ZLOGdvIrzrPRqwihuv5BAjtrA==", + "dependencies": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.6", + "call-me-maybe": "^1.0.1", + "js-yaml": "^4.1.0" + } + }, + "node_modules/@readme/openapi-parser": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@readme/openapi-parser/-/openapi-parser-2.6.0.tgz", + "integrity": "sha512-pyFJXezWj9WI1O+gdp95CoxfY+i+Uq3kKk4zXIFuRAZi9YnHpHOpjumWWr67wkmRTw19Hskh9spyY0Iyikf3fA==", + "dependencies": { + "@apidevtools/swagger-methods": "^3.0.2", + "@jsdevtools/ono": "^7.1.3", + "@readme/better-ajv-errors": "^1.6.0", + "@readme/json-schema-ref-parser": "^1.2.0", + "@readme/openapi-schemas": "^3.1.0", + "ajv": "^8.12.0", + "ajv-draft-04": "^1.0.0", + "call-me-maybe": "^1.0.1" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "openapi-types": ">=7" + } + }, + "node_modules/@readme/openapi-schemas": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@readme/openapi-schemas/-/openapi-schemas-3.1.0.tgz", + "integrity": "sha512-9FC/6ho8uFa8fV50+FPy/ngWN53jaUu4GRXlAjcxIRrzhltJnpKkBG2Tp0IDraFJeWrOpk84RJ9EMEEYzaI1Bw==", + "engines": { + "node": ">=18" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.3.tgz", + "integrity": "sha512-MmKSfaB9GX+zXl6E8z4koOr/xU63AMVleLEa64v7R0QF/ZloMs5vcD1sHgM64GXXS1csaJutG+ddtzcueI/BLg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.21.3.tgz", + "integrity": "sha512-zrt8ecH07PE3sB4jPOggweBjJMzI1JG5xI2DIsUbkA+7K+Gkjys6eV7i9pOenNSDJH3eOr/jLb/PzqtmdwDq5g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + 
"version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.21.3.tgz", + "integrity": "sha512-P0UxIOrKNBFTQaXTxOH4RxuEBVCgEA5UTNV6Yz7z9QHnUJ7eLX9reOd/NYMO3+XZO2cco19mXTxDMXxit4R/eQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.21.3.tgz", + "integrity": "sha512-L1M0vKGO5ASKntqtsFEjTq/fD91vAqnzeaF6sfNAy55aD+Hi2pBI5DKwCO+UNDQHWsDViJLqshxOahXyLSh3EA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.21.3.tgz", + "integrity": "sha512-btVgIsCjuYFKUjopPoWiDqmoUXQDiW2A4C3Mtmp5vACm7/GnyuprqIDPNczeyR5W8rTXEbkmrJux7cJmD99D2g==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.21.3.tgz", + "integrity": "sha512-zmjbSphplZlau6ZTkxd3+NMtE4UKVy7U4aVFMmHcgO5CUbw17ZP6QCgyxhzGaU/wFFdTfiojjbLG3/0p9HhAqA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.21.3.tgz", + "integrity": "sha512-nSZfcZtAnQPRZmUkUQwZq2OjQciR6tEoJaZVFvLHsj0MF6QhNMg0fQ6mUOsiCUpTqxTx0/O6gX0V/nYc7LrgPw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.21.3.tgz", + "integrity": "sha512-MnvSPGO8KJXIMGlQDYfvYS3IosFN2rKsvxRpPO2l2cum+Z3exiExLwVU+GExL96pn8IP+GdH8Tz70EpBhO0sIQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.21.3.tgz", + "integrity": "sha512-+W+p/9QNDr2vE2AXU0qIy0qQE75E8RTwTwgqS2G5CRQ11vzq0tbnfBd6brWhS9bCRjAjepJe2fvvkvS3dno+iw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.21.3.tgz", + "integrity": "sha512-yXH6K6KfqGXaxHrtr+Uoy+JpNlUlI46BKVyonGiaD74ravdnF9BUNC+vV+SIuB96hUMGShhKV693rF9QDfO6nQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.21.3.tgz", + "integrity": "sha512-R8cwY9wcnApN/KDYWTH4gV/ypvy9yZUHlbJvfaiXSB48JO3KpwSpjOGqO4jnGkLDSk1hgjYkTbTt6Q7uvPf8eg==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.21.3", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.21.3.tgz", + "integrity": "sha512-kZPbX/NOPh0vhS5sI+dR8L1bU2cSO9FgxwM8r7wHzGydzfSjLRCFAT87GR5U9scj2rhzN3JPYVC7NoBbl4FZ0g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.21.3.tgz", + "integrity": "sha512-S0Yq+xA1VEH66uiMNhijsWAafffydd2X5b77eLHfRmfLsRSpbiAWiRHV6DEpz6aOToPsgid7TI9rGd6zB1rhbg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.21.3.tgz", + "integrity": "sha512-9isNzeL34yquCPyerog+IMCNxKR8XYmGd0tHSV+OVx0TmE0aJOo9uw4fZfUuk2qxobP5sug6vNdZR6u7Mw7Q+Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.21.3.tgz", + "integrity": "sha512-nMIdKnfZfzn1Vsk+RuOvl43ONTZXoAPUUxgcU0tXooqg4YrAqzfKzVenqqk2g5efWh46/D28cKFrOzDSW28gTA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.21.3.tgz", + "integrity": "sha512-fOvu7PCQjAj4eWDEuD8Xz5gpzFqXzGlxHZozHP4b9Jxv9APtdxL6STqztDzMLuRXEc4UpXGGhx029Xgm91QBeA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rushstack/eslint-patch": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.10.1.tgz", + "integrity": "sha512-S3Kq8e7LqxkA9s7HKLqXGTGck1uwis5vAXan3FnU5yw1Ec5hsSGnq4s/UCaSqABPOnOTg7zASLyst7+ohgWexg==", + "dev": true + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "dev": true + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true + }, + "node_modules/@types/js-yaml": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", + "integrity": 
"sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==", + "dev": true + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==" + }, + "node_modules/@types/node": { + "version": "18.19.50", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.50.tgz", + "integrity": "sha512-xonK+NRrMBRtkL1hVCc3G+uXtjh1Al4opBLjqVmipe5ZAaBYWW6cNAiBVZ1BvmkBhep698rP3UM3aRAdSALuhg==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", + "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "dev": true + }, + "node_modules/@types/semver": { + "version": "7.5.8", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", + "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==", + "dev": true + }, + "node_modules/@types/triple-beam": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz", + "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.18.0.tgz", + "integrity": "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/type-utils": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.18.0.tgz", + "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.18.0.tgz", + "integrity": 
"sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.18.0.tgz", + "integrity": "sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "7.18.0", + "@typescript-eslint/utils": "7.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.18.0.tgz", + "integrity": "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==", + "dev": true, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.18.0.tgz", + "integrity": "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/visitor-keys": "7.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.18.0.tgz", + "integrity": 
"sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "7.18.0", + "@typescript-eslint/types": "7.18.0", + "@typescript-eslint/typescript-estree": "7.18.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "7.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.18.0.tgz", + "integrity": "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "7.18.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typespec/compiler": { + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-0.59.1.tgz", + "integrity": "sha512-O2ljgr6YoFaIH6a8lWc90/czdv4B2X6N9wz4WsnQnVvgO0Tj0s+3xkvp4Tv59RKMhT0f3fK6dL8oEGO32FYk1A==", + "dependencies": { + "@babel/code-frame": "~7.24.7", + "ajv": "~8.17.1", + "change-case": "~5.4.4", + "globby": "~14.0.2", + "mustache": "~4.2.0", + "picocolors": "~1.0.1", + "prettier": "~3.3.3", + "prompts": "~2.4.2", + "semver": "^7.6.3", + "temporal-polyfill": "^0.2.5", + "vscode-languageserver": "~9.0.1", + "vscode-languageserver-textdocument": "~1.0.11", + "yaml": "~2.4.5", + "yargs": "~17.7.2" + }, + "bin": { + "tsp": "cmd/tsp.js", + "tsp-server": "cmd/tsp-server.js" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@typespec/compiler/node_modules/picocolors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==" + }, + "node_modules/@typespec/eslint-config-typespec": { + "version": "0.55.0", + "resolved": "https://registry.npmjs.org/@typespec/eslint-config-typespec/-/eslint-config-typespec-0.55.0.tgz", + "integrity": "sha512-zZI2ERGdgM9T6neL+Qdht3z89elGI38h68vSYnq5KFR3J500llSJI0Yb5NnE1G2Y7pjmBrnYWhL7UoOaGpW42A==", + "deprecated": "Package is deprecated as it was meant for TypeSpec internal use only", + "dev": true, + "dependencies": { + "@rushstack/eslint-patch": "1.10.1", + "@typescript-eslint/eslint-plugin": "^7.4.0", + "@typescript-eslint/parser": "^7.4.0", + "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-deprecation": "^2.0.0", + "eslint-plugin-prettier": "^5.1.3", + "eslint-plugin-unicorn": "^51.0.1", + "eslint-plugin-vitest": "~0.4.0", + "typescript": "~5.4.3" + } + }, + "node_modules/@typespec/eslint-config-typespec/node_modules/typescript": { + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/@typespec/http": { + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/http/-/http-0.59.1.tgz", + "integrity": 
"sha512-Ai8oCAO+Bw1HMSZ9gOI5Od4fNn/ul4HrVtTB01xFuLK6FQj854pxhzao8ylPnr7gIRQ327FV12/QfXR87yCiYQ==", + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.59.0" + } + }, + "node_modules/@typespec/openapi": { + "version": "0.59.0", + "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-0.59.0.tgz", + "integrity": "sha512-do1Dm5w0MuK3994gYTBg6qMfgeIxmmsDqnz3zimYKMPpbnUBi4F6/o4iCfn0Fn9kaNl+H6UlOzZpsZW9xHui1Q==", + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0" + } + }, + "node_modules/@typespec/openapi3": { + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/openapi3/-/openapi3-0.59.1.tgz", + "integrity": "sha512-89VbUbkWKxeFgE0w0hpVyk1UZ6ZHRxOhcAHvF5MgxQxEhs2ALXKAqapWjFQsYrLBhAUoWzdPFrJJUMbwF9kX0Q==", + "dependencies": { + "@readme/openapi-parser": "~2.6.0", + "yaml": "~2.4.5" + }, + "bin": { + "tsp-openapi3": "cmd/tsp-openapi3.js" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.1", + "@typespec/openapi": "~0.59.0", + "@typespec/versioning": "~0.59.0" + } + }, + "node_modules/@typespec/rest": { + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.59.1.tgz", + "integrity": "sha512-uKU431jBYL2tVQWG5THA75+OtXDa1e8cMAafYK/JJRRiVRd8D/Epd8fp07dzlB8tFGrhCaGlekRMqFPFrHh2/A==", + "dev": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.1" + } + }, + "node_modules/@typespec/versioning": { + "version": "0.59.0", + "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.59.0.tgz", + "integrity": "sha512-aihO/ux0lLmsuYAdGVkiBflSudcZokYG42SELk1FtMFo609G3Pd7ep7hau6unBnMIceQZejB0ow5UGRupK4X5A==", + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.59.0" + } + }, + "node_modules/@typespec/xml": { + "version": "0.59.0", + "resolved": "https://registry.npmjs.org/@typespec/xml/-/xml-0.59.0.tgz", + "integrity": "sha512-UoSsEmm7SXEtL9OXsqotu1TjruJSobqZMhUKAAlC9EP2WfQIHLRfBu7xaZB0sgwq7CM6zy/Hq1RZfQyL1KqEvg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@typespec/compiler": "~0.59.0" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, + "node_modules/@vitest/expect": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-1.6.0.tgz", + "integrity": "sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==", + "dev": true, + "dependencies": { + "@vitest/spy": "1.6.0", + "@vitest/utils": "1.6.0", + "chai": "^4.3.10" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-1.6.0.tgz", + "integrity": "sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==", + "dev": true, + "dependencies": { + "@vitest/utils": "1.6.0", + "p-limit": "^5.0.0", + "pathe": "^1.1.1" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + 
"node_modules/@vitest/runner/node_modules/p-limit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@vitest/runner/node_modules/yocto-queue": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.1.1.tgz", + "integrity": "sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@vitest/snapshot": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-1.6.0.tgz", + "integrity": "sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==", + "dev": true, + "dependencies": { + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "pretty-format": "^29.7.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-1.6.0.tgz", + "integrity": "sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==", + "dev": true, + "dependencies": { + "tinyspy": "^2.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-1.6.0.tgz", + "integrity": "sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==", + "dev": true, + "dependencies": { + "diff-sequences": "^29.6.3", + "estree-walker": "^3.0.3", + "loupe": "^2.3.7", + "pretty-format": "^29.7.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dev": true, + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": 
"7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dev": true, + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-draft-04": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ajv-draft-04/-/ajv-draft-04-1.0.0.tgz", + "integrity": "sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==", + "peerDependencies": { + "ajv": "^8.5.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/append-field": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", + "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==", + "dev": true + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "dev": true + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": 
"sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "dev": true, + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/basic-auth/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "dev": true, + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": 
"sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz", + "integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001646", + "electron-to-chromium": "^1.5.4", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "dev": true + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "dev": true, + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/c8": { + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/c8/-/c8-7.13.0.tgz", + "integrity": "sha512-/NL4hQTv1gBL6J6ei80zu3IiTrmePDKXKXOTLpHvcIWZTVYQlDhVWjjWvkhICylE8EwwnMVzDZugCvdx0/DIIA==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@istanbuljs/schema": "^0.1.3", + "find-up": "^5.0.0", + "foreground-child": "^2.0.0", + "istanbul-lib-coverage": "^3.2.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-reports": "^3.1.4", + "rimraf": "^3.0.2", + "test-exclude": "^6.0.0", + "v8-to-istanbul": "^9.0.0", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9" + }, + "bin": { + "c8": "bin/c8.js" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/c8/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/c8/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/c8/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/c8/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/c8/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/c8/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/c8/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/c8/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/c8/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/c8/node_modules/string-width": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/c8/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/c8/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-me-maybe": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.2.tgz", + "integrity": "sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==" + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001660", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001660.tgz", + "integrity": "sha512-GacvNTTuATm26qC74pt+ad1fW15mlQ/zuTzzY1ZoIzECTP8HURDfF43kNxPgf7H1jmelCBQTTbBNxdSXOA7Bqg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/chai": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.5.0.tgz", + "integrity": "sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==", + "dev": true, + 
"dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/change-case": { + "version": "5.4.4", + "resolved": "https://registry.npmjs.org/change-case/-/change-case-5.4.4.tgz", + "integrity": "sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==" + }, + "node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ci-info": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.0.0.tgz", + "integrity": "sha512-TdHqgGf9odd8SXNuxtUBVx8Nv+qZOejE6qyqiy5NtbYYQOeFa6zmHkxlPzmaLxWWHsU6nJmB7AETdVPi+2NBUg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/clean-regexp": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clean-regexp/-/clean-regexp-1.0.0.tgz", + "integrity": "sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/clean-regexp/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cliui/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + 
"node_modules/cliui/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/code-error-fragment": { + "version": "0.0.230", + "resolved": "https://registry.npmjs.org/code-error-fragment/-/code-error-fragment-0.0.230.tgz", + "integrity": "sha512-cadkfKp6932H8UkhzE/gcUqhRMNf8jHzkAN7+5Myabswaghu4xABTgPHDCjW+dBAJxj/SpkTYokpzDqY4pCzQw==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/color": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", + "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.3", + "color-string": "^1.6.0" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "dev": true, + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "node_modules/colorspace": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", + "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", + "dev": true, + "dependencies": { + "color": "^3.1.3", + "text-hex": "1.0.x" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": 
"sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "engines": [ + "node >= 0.8" + ], + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/confbox": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.7.tgz", + "integrity": "sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==", + "dev": true + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dev": true, + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "dev": true + }, + "node_modules/core-js-compat": { + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.38.1.tgz", + "integrity": "sha512-JRH6gfXxGmrzF3tZ57lFx97YARxCXPaMzPo6jELZhv88pBH5VXpQ+y0znKGlFnzuaihqhLbefxSJxWJMPtfDzw==", + "dev": true, + "dependencies": { + "browserslist": "^4.23.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/debug": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-eql": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/deep-equal": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.3.tgz", + "integrity": "sha512-ZIwpnevOurS8bpT4192sqAowWM76JDKSHYzMLty3BZGSswgq6pBaH3DhCSW5xVAZICZyKdOBPjwww5wfgT/6PA==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.0", + "call-bind": "^1.0.5", + "es-get-iterator": "^1.1.3", + "get-intrinsic": "^1.2.2", + "is-arguments": "^1.1.1", + "is-array-buffer": "^3.0.2", + "is-date-object": "^1.0.5", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "isarray": "^2.0.5", + "object-is": "^1.1.5", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.5.1", + "side-channel": "^1.0.4", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.1", + "which-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "dev": true, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dir-glob/node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "dev": true + }, + "node_modules/electron-to-chromium": { + "version": "1.5.23", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.23.tgz", + "integrity": "sha512-mBhODedOXg4v5QWwl21DjM5amzjmI1zw9EPrPK/5Wx7C8jt33bpZNrC7OhHUG3pxRtbLpr3W2dXT+Ph1SsfRZA==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/enabled": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", + "integrity": 
"sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==", + "dev": true + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-get-iterator": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", + "integrity": "sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "has-symbols": "^1.0.3", + "is-arguments": "^1.1.1", + "is-map": "^2.0.2", + "is-set": "^2.0.2", + "is-string": "^1.0.7", + "isarray": "^2.0.5", + "stop-iteration-iterator": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/esbuild": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.23.1.tgz", + "integrity": "sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==", + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.23.1", + "@esbuild/android-arm": "0.23.1", + "@esbuild/android-arm64": "0.23.1", + "@esbuild/android-x64": "0.23.1", + "@esbuild/darwin-arm64": "0.23.1", + "@esbuild/darwin-x64": "0.23.1", + "@esbuild/freebsd-arm64": "0.23.1", + "@esbuild/freebsd-x64": "0.23.1", + "@esbuild/linux-arm": "0.23.1", + "@esbuild/linux-arm64": "0.23.1", + "@esbuild/linux-ia32": "0.23.1", + "@esbuild/linux-loong64": "0.23.1", + "@esbuild/linux-mips64el": "0.23.1", + "@esbuild/linux-ppc64": "0.23.1", + "@esbuild/linux-riscv64": "0.23.1", + "@esbuild/linux-s390x": "0.23.1", + "@esbuild/linux-x64": "0.23.1", + "@esbuild/netbsd-x64": "0.23.1", + "@esbuild/openbsd-arm64": "0.23.1", + "@esbuild/openbsd-x64": "0.23.1", + "@esbuild/sunos-x64": "0.23.1", + "@esbuild/win32-arm64": "0.23.1", + "@esbuild/win32-ia32": "0.23.1", + "@esbuild/win32-x64": "0.23.1" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "engines": { + "node": ">=6" + } + }, + 
"node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "dev": true + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-deprecation": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-deprecation/-/eslint-plugin-deprecation-2.0.0.tgz", + "integrity": "sha512-OAm9Ohzbj11/ZFyICyR5N6LbOIvQMp7ZU2zI7Ej0jIc8kiGUERXPNMfw2QqqHD1ZHtjMub3yPZILovYEYucgoQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/utils": "^6.0.0", + "tslib": "^2.3.1", + "tsutils": "^3.21.0" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0", + "typescript": "^4.2.4 || ^5.0.0" + } + }, + "node_modules/eslint-plugin-deprecation/node_modules/@typescript-eslint/scope-manager": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz", + "integrity": "sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": 
"6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-deprecation/node_modules/@typescript-eslint/types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.21.0.tgz", + "integrity": "sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-deprecation/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz", + "integrity": "sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-deprecation/node_modules/@typescript-eslint/utils": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.21.0.tgz", + "integrity": "sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "6.21.0", + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/typescript-estree": "6.21.0", + "semver": "^7.5.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + } + }, + "node_modules/eslint-plugin-deprecation/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz", + "integrity": "sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-deprecation/node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + 
} + }, + "node_modules/eslint-plugin-deprecation/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/eslint-plugin-deprecation/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.1.tgz", + "integrity": "sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==", + "dev": true, + "dependencies": { + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.9.1" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-plugin-prettier" + }, + "peerDependencies": { + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "eslint-config-prettier": "*", + "prettier": ">=3.0.0" + }, + "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-unicorn": { + "version": "51.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-51.0.1.tgz", + "integrity": "sha512-MuR/+9VuB0fydoI0nIn2RDA5WISRn4AsJyNSaNKLVwie9/ONvQhxOBbkfSICBPnzKrB77Fh6CZZXjgTt/4Latw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.22.20", + "@eslint-community/eslint-utils": "^4.4.0", + "@eslint/eslintrc": "^2.1.4", + "ci-info": "^4.0.0", + "clean-regexp": "^1.0.0", + "core-js-compat": "^3.34.0", + "esquery": "^1.5.0", + "indent-string": "^4.0.0", + "is-builtin-module": "^3.2.1", + "jsesc": "^3.0.2", + "pluralize": "^8.0.0", + "read-pkg-up": "^7.0.1", + "regexp-tree": "^0.1.27", + "regjsparser": "^0.10.0", + "semver": "^7.5.4", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sindresorhus/eslint-plugin-unicorn?sponsor=1" + }, + "peerDependencies": { + "eslint": ">=8.56.0" + } + }, + "node_modules/eslint-plugin-vitest": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-vitest/-/eslint-plugin-vitest-0.4.1.tgz", + "integrity": "sha512-+PnZ2u/BS+f5FiuHXz4zKsHPcMKHie+K+1Uvu/x91ovkCMEOJqEI8E9Tw1Wzx2QRz4MHOBHYf1ypO8N1K0aNAA==", + "dev": true, + "dependencies": { + "@typescript-eslint/utils": "^7.4.0" + }, + "engines": { + "node": "^18.0.0 || >= 20.0.0" + }, + "peerDependencies": { + "eslint": ">=8.0.0", + "vitest": "*" + }, + "peerDependenciesMeta": { + "@typescript-eslint/eslint-plugin": { + "optional": true + }, + "vitest": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": 
"^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/eslint/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/eslint/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/eslint/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/eslint/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + 
"node_modules/execa/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/express": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", + "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "dev": true, + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.6.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.10", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/express-promise-router": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/express-promise-router/-/express-promise-router-4.1.1.tgz", + "integrity": "sha512-Lkvcy/ZGrBhzkl3y7uYBHLMtLI4D6XQ2kiFg9dq7fbktBch5gjqJ0+KovX0cvCAvTJw92raWunRLM/OM+5l4fA==", + "dev": true, + "dependencies": { + "is-promise": "^4.0.0", + "lodash.flattendeep": "^4.0.0", + "methods": "^1.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/express": "^4.0.0", + "express": "^4.0.0" + }, + "peerDependenciesMeta": { + "@types/express": { + "optional": true + } + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-diff": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", + "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": 
"^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fast-uri": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.1.tgz", + "integrity": "sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw==" + }, + "node_modules/fast-xml-parser": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz", + "integrity": "sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fecha": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz", + "integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==", + "dev": true + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": 
"sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "dev": true, + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flat-cache/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/flat-cache/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/flat-cache/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/flat-cache/node_modules/rimraf": { + "version": 
"3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/flatted": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", + "dev": true + }, + "node_modules/fn.name": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", + "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==", + "dev": true + }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/foreground-child": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", + "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dev": true, + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + 
"url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-tsconfig": { + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.8.1.tgz", + "integrity": "sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg==", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.0.tgz", + "integrity": "sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^4.0.1", + "minimatch": "^10.0.0", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^2.0.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/foreground-child": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": 
"sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", + "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "14.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.2.tgz", + "integrity": "sha512-s3Fq41ZVh7vbbe2PN3nrW7yC7U7MFVc5c98/iTl9c2GawNMKx/J648KQRW6WKkuU8GIbbh2IXfIRQjOZnXcTnw==", + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.2", + "ignore": "^5.2.4", + "path-type": "^5.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", 
+ "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dev": true, + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", + "integrity": 
"sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", + "dev": true, + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/internal-slot": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.0", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dev": true, + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "dependencies": { + "builtin-modules": 
"^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", + "dev": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "dev": true, + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": 
"sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "dev": true + }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": 
"sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.3.tgz", + "integrity": "sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jackspeak": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.0.1.tgz", + "integrity": "sha512-cub8rahkh0Q/bw1+GxP7aeSe29hHHn2V4m29nnDlvCdlgU+3UGxkZp7Z53jLUdpX3jdTO0nJZUDl3xvbWc2Xog==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + 
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/json-to-ast": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/json-to-ast/-/json-to-ast-2.1.0.tgz", + "integrity": "sha512-W9Lq347r8tA1DfMvAGn9QNcgYm4Wm7Yc+k8e6vezpMnRT+NHbtlxgNBXRVjXe9YM6eTn6+p/MKOlV/aABJcSnQ==", + "dependencies": { + "code-error-fragment": "0.0.230", + "grapheme-splitter": "^1.0.4" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/jsonpointer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz", + "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dev": true, + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dev": true, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/jws": { + "version": "3.2.2", + "resolved": 
"https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dev": true, + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dev": true, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dev": true, + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "engines": { + "node": ">=6" + } + }, + "node_modules/kuler": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", + "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==", + "dev": true + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/local-pkg": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.5.0.tgz", + "integrity": "sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==", + "dev": true, + "dependencies": { + "mlly": "^1.4.2", + "pkg-types": "^1.0.3" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/lodash.flattendeep": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", + "integrity": "sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==", + "dev": true + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "dev": true + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "dev": true + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "dev": true + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "dev": true + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "dev": true + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "dev": true + }, + "node_modules/logform": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.6.1.tgz", + "integrity": "sha512-CdaO738xRapbKIMVn2m4F6KTj4j7ooJ8POVnebSgKo3KBz5axNXRAL7ZdRjIV6NOr2Uf4vjtRkxrFETOioCqSA==", + "dev": true, + "dependencies": { + "@colors/colors": "1.6.0", + "@types/triple-beam": "^1.3.2", + "fecha": "^4.2.0", + "ms": "^2.1.1", + "safe-stable-stringify": "^2.3.1", + "triple-beam": "^1.3.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, + "node_modules/lru-cache": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.0.1.tgz", + "integrity": "sha512-CgeuL5uom6j/ZVrg7G/+1IXqRY8JXX4Hghfy5YE0EhoYQWvndP1kufu58cmZLNIDKnRhZrXfdS9urVWx98AipQ==", + 
"dev": true, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/magic-string": { + "version": "0.30.11", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.11.tgz", + "integrity": "sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + 
"dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mlly": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.1.tgz", + "integrity": "sha512-rrVRZRELyQzrIUAVMHxP97kv+G786pHmOKzuFII8zDYahFBS7qnHh2AlYSl1GAHhaMPCz6/oHjVMcfFYgFYHgA==", + "dev": true, + "dependencies": { + "acorn": "^8.11.3", + "pathe": "^1.1.2", + "pkg-types": "^1.1.1", + "ufo": "^1.5.3" + } + }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dev": true, + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/morgan/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/morgan/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + 
"node_modules/morgan/node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "dev": true, + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/multer": { + "version": "1.4.5-lts.1", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz", + "integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==", + "dev": true, + "dependencies": { + "append-field": "^1.0.0", + "busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", + "object-assign": "^4.1.1", + "type-is": "^1.6.4", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/mustache": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", + "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", + "bin": { + "mustache": "bin/mustache" + } + }, + "node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "dev": true, + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/node-releases": { + "version": "2.0.18", + "resolved": 
"https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", + "dev": true + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/normalize-package-data/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-is": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.6.tgz", + "integrity": "sha512-F8cZ+KfGlSGi09lJT7/Nd6KJZ9ygtvYC0/UYYLI9nmQKLMnydpB9yvbv9K1uSkEu7FU9vYPmVwLg328tX+ot3Q==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", + "dev": 
true, + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dev": true, + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/one-time": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", + "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", + "dev": true, + "dependencies": { + "fn.name": "1.x.x" + } + }, + "node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "dev": true, + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==", + "peer": true + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", + "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", + "dev": true + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/path-scurry": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz", + "integrity": 
"sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==", + "dev": true, + "dependencies": { + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz", + "integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==", + "dev": true + }, + "node_modules/path-type": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", + "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pathe": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", + "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", + "dev": true + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/picocolors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pkg-types": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.2.0.tgz", + "integrity": "sha512-+ifYuSSqOQ8CqP4MbZA5hDpb97n3E8SVWdJe+Wms9kj745lmd3b7EZJiqvmLwAlmRfjrI7Hi5z3kdBJ93lFNPA==", + "dev": true, + "dependencies": { + "confbox": "^0.1.7", + "mlly": "^1.7.1", + "pathe": "^1.1.2" + } + }, + "node_modules/pluralize": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", + "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.4.47", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", + "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": 
"https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dev": true, + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dev": true, + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": 
"https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dev": true, + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true + }, + "node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/read-pkg-up/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" + }, + "node_modules/regexp-tree": { + "version": "0.1.27", + "resolved": "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.27.tgz", + "integrity": "sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==", + "dev": true, + "bin": { + "regexp-tree": "bin/regexp-tree" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regjsparser": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.10.0.tgz", + "integrity": "sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA==", + "dev": true, + "dependencies": { + "jsesc": "~0.5.0" + }, + "bin": { + 
"regjsparser": "bin/parser" + } + }, + "node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "5.0.10", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz", + "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==", + "dev": true, + "dependencies": { + "glob": "^10.3.7" + }, + "bin": { + "rimraf": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/foreground-child": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": 
"sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/rimraf/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true + }, + "node_modules/rimraf/node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rollup": { + "version": "4.21.3", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.3.tgz", + "integrity": "sha512-7sqRtBNnEbcBtMeRVc6VRsJMmpI+JU1z9VTvW8D4gXIYQFz0aLcsE6rRkyghZkLfEgUZgVvOG7A5CVz/VW5GIA==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.21.3", + "@rollup/rollup-android-arm64": "4.21.3", + "@rollup/rollup-darwin-arm64": "4.21.3", + "@rollup/rollup-darwin-x64": "4.21.3", + "@rollup/rollup-linux-arm-gnueabihf": "4.21.3", + "@rollup/rollup-linux-arm-musleabihf": "4.21.3", + "@rollup/rollup-linux-arm64-gnu": "4.21.3", + "@rollup/rollup-linux-arm64-musl": "4.21.3", + "@rollup/rollup-linux-powerpc64le-gnu": "4.21.3", + "@rollup/rollup-linux-riscv64-gnu": "4.21.3", + "@rollup/rollup-linux-s390x-gnu": "4.21.3", + "@rollup/rollup-linux-x64-gnu": "4.21.3", + "@rollup/rollup-linux-x64-musl": "4.21.3", + "@rollup/rollup-win32-arm64-msvc": "4.21.3", + "@rollup/rollup-win32-ia32-msvc": "4.21.3", + "@rollup/rollup-win32-x64-msvc": "4.21.3", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "funding": [ + { + "type": 
"github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "node_modules/sax": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz", + "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==", + "dev": true + }, + "node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "dev": true, + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": 
"https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "dev": true, + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": 
"sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/simple-swizzle/node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", + "dev": true + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" + }, + "node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.20", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.20.tgz", + "integrity": "sha512-jg25NiDV/1fLtSgEgyvVyDunvaNHbuwF9lfNV17gSmPFAlYzdfNBlLtLzXTevwkPj7DhGbmN9VnmJIgLnhvaBw==", + "dev": true + }, + "node_modules/stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": 
"sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/std-env": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.7.0.tgz", + "integrity": "sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==", + "dev": true + }, + "node_modules/stop-iteration-iterator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", + "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", + "dev": true, + "dependencies": { + "internal-slot": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/stoppable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz", + "integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==", + "dev": true, + "engines": { + "node": ">=4", + "npm": ">=6" + } + }, + "node_modules/streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", + "dev": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-2.1.0.tgz", + "integrity": "sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==", + "dev": true, + "dependencies": { + "js-tokens": "^9.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.0", + "resolved": 
"https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.0.tgz", + "integrity": "sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==", + "dev": true + }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==", + "dev": true + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/synckit": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.1.tgz", + "integrity": "sha512-7gr8p9TQP6RAHusBOSLs46F4564ZrjV8xFmw5zCmgmhGUcw2hxsShhJ6CEiHQMgPDwAQ1fWHPM0ypc4RMAig4A==", + "dev": true, + "dependencies": { + "@pkgr/core": "^0.1.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/temporal-polyfill": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.2.5.tgz", + "integrity": "sha512-ye47xp8Cb0nDguAhrrDS1JT1SzwEV9e26sSsrWzVu+yPZ7LzceEcH0i2gci9jWfOfSCCgM3Qv5nOYShVUUFUXA==", + "dependencies": { + "temporal-spec": "^0.2.4" + } + }, + "node_modules/temporal-spec": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.2.4.tgz", + "integrity": "sha512-lDMFv4nKQrSjlkHKAlHVqKrBG4DyFfa9F74cmBZ3Iy3ed8yvWnlWSIdi4IKfSqwmazAohBNwiN64qGx4y5Q3IQ==" + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/test-exclude/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + 
"engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==", + "dev": true + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true + }, + "node_modules/tinypool": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.8.4.tgz", + "integrity": "sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.2.1.tgz", + "integrity": "sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "dev": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/triple-beam": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.4.1.tgz", + "integrity": "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==", + "dev": true, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/tslib": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz", + "integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==", + "dev": true + }, + "node_modules/tsutils": { + "version": "3.21.0", + "resolved": 
"https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "dependencies": { + "tslib": "^1.8.1" + }, + "engines": { + "node": ">= 6" + }, + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + } + }, + "node_modules/tsutils/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/tsx": { + "version": "4.17.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.17.0.tgz", + "integrity": "sha512-eN4mnDA5UMKDt4YZixo9tBioibaMBpoxBkD+rIPAjVmYERSG0/dWEY1CEFuV89CgASlKL499q8AhmkMnnjtOJg==", + "dependencies": { + "esbuild": "~0.23.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz", + "integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dev": true, + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", + "dev": true + }, + "node_modules/typescript": { + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ufo": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.4.tgz", + "integrity": "sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==", + "dev": true + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": 
"https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, + "node_modules/unicorn-magic": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", + "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", + "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "dev": true, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + 
"dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/vite": { + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.5.tgz", + "integrity": "sha512-pXqR0qtb2bTwLkev4SE3r4abCNioP3GkjvIDLlzziPpXtHgiJIjuKl+1GN6ESOT3wMjG3JTeARopj2SwYaHTOA==", + "dev": true, + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite-node": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-1.6.0.tgz", + "integrity": "sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==", + "dev": true, + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.3.4", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "vite": "^5.0.0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": 
"sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": 
"sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/vitest": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-1.6.0.tgz", + "integrity": "sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==", + "dev": true, + "dependencies": { + "@vitest/expect": "1.6.0", + "@vitest/runner": "1.6.0", + "@vitest/snapshot": "1.6.0", + "@vitest/spy": "1.6.0", + "@vitest/utils": "1.6.0", + "acorn-walk": "^8.3.2", + "chai": "^4.3.10", + "debug": "^4.3.4", + "execa": "^8.0.1", + "local-pkg": "^0.5.0", + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + "tinybench": "^2.5.1", + "tinypool": "^0.8.3", + "vite": "^5.0.0", + "vite-node": "1.6.0", + "why-is-node-running": "^2.2.2" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/node": "^18.0.0 || >=20.0.0", + "@vitest/browser": "1.6.0", + "@vitest/ui": "1.6.0", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + 
"optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vscode-jsonrpc": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", + "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/vscode-languageserver": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", + "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", + "dependencies": { + "vscode-languageserver-protocol": "3.17.5" + }, + "bin": { + "installServerIntoExtension": "bin/installServerIntoExtension" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", + "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", + "dependencies": { + "vscode-jsonrpc": "8.2.0", + "vscode-languageserver-types": "3.17.5" + } + }, + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", + "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==" + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", + "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/winston": { + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.14.2.tgz", + "integrity": "sha512-CO8cdpBB2yqzEf8v895L+GNKYJiEq8eKlHU38af3snQBQ+sdAIUepjMSguOIJC7ICbzm0ZI+Af2If4vIJrtmOg==", + "dev": true, + "dependencies": { + "@colors/colors": "^1.6.0", + "@dabh/diagnostics": "^2.0.2", + "async": "^3.2.3", + "is-stream": "^2.0.0", + "logform": "^2.6.0", + "one-time": "^1.0.0", + "readable-stream": "^3.4.0", + "safe-stable-stringify": "^2.3.1", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.7.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/winston-transport": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.7.1.tgz", + "integrity": "sha512-wQCXXVgfv/wUPOfb2x0ruxzwkcZfxcktz6JIMUaPLmcNhO4bZTwA/WtDWK74xV3F2dKu8YadrFv0qhwYjVEwhA==", + "dev": true, + "dependencies": { + "logform": "^2.6.1", + "readable-stream": "^3.6.2", + "triple-beam": "^1.3.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/winston-transport/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/winston/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/winston/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": 
"https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + 
"node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/xml-formatter": { + "version": "3.6.3", + "resolved": "https://registry.npmjs.org/xml-formatter/-/xml-formatter-3.6.3.tgz", + "integrity": "sha512-++x1TlRO1FRlQ82AZ4WnoCSufaI/PT/sycn4K8nRl4gnrNC1uYY2VV/67aALZ2m0Q4Q/BLj/L69K360Itw9NNg==", + "dev": true, + "dependencies": { + "xml-parser-xo": "^4.1.2" + }, + "engines": { + "node": ">= 16" + } + }, + "node_modules/xml-parser-xo": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/xml-parser-xo/-/xml-parser-xo-4.1.2.tgz", + "integrity": "sha512-Z/DRB0ZAKj5vAQg++XsfQQKfT73Vfj5n5lKIVXobBDQEva6NHWUTxOA6OohJmEcpoy8AEqBmSGkXXAnFwt5qAA==", + "dev": true, + "engines": { + "node": ">= 16" + } + }, + "node_modules/xml2js": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz", + "integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==", + "dev": true, + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yaml": { + "version": "2.4.5", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.5.tgz", + "integrity": "sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + 
"dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/packages/http-client-python/package.json b/packages/http-client-python/package.json new file mode 100644 index 0000000000..87a7fca0dd --- /dev/null +++ b/packages/http-client-python/package.json @@ -0,0 +1,94 @@ +{ + "name": "@typespec/http-client-python", + "version": "0.27.1", + "author": "Microsoft Corporation", + "description": "TypeSpec emitter for Python SDKs", + "homepage": "https://typespec.io", + "readme": "https://github.com/microsoft/typespec/blob/main/packages/http-client-python/README.md", + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/microsoft/typespec.git" + }, + "bugs": { + "url": "https://github.com/microsoft/typespec/issues" + }, + "keywords": [ + "typespec", + "python" + ], + "type": "module", + "main": "dist/emitter/index.js", + "exports": { + ".": { + "types": "./dist/src/index.d.ts", + "default": "./dist/src/index.js" + }, + "./testing": { + "types": "./dist/src/testing/index.d.ts", + "default": "./dist/src/testing/index.js" + } + }, + "engines": { + "node": ">=18.0.0" + }, + "scripts": { + "clean": "rimraf ./dist ./temp ./test/**/generated/ ./venv", + "build": "tsc -p ./emitter/tsconfig.build.json", + "watch": "tsc -p ./emitter/tsconfig.build.json --watch", + "lint": "eslint . 
--max-warnings=0", + "lint:py": "tsx ./eng/scripts/ci/lint.ts --folderName generator", + "format": "pnpm -w format:dir packages/http-client-python && tsx ./eng/scripts/ci/format.ts", + "install": "tsx ./eng/scripts/setup/run-python3.ts ./eng/scripts/setup/install.py", + "prepare": "tsx ./eng/scripts/setup/run-python3.ts ./eng/scripts/setup/prepare.py", + "regenerate": "tsx ./eng/scripts/ci/regenerate.ts", + "test": "tsx ./eng/scripts/ci/run-tests.ts" + }, + "files": [ + "dist/**", + "!dist/test/**", + "generator/**", + "eng/scripts/**" + ], + "peerDependencies": { + "@azure-tools/typespec-azure-core": ">=0.45.0 <1.0.0", + "@azure-tools/typespec-azure-resource-manager": ">=0.45.0 <1.0.0", + "@azure-tools/typespec-autorest": ">=0.45.0 <1.0.0", + "@azure-tools/typespec-client-generator-core": ">=0.45.4 <1.0.0", + "@azure-tools/typespec-azure-rulesets": ">=0.45.0 <3.0.0", + "@typespec/compiler": ">=0.59.1 <1.0.0", + "@typespec/http": ">=0.59.0 <1.0.0", + "@typespec/rest": ">=0.59.0 <1.0.0", + "@typespec/versioning": ">=0.59.0 <1.0.0", + "@typespec/openapi": ">=0.59.0 <1.0.0" + }, + "dependencies": { + "js-yaml": "~4.1.0", + "@typespec/openapi3": "~0.59.0", + "semver": "~7.6.2", + "tsx": "4.17.0" + }, + "devDependencies": { + "@azure-tools/typespec-azure-resource-manager": "~0.45.0", + "@azure-tools/typespec-autorest": "~0.45.0", + "@azure-tools/cadl-ranch-expect": "0.15.3", + "@azure-tools/cadl-ranch-specs": "0.37.1", + "@types/js-yaml": "~4.0.5", + "@types/node": "^18.16.3", + "@types/semver": "7.5.8", + "@typespec/eslint-config-typespec": "~0.55.0", + "@typespec/openapi": "~0.59.0", + "c8": "~7.13.0", + "rimraf": "~5.0.0", + "typescript": "~5.5.4", + "@azure-tools/typespec-azure-core": "~0.45.0", + "@azure-tools/typespec-client-generator-core": "0.45.4", + "@typespec/compiler": "~0.59.1", + "@typespec/http": "~0.59.0", + "@typespec/rest": "~0.59.0", + "@typespec/versioning": "~0.59.0", + "@azure-tools/typespec-azure-rulesets": "0.45.0", + "chalk": "5.3.0", + "vitest": "^1.4.0" + } +} diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_arm_models_common_types_managed_identity_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_arm_models_common_types_managed_identity_async.py new file mode 100644 index 0000000000..be5bb31f44 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_arm_models_common_types_managed_identity_async.py @@ -0,0 +1,72 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from azure.resourcemanager.models.commontypes.managedidentity.aio import ( + ManagedIdentityClient, +) +from azure.resourcemanager.models.commontypes.managedidentity import models + +SUBSCRIPTION_ID = "00000000-0000-0000-0000-000000000000" +RESOURCE_GROUP_NAME = "test-rg" + + +@pytest.fixture +async def client(credential, authentication_policy): + async with ManagedIdentityClient( + credential, + SUBSCRIPTION_ID, + "http://localhost:3000", + authentication_policy=authentication_policy, + ) as client: + yield client + + +@pytest.mark.asyncio +async def test_managed_identity_tracked_resources_get(client): + result = await client.managed_identity_tracked_resources.get( + resource_group_name=RESOURCE_GROUP_NAME, + managed_identity_tracked_resource_name="identity", + ) + assert result.location == "eastus" + assert result.identity.type == "SystemAssigned" + assert result.properties.provisioning_state == "Succeeded" + + +@pytest.mark.asyncio +async def test_managed_identity_tracked_resources_create_with_system_assigned(client): + result = await client.managed_identity_tracked_resources.create_with_system_assigned( + resource_group_name=RESOURCE_GROUP_NAME, + managed_identity_tracked_resource_name="identity", + resource=models.ManagedIdentityTrackedResource( + location="eastus", + identity=models.ManagedServiceIdentity(type="SystemAssigned"), + ), + ) + assert result.location == "eastus" + assert result.identity.type == "SystemAssigned" + assert result.properties.provisioning_state == "Succeeded" + + +@pytest.mark.asyncio +async def test_managed_identity_tracked_resources_update_with_user_assigned_and_system_assigned( + client, +): + result = await client.managed_identity_tracked_resources.update_with_user_assigned_and_system_assigned( + resource_group_name=RESOURCE_GROUP_NAME, + managed_identity_tracked_resource_name="identity", + properties=models.ManagedIdentityTrackedResource( + location="eastus", + identity=models.ManagedServiceIdentity( + type="SystemAssigned,UserAssigned", + user_assigned_identities={ + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/id1": models.UserAssignedIdentity() + }, + ), + ), + ) + assert result.location == "eastus" + assert result.identity.type == "SystemAssigned,UserAssigned" + assert result.properties.provisioning_state == "Succeeded" diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_arm_models_resource_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_arm_models_resource_async.py new file mode 100644 index 0000000000..dd0d060e97 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_arm_models_resource_async.py @@ -0,0 +1,218 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+import pytest
+from azure.resourcemanager.models.resources.aio import ResourcesClient
+from azure.resourcemanager.models.resources import models
+
+SUBSCRIPTION_ID = "00000000-0000-0000-0000-000000000000"
+RESOURCE_GROUP_NAME = "test-rg"
+
+
+@pytest.fixture
+async def client(credential, authentication_policy):
+    async with ResourcesClient(
+        credential,
+        SUBSCRIPTION_ID,
+        "http://localhost:3000",
+        authentication_policy=authentication_policy,
+    ) as client:
+        yield client
+
+
+@pytest.mark.asyncio
+async def test_client_signature(credential, authentication_policy):
+    # make sure signature order is correct
+    client1 = ResourcesClient(
+        credential,
+        SUBSCRIPTION_ID,
+        "http://localhost:3000",
+        authentication_policy=authentication_policy,
+    )
+    # make sure signature name is correct
+    client2 = ResourcesClient(
+        credential=credential,
+        subscription_id=SUBSCRIPTION_ID,
+        base_url="http://localhost:3000",
+        authentication_policy=authentication_policy,
+    )
+    for client in [client1, client2]:
+        # make sure signature order is correct
+        await client.top_level_tracked_resources.get(RESOURCE_GROUP_NAME, "top")
+        # make sure signature name is correct
+        await client.top_level_tracked_resources.get(
+            resource_group_name=RESOURCE_GROUP_NAME,
+            top_level_tracked_resource_name="top",
+        )
+
+
+@pytest.mark.asyncio
+async def test_top_level_tracked_resources_begin_create_or_replace(client):
+    result = await (
+        await client.top_level_tracked_resources.begin_create_or_replace(
+            resource_group_name=RESOURCE_GROUP_NAME,
+            top_level_tracked_resource_name="top",
+            resource=models.TopLevelTrackedResource(
+                location="eastus",
+                properties=models.TopLevelTrackedResourceProperties(description="valid"),
+            ),
+            polling_interval=0,  # set polling_interval to 0 s to make the test faster since default is 30s
+        )
+    ).result()
+    assert result.location == "eastus"
+    assert result.properties.description == "valid"
+    assert result.properties.provisioning_state == "Succeeded"
+    assert result.name == "top"
+    assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources"
+    assert result.system_data.created_by == "AzureSDK"
+
+
+@pytest.mark.asyncio
+async def test_top_level_tracked_resources_begin_update(client):
+    result = await (
+        await client.top_level_tracked_resources.begin_update(
+            resource_group_name=RESOURCE_GROUP_NAME,
+            top_level_tracked_resource_name="top",
+            properties=models.TopLevelTrackedResource(
+                location="eastus",
+                properties=models.TopLevelTrackedResourceProperties(description="valid2"),
+            ),
+            polling_interval=0,  # set polling_interval to 0 s to make the test faster since default is 30s
+        )
+    ).result()
+    assert result.location == "eastus"
+    assert result.properties.description == "valid2"
+    assert result.properties.provisioning_state == "Succeeded"
+    assert result.name == "top"
+    assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources"
+    assert result.system_data.created_by == "AzureSDK"
+
+
+@pytest.mark.asyncio
+async def test_top_level_tracked_resources_begin_delete(client):
+    await (
+        await client.top_level_tracked_resources.begin_delete(
+            resource_group_name=RESOURCE_GROUP_NAME,
+            top_level_tracked_resource_name="top",
+            polling_interval=0,  # set polling_interval to 0 s to make the test faster since default is 30s
+        )
+    ).result()
+
+
+@pytest.mark.asyncio
+async def test_top_level_tracked_resources_list_by_resource_group(client):
+    response = client.top_level_tracked_resources.list_by_resource_group(
+        resource_group_name=RESOURCE_GROUP_NAME,
+    )
+    results = [r async for r in response]
+    for result in results:
+        assert result.location == "eastus"
+        assert result.properties.description == "valid"
+        assert result.properties.provisioning_state == "Succeeded"
+        assert result.name == "top"
+        assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources"
+        assert result.system_data.created_by == "AzureSDK"
+
+
+@pytest.mark.asyncio
+async def test_top_level_tracked_resources_list_by_subscription(client):
+    response = client.top_level_tracked_resources.list_by_subscription()
+    results = [r async for r in response]
+    for result in results:
+        assert result.location == "eastus"
+        assert result.properties.description == "valid"
+        assert result.properties.provisioning_state == "Succeeded"
+        assert result.name == "top"
+        assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources"
+        assert result.system_data.created_by == "AzureSDK"
+
+
+@pytest.mark.asyncio
+async def test_nested_proxy_resources_get(client):
+    result = await client.nested_proxy_resources.get(
+        resource_group_name=RESOURCE_GROUP_NAME,
+        top_level_tracked_resource_name="top",
+        nexted_proxy_resource_name="nested",
+    )
+    assert result.properties.description == "valid"
+    assert result.properties.provisioning_state == "Succeeded"
+    assert result.name == "nested"
+    assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources"
+    assert result.system_data.created_by == "AzureSDK"
+
+
+@pytest.mark.asyncio
+async def test_nested_proxy_resources_begin_create_or_replace(client):
+    result = await (
+        await client.nested_proxy_resources.begin_create_or_replace(
+            resource_group_name=RESOURCE_GROUP_NAME,
+            top_level_tracked_resource_name="top",
+            nexted_proxy_resource_name="nested",
+            resource=models.TopLevelTrackedResource(
+                properties=models.TopLevelTrackedResourceProperties(description="valid"),
+            ),
+            polling_interval=0,  # set polling_interval to 0 s to make the test faster since default is 30s
+        )
+    ).result()
+    assert result.properties.description == "valid"
+    assert result.properties.provisioning_state == "Succeeded"
+    assert result.name == "nested"
+    assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources"
+    assert result.system_data.created_by == "AzureSDK"
+
+
+@pytest.mark.asyncio
+async def test_nested_proxy_resources_begin_update(client):
+    result = await (
+        await client.nested_proxy_resources.begin_update(
+            resource_group_name=RESOURCE_GROUP_NAME,
+            top_level_tracked_resource_name="top",
+            nexted_proxy_resource_name="nested",
+            properties=models.TopLevelTrackedResource(
+                properties=models.TopLevelTrackedResourceProperties(description="valid2"),
+            ),
+            polling_interval=0,  # set polling_interval to 0 s to make the test faster since default is 30s
+        )
+    ).result()
+    assert result.properties.description == "valid2"
+    assert result.properties.provisioning_state == "Succeeded"
+    assert result.name == "nested"
+    assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources"
+    assert result.system_data.created_by == "AzureSDK"
+
+
+@pytest.mark.asyncio
+async def test_nested_proxy_resources_begin_delete(client):
+    await (
+        await client.nested_proxy_resources.begin_delete(
+            resource_group_name=RESOURCE_GROUP_NAME,
+            top_level_tracked_resource_name="top",
+            nexted_proxy_resource_name="nested",
+            polling_interval=0,  # set polling_interval to 0 s to make the test faster since default is 30s
+        )
+    ).result()
+
+
+@pytest.mark.asyncio
+async def test_nested_proxy_resources_list_by_top_level_tracked_resource(client):
+    response = client.nested_proxy_resources.list_by_top_level_tracked_resource(
+        resource_group_name=RESOURCE_GROUP_NAME,
+        top_level_tracked_resource_name="top",
+    )
+    results = [r async for r in response]
+    for result in results:
+        assert result.properties.description == "valid"
+        assert result.properties.provisioning_state == "Succeeded"
+        assert result.name == "nested"
+        assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources"
+        assert result.system_data.created_by == "AzureSDK"
diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_access_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_access_async.py
new file mode 100644
index 0000000000..96c8bac7ea
--- /dev/null
+++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_access_async.py
@@ -0,0 +1,105 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import pytest
+from specs.azure.clientgenerator.core.access.aio import AccessClient
+from specs.azure.clientgenerator.core.access import models
+
+
+@pytest.fixture
+async def client():
+    async with AccessClient() as client:
+        yield client
+
+
+@pytest.mark.asyncio
+async def test_no_decorator_in_public(client: AccessClient):
+    result = await client.public_operation.no_decorator_in_public(name="test")
+    assert result == models.NoDecoratorModelInPublic(name="test")
+
+
+@pytest.mark.asyncio
+async def test_public_decorator_in_public(client: AccessClient):
+    result = await client.public_operation.public_decorator_in_public(name="test")
+    assert result == models.PublicDecoratorModelInPublic(name="test")
+
+
+@pytest.mark.asyncio
+async def test_no_decorator_in_internal(client: AccessClient):
+    result = await client.internal_operation._no_decorator_in_internal(name="test")
+    assert result == models._models.NoDecoratorModelInInternal(name="test")
+
+    with pytest.raises(ImportError):
+        from specs.azure.clientgenerator.core.access.models import (
+            NoDecoratorModelInInternal,
+        )
+
+    with pytest.raises(AttributeError):
+        await client.internal_operation.no_decorator_in_internal(name="test")
+
+
+@pytest.mark.asyncio
+async def test_internal_decorator_in_internal(client: AccessClient):
+    result = await client.internal_operation._internal_decorator_in_internal(name="test")
+    assert result == models._models.InternalDecoratorModelInInternal(name="test")
+
+    with pytest.raises(ImportError):
+        from specs.azure.clientgenerator.core.access.models import (
+            InternalDecoratorModelInInternal,
+        )
+
+    with pytest.raises(AttributeError):
+        await client.internal_operation.internal_decorator_in_internal(name="test")
+
+
+@pytest.mark.asyncio
+async def
test_public_decorator_in_internal(client: AccessClient): + result = await client.internal_operation._public_decorator_in_internal(name="test") + assert result == models.PublicDecoratorModelInInternal(name="test") + + with pytest.raises(AttributeError): + await client.internal_operation.public_decorator_in_internal(name="test") + + +@pytest.mark.asyncio +async def test_public(client: AccessClient): + result = await client.shared_model_in_operation.public(name="test") + assert result == models.SharedModel(name="test") + + +@pytest.mark.asyncio +async def test_internal(client: AccessClient): + result = await client.shared_model_in_operation._internal(name="test") + assert result == models.SharedModel(name="test") + + with pytest.raises(AttributeError): + await client.shared_model_in_operation.internal(name="test") + + +@pytest.mark.asyncio +async def test_operation(client: AccessClient): + result = await client.relative_model_in_operation._operation(name="test") + assert result == models._models.OuterModel(name="Madge", inner=models._models.InnerModel(name="Madge")) + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import OuterModel + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import InnerModel + + with pytest.raises(AttributeError): + await client.shared_model_in_operation.operation(name="test") + + +@pytest.mark.asyncio +async def test_discriminator(client: AccessClient): + result = await client.relative_model_in_operation._discriminator(kind="real") + assert result == models._models.RealModel(name="Madge", kind="real") + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import RealModel + + with pytest.raises(AttributeError): + await client.shared_model_in_operation.discriminator(kind="real") diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_flatten_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_flatten_async.py new file mode 100644 index 0000000000..2ff5d0af0e --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_flatten_async.py @@ -0,0 +1,93 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.clientgenerator.core.flattenproperty.aio import FlattenPropertyClient +from specs.azure.clientgenerator.core.flattenproperty.models import ( + FlattenModel, + ChildModel, + NestedFlattenModel, + ChildFlattenModel, +) + + +@pytest.fixture +async def client(): + async with FlattenPropertyClient() as client: + yield client + + +# ========== test for cadl-ranch ========== + + +@pytest.mark.asyncio +async def test_put_flatten_model(client: FlattenPropertyClient): + resp = FlattenModel(name="test", properties=ChildModel(age=1, description="test")) + assert ( + await client.put_flatten_model(FlattenModel(name="foo", properties=ChildModel(age=10, description="bar"))) + == resp + ) + assert await client.put_flatten_model(FlattenModel(name="foo", age=10, description="bar")) == resp + + +@pytest.mark.asyncio +async def test_put_nested_flatten_model(client: FlattenPropertyClient): + # python doesn't support nested flatten model + assert await client.put_nested_flatten_model( + NestedFlattenModel( + name="foo", + properties=ChildFlattenModel(summary="bar", properties=ChildModel(age=10, description="test")), + ) + ) == NestedFlattenModel( + name="test", + properties=ChildFlattenModel(summary="test", properties=ChildModel(age=1, description="foo")), + ) + + +@pytest.mark.asyncio # ============test for compatibility ============ +async def test_dpg_model_common(): + flatten_model = FlattenModel(name="hello", properties=ChildModel(age=0, description="test")) + assert flatten_model.name == "hello" + assert flatten_model.properties.age == 0 + assert flatten_model.properties.description == "test" + + +@pytest.mark.asyncio +async def test_dpg_model_none(): + flatten_model = FlattenModel() + assert flatten_model.name is None + assert flatten_model.properties is None + assert flatten_model.age is None + assert flatten_model.description is None + + +@pytest.mark.asyncio +async def test_dpg_model_compatibility(): + flatten_model = FlattenModel(description="test", age=0) + assert flatten_model.description == "test" + assert flatten_model.age == 0 + assert flatten_model.properties.description == "test" + assert flatten_model.properties.age == 0 + + +@pytest.mark.asyncio +async def test_dpg_model_setattr(): + flatten_model = FlattenModel() + + flatten_model.age = 0 + assert flatten_model.properties.age == 0 + flatten_model.description = "test" + assert flatten_model.properties.description == "test" + + flatten_model.properties.age = 1 + assert flatten_model.age == 1 + flatten_model.properties.description = "test2" + assert flatten_model.description == "test2" + + +@pytest.mark.asyncio +async def test_dpg_model_exception(): + with pytest.raises(AttributeError): + FlattenModel().no_prop diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_usage_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_usage_async.py new file mode 100644 index 0000000000..af1b31e92f --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_client_generator_core_usage_async.py @@ -0,0 +1,31 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
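# NOTE: the flatten-property tests above assume the generated FlattenModel forwards the flattened
# `age`/`description` attributes to the nested `properties` (ChildModel) instance in both directions.
# A rough, hand-written sketch of that forwarding pattern (illustrative only, with hypothetical class
# names; not the emitter's actual implementation):

class ChildSketch:
    def __init__(self, age=None, description=None):
        self.age = age
        self.description = description


class FlattenSketch:
    """Hypothetical stand-in for a model whose `properties` members are flattened."""

    _FLATTENED = ("age", "description")

    def __init__(self, name=None, properties=None, **kwargs):
        self.name = name
        self.properties = properties
        for attr in self._FLATTENED:
            if attr in kwargs:
                setattr(self, attr, kwargs[attr])

    def __getattr__(self, attr):
        # Only reached when normal lookup fails: proxy flattened names to `properties`.
        if attr in type(self)._FLATTENED:
            props = self.__dict__.get("properties")
            return getattr(props, attr, None) if props is not None else None
        raise AttributeError(attr)

    def __setattr__(self, attr, value):
        if attr in self._FLATTENED:
            # Writing a flattened attribute creates/updates the nested model instead.
            if self.__dict__.get("properties") is None:
                self.__dict__["properties"] = ChildSketch()
            setattr(self.__dict__["properties"], attr, value)
        else:
            self.__dict__[attr] = value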
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.clientgenerator.core.usage.aio import UsageClient +from specs.azure.clientgenerator.core.usage import models + + +@pytest.fixture +async def client(): + async with UsageClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_input_to_input_output(client: UsageClient): + await client.model_in_operation.input_to_input_output(models.InputModel(name="Madge")) + + +@pytest.mark.asyncio +async def test_output_to_input_output(client: UsageClient): + assert models.OutputModel(name="Madge") == await client.model_in_operation.output_to_input_output() + + +@pytest.mark.asyncio +async def test_model_usage(client: UsageClient): + assert models.RoundTripModel( + result=models.ResultModel(name="Madge") + ) == await client.model_in_operation.model_in_read_only_property(body=models.RoundTripModel()) diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_basic_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_basic_async.py new file mode 100644 index 0000000000..9a67c80137 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_basic_async.py @@ -0,0 +1,70 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.basic import models, aio + +VALID_USER = models.User(id=1, name="Madge", etag="11bdc430-65e8-45ad-81d9-8ffa60d55b59") + + +@pytest.fixture +async def client(): + async with aio.BasicClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_create_or_update(client: aio.BasicClient): + result = await client.create_or_update(id=1, resource={"name": "Madge"}) + assert result == VALID_USER + + +@pytest.mark.asyncio +async def test_create_or_replace(client: aio.BasicClient): + result = await client.create_or_replace(id=1, resource={"name": "Madge"}) + assert result == VALID_USER + + +@pytest.mark.asyncio +async def test_get(client: aio.BasicClient): + result = await client.get(id=1) + assert result == VALID_USER + + +@pytest.mark.asyncio +async def test_list(client: aio.BasicClient): + result = client.list( + top=5, + skip=10, + orderby=["id"], + filter="id lt 10", + select=["id", "orders", "etag"], + expand=["orders"], + ) + result = [item async for item in result] + assert len(result) == 2 + assert result[0].id == 1 + assert result[0].name == "Madge" + assert result[0].etag == "11bdc430-65e8-45ad-81d9-8ffa60d55b59" + assert result[0].orders[0].id == 1 + assert result[0].orders[0].user_id == 1 + assert result[0].orders[0].detail == "a recorder" + assert result[1].id == 2 + assert result[1].name == "John" + assert result[1].etag == "11bdc430-65e8-45ad-81d9-8ffa60d55b5a" + assert result[1].orders[0].id == 2 + assert result[1].orders[0].user_id == 2 + assert result[1].orders[0].detail == "a TV" + + +@pytest.mark.asyncio +async def test_delete(client: aio.BasicClient): + await client.delete(id=1) + + +@pytest.mark.asyncio +async def test_export(client: aio.BasicClient): + result = await client.export(id=1, format="json") + assert result == VALID_USER diff --git 
a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_lro_rpc_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_lro_rpc_async.py new file mode 100644 index 0000000000..b024cb9bbf --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_lro_rpc_async.py @@ -0,0 +1,24 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from azurecore.lro.rpc.aio import RpcClient +from azurecore.lro.rpc import models + + +@pytest.fixture +async def client(): + async with RpcClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_long_running_rpc(client: RpcClient, async_polling_method): + result = await client.begin_long_running_rpc( + models.GenerationOptions(prompt="text"), + polling_interval=0, + polling=async_polling_method, + ) + assert (await result.result()) == models.GenerationResult(data="text data") diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_lro_standard_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_lro_standard_async.py new file mode 100644 index 0000000000..a809b4ca51 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_lro_standard_async.py @@ -0,0 +1,47 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
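# NOTE: the async LRO tests above and below use the pattern `await (await client.begin_*(...)).result()`:
# the async `begin_*` call is awaited once to obtain an AsyncLROPoller, and `poller.result()` is itself
# awaitable and drives polling to completion. A minimal sketch of the same call shape (client and
# operation names are placeholders):

async def wait_for_lro(client, name, resource):
    # First await: send the initial request and get the poller back.
    poller = await client.begin_create_or_replace(name=name, resource=resource, polling_interval=0)
    # Second await: poll until the operation reaches a terminal state and return the final result.
    return await poller.result()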
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.lro.standard.aio import StandardClient +from specs.azure.core.lro.standard.models import User, ExportedUser + + +@pytest.fixture +async def client(): + async with StandardClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_lro_core_put(client, async_polling_method): + user = User({"name": "madge", "role": "contributor"}) + result = await ( + await client.begin_create_or_replace( + name=user.name, + resource=user, + polling_interval=0, + polling=async_polling_method, + ) + ).result() + assert result == user + + +@pytest.mark.asyncio +async def test_lro_core_delete(client, async_polling_method): + await (await client.begin_delete(name="madge", polling_interval=0, polling=async_polling_method)).result() + + +@pytest.mark.asyncio +async def test_lro_core_export(client, async_polling_method): + export_user = ExportedUser({"name": "madge", "resourceUri": "/users/madge"}) + result = await ( + await client.begin_export( + name="madge", + format="json", + polling_interval=0, + polling=async_polling_method, + ) + ).result() + assert result == export_user diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_model_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_model_async.py new file mode 100644 index 0000000000..b2f8eed895 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_model_async.py @@ -0,0 +1,33 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.model.aio import ModelClient +from specs.azure.core.model.models import AzureEmbeddingModel + + +@pytest.fixture +async def client(): + async with ModelClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_azure_core_embedding_vector_post(client: ModelClient): + embedding_model = AzureEmbeddingModel(embedding=[0, 1, 2, 3, 4]) + result = await client.azure_core_embedding_vector.post( + body=embedding_model, + ) + assert result == AzureEmbeddingModel(embedding=[5, 6, 7, 8, 9]) + + +@pytest.mark.asyncio +async def test_azure_core_embedding_vector_put(client: ModelClient): + await client.azure_core_embedding_vector.put(body=[0, 1, 2, 3, 4]) + + +@pytest.mark.asyncio +async def test_azure_core_embedding_vector_get(client: ModelClient): + assert [0, 1, 2, 3, 4] == (await client.azure_core_embedding_vector.get()) diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_page_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_page_async.py new file mode 100644 index 0000000000..980083bc22 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_page_async.py @@ -0,0 +1,58 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typing import AsyncIterable +from specs.azure.core.page import models, aio + +VALID_USER = models.User(id=1, name="Madge", etag="11bdc430-65e8-45ad-81d9-8ffa60d55b59") + + +@pytest.fixture +async def client(): + async with aio.PageClient() as client: + yield client + + +async def _list_with_page_tests(pager: AsyncIterable[models.User]): + result = [p async for p in pager] + assert len(result) == 1 + assert result[0].id == 1 + assert result[0].name == "Madge" + assert result[0].etag == "11bdc430-65e8-45ad-81d9-8ffa60d55b59" + assert result[0].orders is None + + +@pytest.mark.asyncio +async def test_list_with_page(client: aio.PageClient): + await _list_with_page_tests(client.list_with_page()) + + +@pytest.mark.asyncio +async def test_list_with_custom_page_model(client: aio.PageClient): + await _list_with_page_tests(client.list_with_custom_page_model()) + with pytest.raises(AttributeError): + models.CustomPageModel + + +@pytest.mark.asyncio +async def test_list_with_parameters(client: aio.PageClient): + result = [ + item + async for item in client.list_with_parameters(models.ListItemInputBody(input_name="Madge"), another="Second") + ] + assert len(result) == 1 + assert result[0] == VALID_USER + + +@pytest.mark.asyncio +async def test_two_models_as_page_item(client: aio.PageClient): + result = [item async for item in client.two_models_as_page_item.list_first_item()] + assert len(result) == 1 + assert result[0].id == 1 + + result = [item async for item in client.two_models_as_page_item.list_second_item()] + assert len(result) == 1 + assert result[0].name == "Madge" diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_scalar_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_scalar_async.py new file mode 100644 index 0000000000..4616cc2b5e --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_scalar_async.py @@ -0,0 +1,41 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.scalar.aio import ScalarClient +from specs.azure.core.scalar import models + + +@pytest.fixture +async def client(): + async with ScalarClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_azure_location_scalar_get(client: ScalarClient): + result = await client.azure_location_scalar.get() + assert result == "eastus" + + +@pytest.mark.asyncio +async def test_azure_location_scalar_put(client: ScalarClient): + await client.azure_location_scalar.put("eastus") + + +@pytest.mark.asyncio +async def test_azure_location_scalar_post(client: ScalarClient): + result = await client.azure_location_scalar.post(models.AzureLocationModel(location="eastus")) + assert result == models.AzureLocationModel(location="eastus") + + +@pytest.mark.asyncio +async def test_azure_location_scalar_header(client: ScalarClient): + await client.azure_location_scalar.header(region="eastus") + + +@pytest.mark.asyncio +async def test_azure_location_scalar_query(client: ScalarClient): + await client.azure_location_scalar.query(region="eastus") diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_traits_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_traits_async.py new file mode 100644 index 0000000000..fc1bdb9f7d --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_core_traits_async.py @@ -0,0 +1,89 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
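# NOTE: the traits tests below pass `etag` together with `match_condition` (azure.core.MatchConditions).
# In Azure SDK clients that pair is translated into standard HTTP precondition headers, roughly as in
# this illustrative helper (generated clients do this via azure-core utilities, not this function):

from azure.core import MatchConditions


def precondition_headers(etag, match_condition):
    if match_condition == MatchConditions.IfNotModified:
        return {"If-Match": etag}  # only act if the resource is unchanged
    if match_condition == MatchConditions.IfModified:
        return {"If-None-Match": etag}  # only act if the resource has changed
    if match_condition == MatchConditions.IfPresent:
        return {"If-Match": "*"}
    if match_condition == MatchConditions.IfMissing:
        return {"If-None-Match": "*"}
    return {}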
+# -------------------------------------------------------------------------- +import functools +from datetime import datetime + +import pytest +from azure.core.exceptions import HttpResponseError +from azure.core import MatchConditions +from specs.azure.core.traits.aio import TraitsClient +from specs.azure.core.traits.models import UserActionParam + + +@pytest.fixture +async def client(): + async with TraitsClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_get(client: TraitsClient, check_client_request_id_header): + async def assert_test_get(**kwargs): + checked = {} + result, header = await client.smoke_test( + id=1, + foo="123", + if_unmodified_since=datetime(year=2022, month=8, day=26, hour=14, minute=38, second=0), + if_modified_since=datetime(year=2021, month=8, day=26, hour=14, minute=38, second=0), + cls=lambda x, y, z: (y, z), + raw_request_hook=functools.partial( + check_client_request_id_header, + header="x-ms-client-request-id", + checked=checked, + ), + **kwargs, + ) + assert result.id == 1 + assert result.name == "Madge" + assert header["ETag"] == "11bdc430-65e8-45ad-81d9-8ffa60d55b59" + assert header["bar"] == "456" + assert header["x-ms-client-request-id"] == checked["x-ms-client-request-id"] + + await assert_test_get(etag="valid", match_condition=MatchConditions.IfNotModified) + await assert_test_get(etag="invalid", match_condition=MatchConditions.IfModified) + with pytest.raises(HttpResponseError): + await assert_test_get() + + +@pytest.mark.asyncio +async def test_repeatable_action(client: TraitsClient, check_repeatability_header): + result, header = await client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + raw_request_hook=check_repeatability_header, + ) + assert result.user_action_result == "test" + assert header["Repeatability-Result"] == "accepted" + + result, header = await client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + headers={ + "Repeatability-Request-ID": "5942d803-e3fa-4f96-8f67-607d7bd607f5", + "Repeatability-First-Sent": "Sun, 06 Nov 1994 08:49:37 GMT", + }, + raw_request_hook=check_repeatability_header, + ) + assert result.user_action_result == "test" + assert header["Repeatability-Result"] == "accepted" + + with pytest.raises(HttpResponseError): + await client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + headers={"Repeatability-Request-ID": "wrong-id"}, + ) + + with pytest.raises(HttpResponseError): + await client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + headers={"Repeatability-First-Sent": "wrong-datetime"}, + ) diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_example_basic_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_example_basic_async.py new file mode 100644 index 0000000000..46a66e519d --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_example_basic_async.py @@ -0,0 +1,30 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.example.basic.aio import AzureExampleClient +from specs.azure.example.basic.models import ActionRequest, Model + + +@pytest.fixture +async def client(): + async with AzureExampleClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_basic_action(client: AzureExampleClient): + body = ActionRequest( + string_property="text", + model_property=Model(int32_property=1, float32_property=1.5, enum_property="EnumValue1"), + array_property=["item"], + record_property={"record": "value"}, + ) + result = await client.basic_action( + body=body, + query_param="query", + header_param="header", + ) + assert result.string_property == body.string_property diff --git a/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_special_headers_client_request_id_async.py b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_special_headers_client_request_id_async.py new file mode 100644 index 0000000000..882ec94dce --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/asynctests/test_azure_special_headers_client_request_id_async.py @@ -0,0 +1,32 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import functools + +import pytest + +from azure.specialheaders.xmsclientrequestid.aio import XmsClientRequestIdClient + + +@pytest.fixture +async def client(): + async with XmsClientRequestIdClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_get(client: XmsClientRequestIdClient, check_client_request_id_header): + checked = {} + result, resp = await client.get( + cls=lambda x, y, z: (y, x), + raw_request_hook=functools.partial( + check_client_request_id_header, + header="x-ms-client-request-id", + checked=checked, + ), + ) + assert result is None + assert resp.http_response.headers["x-ms-client-request-id"] == checked["x-ms-client-request-id"] + pass diff --git a/packages/http-client-python/test/azure/mock_api_tests/conftest.py b/packages/http-client-python/test/azure/mock_api_tests/conftest.py new file mode 100644 index 0000000000..0b897317d4 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/conftest.py @@ -0,0 +1,150 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import os +import subprocess +import signal +import pytest +import re +from typing import Literal +from pathlib import Path + + +def start_server_process(): + path = Path(os.path.dirname(__file__)) / Path("../../../node_modules/@azure-tools/cadl-ranch-specs") + os.chdir(path.resolve()) + cmd = "cadl-ranch serve ./http" + if os.name == "nt": + return subprocess.Popen(cmd, shell=True) + return subprocess.Popen(cmd, shell=True, preexec_fn=os.setsid) + + +def terminate_server_process(process): + if os.name == "nt": + process.kill() + else: + os.killpg(os.getpgid(process.pid), signal.SIGTERM) # Send the signal to all the process groups + + +@pytest.fixture(scope="session", autouse=True) +def testserver(): + """Start cadl ranch mock api tests""" + server = start_server_process() + yield + terminate_server_process(server) + + +_VALID_UUID = re.compile(r"^[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}$") +_VALID_RFC7231 = re.compile( + r"^(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT$" +) + + +def validate_format(value: str, format: Literal["uuid", "rfc7231"]): + if format == "uuid": + assert _VALID_UUID.match(value) + elif format == "rfc7231": + assert _VALID_RFC7231.match(value) + else: + raise ValueError("Unknown format") + + +@pytest.fixture +def check_repeatability_header(): + def func(request): + validate_format(request.http_request.headers["Repeatability-Request-ID"], "uuid") + validate_format(request.http_request.headers["Repeatability-First-Sent"], "rfc7231") + + return func + + +@pytest.fixture +def check_client_request_id_header(): + def func(request, header: str, checked: dict): + validate_format(request.http_request.headers[header], "uuid") + checked[header] = request.http_request.headers[header] + + return func + + +# ================== after azure-core fix, the following code can be removed (begin) ================== +import urllib.parse +from azure.core.rest import HttpRequest + + +def update_api_version_of_status_link(status_link: str): + request_params = {} + parsed_status_link = urllib.parse.urlparse(status_link) + request_params = { + key.lower(): [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(parsed_status_link.query).items() + } + request_params["api-version"] = "2022-12-01-preview" + status_link = urllib.parse.urljoin(status_link, parsed_status_link.path) + return status_link, request_params + + +@pytest.fixture +def polling_method(): + from azure.core.polling.base_polling import LROBasePolling + + class TempLroBasePolling(LROBasePolling): + + def request_status(self, status_link: str): + if self._path_format_arguments: + status_link = self._client.format_url(status_link, **self._path_format_arguments) + status_link, request_params = update_api_version_of_status_link(status_link) + if "request_id" not in self._operation_config: + self._operation_config["request_id"] = self._get_request_id() + + rest_request = HttpRequest("GET", status_link, params=request_params) + return self._client.send_request(rest_request, _return_pipeline_response=True, **self._operation_config) + + return TempLroBasePolling(0) + + +@pytest.fixture +def async_polling_method(): + from azure.core.polling.async_base_polling import AsyncLROBasePolling + + class AsyncTempLroBasePolling(AsyncLROBasePolling): + + async def request_status(self, status_link: str): + if self._path_format_arguments: + status_link = 
self._client.format_url(status_link, **self._path_format_arguments) + status_link, request_params = update_api_version_of_status_link(status_link) + # Re-inject 'x-ms-client-request-id' while polling + if "request_id" not in self._operation_config: + self._operation_config["request_id"] = self._get_request_id() + + rest_request = HttpRequest("GET", status_link, params=request_params) + return await self._client.send_request( + rest_request, _return_pipeline_response=True, **self._operation_config + ) + + return AsyncTempLroBasePolling(0) + + +# ================== after azure-core fix, the up code can be removed (end) ================== + + +@pytest.fixture() +def credential(): + """I actually don't need anything, since the authentication policy + will bypass it. + """ + + class FakeCredential: + pass + + return FakeCredential() + + +@pytest.fixture() +def authentication_policy(): + from azure.core.pipeline.policies import SansIOHTTPPolicy + + return SansIOHTTPPolicy() diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_arm_models_common_types_managed_identity.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_arm_models_common_types_managed_identity.py new file mode 100644 index 0000000000..312bbbdfe2 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_arm_models_common_types_managed_identity.py @@ -0,0 +1,69 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from azure.resourcemanager.models.commontypes.managedidentity import ( + ManagedIdentityClient, +) +from azure.resourcemanager.models.commontypes.managedidentity import models + +SUBSCRIPTION_ID = "00000000-0000-0000-0000-000000000000" +RESOURCE_GROUP_NAME = "test-rg" + + +@pytest.fixture +def client(credential, authentication_policy): + with ManagedIdentityClient( + credential, + SUBSCRIPTION_ID, + "http://localhost:3000", + authentication_policy=authentication_policy, + ) as client: + yield client + + +def test_managed_identity_tracked_resources_get(client): + result = client.managed_identity_tracked_resources.get( + resource_group_name=RESOURCE_GROUP_NAME, + managed_identity_tracked_resource_name="identity", + ) + assert result.location == "eastus" + assert result.identity.type == "SystemAssigned" + assert result.properties.provisioning_state == "Succeeded" + + +def test_managed_identity_tracked_resources_create_with_system_assigned(client): + result = client.managed_identity_tracked_resources.create_with_system_assigned( + resource_group_name=RESOURCE_GROUP_NAME, + managed_identity_tracked_resource_name="identity", + resource=models.ManagedIdentityTrackedResource( + location="eastus", + identity=models.ManagedServiceIdentity(type="SystemAssigned"), + ), + ) + assert result.location == "eastus" + assert result.identity.type == "SystemAssigned" + assert result.properties.provisioning_state == "Succeeded" + + +def test_managed_identity_tracked_resources_update_with_user_assigned_and_system_assigned( + client, +): + result = client.managed_identity_tracked_resources.update_with_user_assigned_and_system_assigned( + resource_group_name=RESOURCE_GROUP_NAME, + managed_identity_tracked_resource_name="identity", + properties=models.ManagedIdentityTrackedResource( + location="eastus", + 
identity=models.ManagedServiceIdentity(
+ type="SystemAssigned,UserAssigned",
+ user_assigned_identities={
+ "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test-rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/id1": models.UserAssignedIdentity()
+ },
+ ),
+ ),
+ )
+ assert result.location == "eastus"
+ assert result.identity.type == "SystemAssigned,UserAssigned"
+ assert result.properties.provisioning_state == "Succeeded"
diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_arm_models_resource.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_arm_models_resource.py
new file mode 100644
index 0000000000..6d562c1cdc
--- /dev/null
+++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_arm_models_resource.py
@@ -0,0 +1,195 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import pytest
+from azure.resourcemanager.models.resources import ResourcesClient
+from azure.resourcemanager.models.resources import models
+
+SUBSCRIPTION_ID = "00000000-0000-0000-0000-000000000000"
+RESOURCE_GROUP_NAME = "test-rg"
+
+
+@pytest.fixture
+def client(credential, authentication_policy):
+ with ResourcesClient(
+ credential,
+ SUBSCRIPTION_ID,
+ "http://localhost:3000",
+ authentication_policy=authentication_policy,
+ ) as client:
+ yield client
+
+
+def test_client_signature(credential, authentication_policy):
+ # make sure signature order is correct
+ client1 = ResourcesClient(
+ credential,
+ SUBSCRIPTION_ID,
+ "http://localhost:3000",
+ authentication_policy=authentication_policy,
+ )
+ # make sure signature name is correct
+ client2 = ResourcesClient(
+ credential=credential,
+ subscription_id=SUBSCRIPTION_ID,
+ base_url="http://localhost:3000",
+ authentication_policy=authentication_policy,
+ )
+ for client in [client1, client2]:
+ # make sure signature order is correct
+ client.top_level_tracked_resources.get(RESOURCE_GROUP_NAME, "top")
+ # make sure signature name is correct
+ client.top_level_tracked_resources.get(
+ resource_group_name=RESOURCE_GROUP_NAME,
+ top_level_tracked_resource_name="top",
+ )
+
+
+def test_top_level_tracked_resources_begin_create_or_replace(client):
+ result = client.top_level_tracked_resources.begin_create_or_replace(
+ resource_group_name=RESOURCE_GROUP_NAME,
+ top_level_tracked_resource_name="top",
+ resource=models.TopLevelTrackedResource(
+ location="eastus",
+ properties=models.TopLevelTrackedResourceProperties(
+ models.TopLevelTrackedResourceProperties(description="valid")
+ ),
+ ),
+ polling_interval=0, # set polling_interval to 0 s to make the test faster since default is 30s
+ ).result()
+ assert result.location == "eastus"
+ assert result.properties.description == "valid"
+ assert result.properties.provisioning_state == "Succeeded"
+ assert result.name == "top"
+ assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources"
+ assert result.system_data.created_by == "AzureSDK"
+
+
+def test_top_level_tracked_resources_begin_update(client):
+ result = client.top_level_tracked_resources.begin_update(
+ resource_group_name=RESOURCE_GROUP_NAME,
+ top_level_tracked_resource_name="top",
+ properties=models.TopLevelTrackedResource(
+ location="eastus", +
properties=models.TopLevelTrackedResourceProperties( + models.TopLevelTrackedResourceProperties(description="valid2") + ), + ), + polling_interval=0, # set polling_interval to 0 s to make the test faster since default is 30s + ).result() + assert result.location == "eastus" + assert result.properties.description == "valid2" + assert result.properties.provisioning_state == "Succeeded" + assert result.name == "top" + assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources" + assert result.system_data.created_by == "AzureSDK" + + +def test_top_level_tracked_resources_begin_delete(client): + client.top_level_tracked_resources.begin_delete( + resource_group_name=RESOURCE_GROUP_NAME, + top_level_tracked_resource_name="top", + polling_interval=0, # set polling_interval to 0 s to make the test faster since default is 30s + ).result() + + +def test_top_level_tracked_resources_list_by_resource_group(client): + response = client.top_level_tracked_resources.list_by_resource_group( + resource_group_name=RESOURCE_GROUP_NAME, + ) + result = [r for r in response] + for result in result: + assert result.location == "eastus" + assert result.properties.description == "valid" + assert result.properties.provisioning_state == "Succeeded" + assert result.name == "top" + assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources" + assert result.system_data.created_by == "AzureSDK" + + +def test_top_level_tracked_resources_list_by_subscription(client): + response = client.top_level_tracked_resources.list_by_subscription() + result = [r for r in response] + for result in result: + assert result.location == "eastus" + assert result.properties.description == "valid" + assert result.properties.provisioning_state == "Succeeded" + assert result.name == "top" + assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources" + assert result.system_data.created_by == "AzureSDK" + + +def test_nested_proxy_resources_get(client): + result = client.nested_proxy_resources.get( + resource_group_name=RESOURCE_GROUP_NAME, + top_level_tracked_resource_name="top", + nexted_proxy_resource_name="nested", + ) + assert result.properties.description == "valid" + assert result.properties.provisioning_state == "Succeeded" + assert result.name == "nested" + assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources" + assert result.system_data.created_by == "AzureSDK" + + +def test_nested_proxy_resources_begin_create_or_replace(client): + result = client.nested_proxy_resources.begin_create_or_replace( + resource_group_name=RESOURCE_GROUP_NAME, + top_level_tracked_resource_name="top", + nexted_proxy_resource_name="nested", + resource=models.TopLevelTrackedResource( + properties=models.TopLevelTrackedResourceProperties( + models.TopLevelTrackedResourceProperties(description="valid") + ), + ), + polling_interval=0, # set polling_interval to 0 s to make the test faster since default is 30s + ).result() + assert result.properties.description == "valid" + assert result.properties.provisioning_state == "Succeeded" + assert result.name == "nested" + assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources" + assert result.system_data.created_by == "AzureSDK" + + +def test_nested_proxy_resources_begin_update(client): + result = client.nested_proxy_resources.begin_update( + resource_group_name=RESOURCE_GROUP_NAME, + top_level_tracked_resource_name="top", + 
nexted_proxy_resource_name="nested", + properties=models.TopLevelTrackedResource( + properties=models.TopLevelTrackedResourceProperties( + models.TopLevelTrackedResourceProperties(description="valid2") + ), + ), + polling_interval=0, # set polling_interval to 0 s to make the test faster since default is 30s + ).result() + assert result.properties.description == "valid2" + assert result.properties.provisioning_state == "Succeeded" + assert result.name == "nested" + assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources" + assert result.system_data.created_by == "AzureSDK" + + +def test_nested_proxy_resources_begin_delete(client): + client.nested_proxy_resources.begin_delete( + resource_group_name=RESOURCE_GROUP_NAME, + top_level_tracked_resource_name="top", + nexted_proxy_resource_name="nested", + polling_interval=0, # set polling_interval to 0 s to make the test faster since default is 30s + ).result() + + +def test_nested_proxy_resources_list_by_top_level_tracked_resource(client): + response = client.nested_proxy_resources.list_by_top_level_tracked_resource( + resource_group_name=RESOURCE_GROUP_NAME, + top_level_tracked_resource_name="top", + ) + result = [r for r in response] + for result in result: + assert result.properties.description == "valid" + assert result.properties.provisioning_state == "Succeeded" + assert result.name == "nested" + assert result.type == "Azure.ResourceManager.Models.Resources/topLevelTrackedResources/top/nestedProxyResources" + assert result.system_data.created_by == "AzureSDK" diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_access.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_access.py new file mode 100644 index 0000000000..f694dcbc21 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_access.py @@ -0,0 +1,96 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.clientgenerator.core.access import AccessClient +from specs.azure.clientgenerator.core.access import models + + +@pytest.fixture +def client(): + with AccessClient() as client: + yield client + + +def test_no_decorator_in_public(client: AccessClient): + result = client.public_operation.no_decorator_in_public(name="test") + assert result == models.NoDecoratorModelInPublic(name="test") + + +def test_public_decorator_in_public(client: AccessClient): + result = client.public_operation.public_decorator_in_public(name="test") + assert result == models.PublicDecoratorModelInPublic(name="test") + + +def test_no_decorator_in_internal(client: AccessClient): + result = client.internal_operation._no_decorator_in_internal(name="test") + assert result == models._models.NoDecoratorModelInInternal(name="test") + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import ( + NoDecoratorModelInInternal, + ) + + with pytest.raises(AttributeError): + client.internal_operation.no_decorator_in_internal(name="test") + + +def test_internal_decorator_in_internal(client: AccessClient): + result = client.internal_operation._internal_decorator_in_internal(name="test") + assert result == models._models.InternalDecoratorModelInInternal(name="test") + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import ( + InternalDecoratorModelInInternal, + ) + + with pytest.raises(AttributeError): + client.internal_operation.internal_decorator_in_internal(name="test") + + +def test_public_decorator_in_internal(client: AccessClient): + result = client.internal_operation._public_decorator_in_internal(name="test") + assert result == models.PublicDecoratorModelInInternal(name="test") + + with pytest.raises(AttributeError): + client.internal_operation.public_decorator_in_internal(name="test") + + +def test_public(client: AccessClient): + result = client.shared_model_in_operation.public(name="test") + assert result == models.SharedModel(name="test") + + +def test_internal(client: AccessClient): + result = client.shared_model_in_operation._internal(name="test") + assert result == models.SharedModel(name="test") + + with pytest.raises(AttributeError): + client.shared_model_in_operation.internal(name="test") + + +def test_operation(client: AccessClient): + result = client.relative_model_in_operation._operation(name="test") + assert result == models._models.OuterModel(name="Madge", inner=models._models.InnerModel(name="Madge")) + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import OuterModel + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import InnerModel + + with pytest.raises(AttributeError): + client.shared_model_in_operation.operation(name="test") + + +def test_discriminator(client: AccessClient): + result = client.relative_model_in_operation._discriminator(kind="real") + assert result == models._models.RealModel(name="Madge", kind="real") + + with pytest.raises(ImportError): + from specs.azure.clientgenerator.core.access.models import RealModel + + with pytest.raises(AttributeError): + client.shared_model_in_operation.discriminator(kind="real") diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_flatten.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_flatten.py new file mode 
100644 index 0000000000..366498d265 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_flatten.py @@ -0,0 +1,84 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specs.azure.clientgenerator.core.flattenproperty import FlattenPropertyClient +from specs.azure.clientgenerator.core.flattenproperty.models import ( + FlattenModel, + ChildModel, + NestedFlattenModel, + ChildFlattenModel, +) + + +@pytest.fixture +def client(): + with FlattenPropertyClient() as client: + yield client + + +# ========== test for cadl-ranch ========== + + +def test_put_flatten_model(client: FlattenPropertyClient): + resp = FlattenModel(name="test", properties=ChildModel(age=1, description="test")) + assert client.put_flatten_model(FlattenModel(name="foo", properties=ChildModel(age=10, description="bar"))) == resp + assert client.put_flatten_model(FlattenModel(name="foo", age=10, description="bar")) == resp + + +def test_put_nested_flatten_model(client: FlattenPropertyClient): + # python doesn't support nested flatten model + assert client.put_nested_flatten_model( + NestedFlattenModel( + name="foo", + properties=ChildFlattenModel(summary="bar", properties=ChildModel(age=10, description="test")), + ) + ) == NestedFlattenModel( + name="test", + properties=ChildFlattenModel(summary="test", properties=ChildModel(age=1, description="foo")), + ) + + +# ============test for compatibility ============ +def test_dpg_model_common(): + flatten_model = FlattenModel(name="hello", properties=ChildModel(age=0, description="test")) + assert flatten_model.name == "hello" + assert flatten_model.properties.age == 0 + assert flatten_model.properties.description == "test" + + +def test_dpg_model_none(): + flatten_model = FlattenModel() + assert flatten_model.name is None + assert flatten_model.properties is None + assert flatten_model.age is None + assert flatten_model.description is None + + +def test_dpg_model_compatibility(): + flatten_model = FlattenModel(description="test", age=0) + assert flatten_model.description == "test" + assert flatten_model.age == 0 + assert flatten_model.properties.description == "test" + assert flatten_model.properties.age == 0 + + +def test_dpg_model_setattr(): + flatten_model = FlattenModel() + + flatten_model.age = 0 + assert flatten_model.properties.age == 0 + flatten_model.description = "test" + assert flatten_model.properties.description == "test" + + flatten_model.properties.age = 1 + assert flatten_model.age == 1 + flatten_model.properties.description = "test2" + assert flatten_model.description == "test2" + + +def test_dpg_model_exception(): + with pytest.raises(AttributeError): + FlattenModel().no_prop diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_usage.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_usage.py new file mode 100644 index 0000000000..3416ef21b2 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_client_generator_core_usage.py @@ -0,0 +1,28 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specs.azure.clientgenerator.core.usage import UsageClient +from specs.azure.clientgenerator.core.usage import models + + +@pytest.fixture +def client(): + with UsageClient() as client: + yield client + + +def test_input_to_input_output(client: UsageClient): + client.model_in_operation.input_to_input_output(models.InputModel(name="Madge")) + + +def test_output_to_input_output(client: UsageClient): + assert models.OutputModel(name="Madge") == client.model_in_operation.output_to_input_output() + + +def test_model_usage(client: UsageClient): + assert models.RoundTripModel( + result=models.ResultModel(name="Madge") + ) == client.model_in_operation.model_in_read_only_property(body=models.RoundTripModel()) diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_basic.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_basic.py new file mode 100644 index 0000000000..91a35bd86a --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_basic.py @@ -0,0 +1,65 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.basic import BasicClient, models + +VALID_USER = models.User(id=1, name="Madge", etag="11bdc430-65e8-45ad-81d9-8ffa60d55b59") + + +@pytest.fixture +def client(): + with BasicClient() as client: + yield client + + +def test_create_or_update(client: BasicClient): + result = client.create_or_update(id=1, resource={"name": "Madge"}) + assert result == VALID_USER + + +def test_create_or_replace(client: BasicClient): + result = client.create_or_replace(id=1, resource={"name": "Madge"}) + assert result == VALID_USER + + +def test_get(client: BasicClient): + result = client.get(id=1) + assert result == VALID_USER + + +def test_list(client: BasicClient): + result = list( + client.list( + top=5, + skip=10, + orderby=["id"], + filter="id lt 10", + select=["id", "orders", "etag"], + expand=["orders"], + ) + ) + assert len(result) == 2 + assert result[0].id == 1 + assert result[0].name == "Madge" + assert result[0].etag == "11bdc430-65e8-45ad-81d9-8ffa60d55b59" + assert result[0].orders[0].id == 1 + assert result[0].orders[0].user_id == 1 + assert result[0].orders[0].detail == "a recorder" + assert result[1].id == 2 + assert result[1].name == "John" + assert result[1].etag == "11bdc430-65e8-45ad-81d9-8ffa60d55b5a" + assert result[1].orders[0].id == 2 + assert result[1].orders[0].user_id == 2 + assert result[1].orders[0].detail == "a TV" + + +def test_delete(client: BasicClient): + client.delete(id=1) + + +def test_export(client: BasicClient): + result = client.export(id=1, format="json") + assert result == VALID_USER diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_lro_rpc.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_lro_rpc.py new file mode 100644 index 0000000000..801f6bc8e6 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_lro_rpc.py @@ -0,0 +1,22 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from azurecore.lro.rpc import RpcClient, models + + +@pytest.fixture +def client(): + with RpcClient() as client: + yield client + + +def test_long_running_rpc(client: RpcClient, polling_method): + result = client.begin_long_running_rpc( + models.GenerationOptions(prompt="text"), + polling_interval=0, + polling=polling_method, + ).result() + assert result == models.GenerationResult(data="text data") diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_lro_standard.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_lro_standard.py new file mode 100644 index 0000000000..c9337e93fa --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_lro_standard.py @@ -0,0 +1,32 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.lro.standard import StandardClient +from specs.azure.core.lro.standard.models import User, ExportedUser + + +@pytest.fixture +def client(): + with StandardClient() as client: + yield client + + +def test_lro_core_put(client, polling_method): + user = User({"name": "madge", "role": "contributor"}) + result = client.begin_create_or_replace( + name=user.name, resource=user, polling_interval=0, polling=polling_method + ).result() + assert result == user + + +def test_lro_core_delete(client, polling_method): + client.begin_delete(name="madge", polling_interval=0, polling=polling_method).result() + + +def test_lro_core_export(client, polling_method): + export_user = ExportedUser({"name": "madge", "resourceUri": "/users/madge"}) + result = client.begin_export(name="madge", format="json", polling_interval=0, polling=polling_method).result() + assert result == export_user diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_model.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_model.py new file mode 100644 index 0000000000..adca0211e5 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_model.py @@ -0,0 +1,30 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.model import ModelClient +from specs.azure.core.model.models import AzureEmbeddingModel + + +@pytest.fixture +def client(): + with ModelClient() as client: + yield client + + +def test_azure_core_embedding_vector_post(client: ModelClient): + embedding_model = AzureEmbeddingModel(embedding=[0, 1, 2, 3, 4]) + result = client.azure_core_embedding_vector.post( + body=embedding_model, + ) + assert result == AzureEmbeddingModel(embedding=[5, 6, 7, 8, 9]) + + +def test_azure_core_embedding_vector_put(client: ModelClient): + client.azure_core_embedding_vector.put(body=[0, 1, 2, 3, 4]) + + +def test_azure_core_embedding_vector_get(client: ModelClient): + assert [0, 1, 2, 3, 4] == client.azure_core_embedding_vector.get() diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_page.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_page.py new file mode 100644 index 0000000000..1c2ec72e16 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_page.py @@ -0,0 +1,51 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typing import Iterable +from specs.azure.core.page import PageClient, models + +VALID_USER = models.User(id=1, name="Madge", etag="11bdc430-65e8-45ad-81d9-8ffa60d55b59") + + +@pytest.fixture +def client(): + with PageClient() as client: + yield client + + +def _list_with_page_tests(pager: Iterable[models.User]): + result = list(pager) + assert len(result) == 1 + assert result[0].id == 1 + assert result[0].name == "Madge" + assert result[0].etag == "11bdc430-65e8-45ad-81d9-8ffa60d55b59" + assert result[0].orders is None + + +def test_list_with_page(client: PageClient): + _list_with_page_tests(client.list_with_page()) + + +def test_list_with_custom_page_model(client: PageClient): + _list_with_page_tests(client.list_with_custom_page_model()) + with pytest.raises(AttributeError): + models.CustomPageModel + + +def test_list_with_parameters(client: PageClient): + result = list(client.list_with_parameters(models.ListItemInputBody(input_name="Madge"), another="Second")) + assert len(result) == 1 + assert result[0] == VALID_USER + + +def test_two_models_as_page_item(client: PageClient): + result = list(client.two_models_as_page_item.list_first_item()) + assert len(result) == 1 + assert result[0].id == 1 + + result = list(client.two_models_as_page_item.list_second_item()) + assert len(result) == 1 + assert result[0].name == "Madge" diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_scalar.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_scalar.py new file mode 100644 index 0000000000..1bca122b36 --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_scalar.py @@ -0,0 +1,35 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specs.azure.core.scalar import ScalarClient, models + + +@pytest.fixture +def client(): + with ScalarClient() as client: + yield client + + +def test_azure_location_scalar_get(client: ScalarClient): + result = client.azure_location_scalar.get() + assert result == "eastus" + + +def test_azure_location_scalar_put(client: ScalarClient): + client.azure_location_scalar.put("eastus") + + +def test_azure_location_scalar_post(client: ScalarClient): + result = client.azure_location_scalar.post(models.AzureLocationModel(location="eastus")) + assert result == models.AzureLocationModel(location="eastus") + + +def test_azure_location_scalar_header(client: ScalarClient): + client.azure_location_scalar.header(region="eastus") + + +def test_azure_location_scalar_query(client: ScalarClient): + client.azure_location_scalar.query(region="eastus") diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_traits.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_traits.py new file mode 100644 index 0000000000..cbbf12d21a --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_core_traits.py @@ -0,0 +1,87 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import functools +from datetime import datetime + +import pytest +from azure.core.exceptions import HttpResponseError +from azure.core import MatchConditions +from specs.azure.core.traits import TraitsClient +from specs.azure.core.traits.models import UserActionParam + + +@pytest.fixture +def client(): + with TraitsClient() as client: + yield client + + +def test_get(client: TraitsClient, check_client_request_id_header): + def assert_test_get(**kwargs): + checked = {} + result, header = client.smoke_test( + id=1, + foo="123", + if_unmodified_since=datetime(year=2022, month=8, day=26, hour=14, minute=38, second=0), + if_modified_since=datetime(year=2021, month=8, day=26, hour=14, minute=38, second=0), + cls=lambda x, y, z: (y, z), + raw_request_hook=functools.partial( + check_client_request_id_header, + header="x-ms-client-request-id", + checked=checked, + ), + **kwargs, + ) + assert result.id == 1 + assert result.name == "Madge" + assert header["ETag"] == "11bdc430-65e8-45ad-81d9-8ffa60d55b59" + assert header["bar"] == "456" + assert header["x-ms-client-request-id"] == checked["x-ms-client-request-id"] + + assert_test_get(etag="valid", match_condition=MatchConditions.IfNotModified) + assert_test_get(etag="invalid", match_condition=MatchConditions.IfModified) + with pytest.raises(HttpResponseError): + assert_test_get() + + +def test_repeatable_action(client: TraitsClient, check_repeatability_header): + result, header = client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + raw_request_hook=check_repeatability_header, + ) + assert result.user_action_result == "test" + assert header["Repeatability-Result"] == "accepted" + + result, header = client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + headers={ + "Repeatability-Request-ID": "5942d803-e3fa-4f96-8f67-607d7bd607f5", + "Repeatability-First-Sent": 
"Sun, 06 Nov 1994 08:49:37 GMT", + }, + raw_request_hook=check_repeatability_header, + ) + assert result.user_action_result == "test" + assert header["Repeatability-Result"] == "accepted" + + with pytest.raises(HttpResponseError): + client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + headers={"Repeatability-Request-ID": "wrong-id"}, + ) + + with pytest.raises(HttpResponseError): + client.repeatable_action( + id=1, + body=UserActionParam(user_action_value="test"), + cls=lambda x, y, z: (y, z), + headers={"Repeatability-First-Sent": "wrong-datetime"}, + ) diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_example_basic.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_example_basic.py new file mode 100644 index 0000000000..a3b0ee01cd --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_example_basic.py @@ -0,0 +1,29 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specs.azure.example.basic import AzureExampleClient +from specs.azure.example.basic.models import ActionRequest, Model + + +@pytest.fixture +def client(): + with AzureExampleClient() as client: + yield client + + +def test_basic_action(client: AzureExampleClient): + body = ActionRequest( + string_property="text", + model_property=Model(int32_property=1, float32_property=1.5, enum_property="EnumValue1"), + array_property=["item"], + record_property={"record": "value"}, + ) + result = client.basic_action( + body=body, + query_param="query", + header_param="header", + ) + assert result.string_property == body.string_property diff --git a/packages/http-client-python/test/azure/mock_api_tests/test_azure_special_headers_client_request_id.py b/packages/http-client-python/test/azure/mock_api_tests/test_azure_special_headers_client_request_id.py new file mode 100644 index 0000000000..c6f1d59beb --- /dev/null +++ b/packages/http-client-python/test/azure/mock_api_tests/test_azure_special_headers_client_request_id.py @@ -0,0 +1,31 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import functools + +import pytest + +from azure.specialheaders.xmsclientrequestid import XmsClientRequestIdClient + + +@pytest.fixture +def client(): + with XmsClientRequestIdClient() as client: + yield client + + +def test_get(client: XmsClientRequestIdClient, check_client_request_id_header): + checked = {} + result, resp = client.get( + cls=lambda x, y, z: (y, x), + raw_request_hook=functools.partial( + check_client_request_id_header, + header="x-ms-client-request-id", + checked=checked, + ), + ) + assert result is None + assert resp.http_response.headers["x-ms-client-request-id"] == checked["x-ms-client-request-id"] + pass diff --git a/packages/http-client-python/test/azure/requirements.txt b/packages/http-client-python/test/azure/requirements.txt new file mode 100644 index 0000000000..75b51c3b6b --- /dev/null +++ b/packages/http-client-python/test/azure/requirements.txt @@ -0,0 +1,88 @@ +aiohttp;python_full_version>="3.5.2" +requests==2.32.2 +pytest +pytest-asyncio==0.14.0;python_full_version>="3.5.2" +azure-core==1.30.0 +azure-mgmt-core==1.3.2 +pyright==1.1.379 +pylint==3.2.7 +tox==4.18.1 +mypy==1.10.1 + +# only for azure +-e ./generated/azure-client-generator-core-access +-e ./generated/azure-client-generator-core-usage +-e ./generated/azure-client-generator-core-flatten-property +-e ./generated/azure-core-basic +-e ./generated/azure-core-scalar +-e ./generated/azurecore-lro-rpc +-e ./generated/azure-core-lro-standard +-e ./generated/azure-core-model +-e ./generated/azure-core-traits +-e ./generated/azure-core-page +-e ./generated/azure-special-headers-client-request-id/ +-e ./generated/azure-example-basic +-e ./generated/azure-resource-manager-models-common-types-managed-identity +-e ./generated/azure-resource-manager-models-resources + +# common test case +-e ./generated/authentication-api-key +-e ./generated/authentication-http-custom +-e ./generated/authentication-oauth2 +-e ./generated/authentication-union +-e ./generated/client-naming +-e ./generated/encode-duration +-e ./generated/encode-numeric +-e ./generated/parameters-basic +-e ./generated/parameters-collection-format +-e ./generated/parameters-spread +-e ./generated/resiliency-srv-driven1 +-e ./generated/resiliency-srv-driven2 +-e ./generated/serialization-encoded-name-json +-e ./generated/server-endpoint-not-defined +-e ./generated/server-path-multiple +-e ./generated/server-path-single +-e ./generated/server-versions-versioned +-e ./generated/server-versions-not-versioned +-e ./generated/special-words +-e ./generated/typetest-array +-e ./generated/typetest-dictionary +-e ./generated/typetest-enum-extensible +-e ./generated/typetest-enum-fixed +-e ./generated/typetest-model-enumdiscriminator +-e ./generated/typetest-model-nesteddiscriminator +-e ./generated/typetest-model-notdiscriminated +-e ./generated/typetest-model-singlediscriminator +-e ./generated/typetest-model-recursive +-e ./generated/typetest-model-usage +-e ./generated/typetest-model-visibility +-e ./generated/typetest-property-nullable +-e ./generated/typetest-property-optional +-e ./generated/typetest-property-valuetypes +-e ./generated/typetest-property-additionalproperties +-e ./generated/typetest-scalar +-e ./generated/typetest-union +-e ./generated/typetest-model-empty +-e ./generated/headasbooleantrue +-e ./generated/headasbooleanfalse +-e ./generated/parameters-body-optionality +-e ./generated/special-headers-repeatability +-e ./generated/special-headers-conditional-request 
+-e ./generated/encode-datetime +-e ./generated/encode-bytes +-e ./generated/client-structure-default +-e ./generated/client-structure-multiclient +-e ./generated/client-structure-renamedoperation +-e ./generated/client-structure-twooperationgroup +-e ./generated/payload-content-negotiation +-e ./generated/payload-json-merge-patch +-e ./generated/payload-pageable +-e ./generated/payload-multipart +-e ./generated/payload-media-type +-e ./generated/payload-xml +-e ./generated/versioning-added +-e ./generated/versioning-madeoptional +-e ./generated/versioning-removed +-e ./generated/versioning-renamedfrom +-e ./generated/versioning-returntypechangedfrom +-e ./generated/versioning-typechangedfrom diff --git a/packages/http-client-python/test/azure/tox.ini b/packages/http-client-python/test/azure/tox.ini new file mode 100644 index 0000000000..2047ad19eb --- /dev/null +++ b/packages/http-client-python/test/azure/tox.ini @@ -0,0 +1,35 @@ +[tox] +envlist=py38, py310, py311, py312 +skipsdist=True + +[testenv] +passenv=* +deps= + -r requirements.txt +commands= + pytest + +[testenv:ci] +commands = + pytest mock_api_tests ../generic_mock_api_tests + +[testenv:lint] +deps= + -r requirements.txt +commands = + pip install azure-pylint-guidelines-checker==0.4.1 --index-url="https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple/" + python ../../eng/scripts/ci/run_pylint.py -t azure -s "generated" {posargs} + +[testenv:mypy] +commands = + python ../../eng/scripts/ci/run_mypy.py -t azure -s "generated" {posargs} + +[testenv:pyright] +commands = + python ../../eng/scripts/ci/run_pyright.py -t azure -s "generated" {posargs} + +[testenv:apiview] +envlist=py311 +commands = + pip install ../../../../../azure-sdk-tools/packages/python-packages/apiview-stub-generator --extra-index-url="https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple/" + python ../../eng/scripts/ci/run_apiview.py -t azure -s "generated" {posargs} diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_authentication_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_authentication_async.py new file mode 100644 index 0000000000..57122ced7f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_authentication_async.py @@ -0,0 +1,121 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+import pytest
+from authentication.apikey.aio import ApiKeyClient
+from authentication.http.custom.aio import CustomClient
+from authentication.oauth2.aio import OAuth2Client
+from authentication.union.aio import UnionClient
+
+
+# Utility functions
+
+
+@pytest.fixture
+async def api_key_client(key_credential):
+    clients = []
+
+    def _build_client(client_type):
+        clients.append(client_type(key_credential("valid-key")))
+        return clients[-1]
+
+    yield _build_client
+    for client in clients:
+        await client.close()
+
+
+@pytest.fixture()
+def token_credential(core_library):
+    class FakeCredential:
+        @staticmethod
+        async def get_token(*scopes):
+            return core_library.credentials.AccessToken(token="".join(scopes), expires_on=1800)
+
+    return FakeCredential()
+
+
+@pytest.fixture
+async def oauth2_client(token_credential):
+    clients = []
+
+    def _build_client(client_type):
+        clients.append(client_type(token_credential))
+        return clients[-1]
+
+    yield _build_client
+    for client in clients:
+        await client.close()
+
+
+@pytest.fixture
+async def http_custom_client(key_credential):
+    clients = []
+
+    def _build_client():
+        clients.append(CustomClient(key_credential("valid-key")))
+        return clients[-1]
+
+    yield _build_client
+    for client in clients:
+        await client.close()
+
+
+# Tests
+
+
+@pytest.mark.asyncio
+async def test_api_key_valid(api_key_client):
+    client = api_key_client(ApiKeyClient)
+    await client.valid()
+
+
+@pytest.mark.asyncio
+async def test_api_key_invalid(api_key_client, core_library):
+    client = api_key_client(ApiKeyClient)
+    with pytest.raises(core_library.exceptions.HttpResponseError) as ex:
+        await client.invalid()
+    assert ex.value.status_code == 403
+    assert ex.value.reason == "Forbidden"
+
+
+@pytest.mark.asyncio
+async def test_oauth2_valid(oauth2_client):
+    client = oauth2_client(OAuth2Client)
+    await client.valid(enforce_https=False)
+
+
+@pytest.mark.asyncio
+async def test_oauth2_invalid(oauth2_client, core_library):
+    client = oauth2_client(OAuth2Client)
+    with pytest.raises(core_library.exceptions.HttpResponseError) as ex:
+        await client.invalid(enforce_https=False)
+    assert ex.value.status_code == 403
+
+
+@pytest.mark.asyncio
+async def test_union_keyvalid(api_key_client):
+    client = api_key_client(UnionClient)
+    await client.valid_key()
+
+
+@pytest.mark.asyncio
+async def test_union_tokenvalid(oauth2_client):
+    client = oauth2_client(UnionClient)
+    await client.valid_token(enforce_https=False)
+
+
+@pytest.mark.asyncio
+async def test_http_custom_valid(http_custom_client):
+    client = http_custom_client()
+    await client.valid()
+
+
+@pytest.mark.asyncio
+async def test_http_custom_invalid(http_custom_client, core_library):
+    client = http_custom_client()
+    with pytest.raises(core_library.exceptions.HttpResponseError) as ex:
+        await client.invalid()
+    assert ex.value.status_code == 403
+    assert ex.value.reason == "Forbidden"
diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_client_naming_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_client_naming_async.py
new file mode 100644
index 0000000000..3d6ff69699
--- /dev/null
+++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_client_naming_async.py
@@ -0,0 +1,69 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- +import pytest +from client.naming.aio import NamingClient +from client.naming import models + + +@pytest.fixture +async def client(): + async with NamingClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_client(client: NamingClient): + await client.client(models.ClientNameModel(client_name=True)) + + +@pytest.mark.asyncio +async def test_language(client: NamingClient): + await client.language(models.LanguageClientNameModel(python_name=True)) + + +@pytest.mark.asyncio +async def test_compatible_with_encoded_name(client: NamingClient): + await client.compatible_with_encoded_name(models.ClientNameAndJsonEncodedNameModel(client_name=True)) + + +@pytest.mark.asyncio +async def test_operation(client: NamingClient): + await client.client_name() + + +@pytest.mark.asyncio +async def test_parameter(client: NamingClient): + await client.parameter(client_name="true") + + +@pytest.mark.asyncio +async def test_header_request(client: NamingClient): + await client.request(client_name="true") + + +@pytest.mark.asyncio +async def test_header_response(client: NamingClient): + assert (await client.response(cls=lambda x, y, z: z))["default-name"] == "true" + + +@pytest.mark.asyncio +async def test_model_client(client: NamingClient): + await client.client_model.client(models.ClientModel(default_name=True)) + + +@pytest.mark.asyncio +async def test_model_language(client: NamingClient): + await client.client_model.language(models.PythonModel(default_name=True)) + + +@pytest.mark.asyncio +async def test_union_enum_member_name(client: NamingClient): + await client.union_enum.union_enum_member_name(models.ExtensibleEnum.CLIENT_ENUM_VALUE1) + + +@pytest.mark.asyncio +async def test_union_enum_name(client: NamingClient): + await client.union_enum.union_enum_name(models.ClientExtensibleEnum.ENUM_VALUE1) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_client_structure_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_client_structure_async.py new file mode 100644 index 0000000000..be39934d89 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_client_structure_async.py @@ -0,0 +1,62 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from client.structure.service.models import ClientType +from client.structure.service.aio import ServiceClient +from client.structure.multiclient.aio import ClientAClient, ClientBClient +from client.structure.renamedoperation.aio import RenamedOperationClient +from client.structure.twooperationgroup.aio import TwoOperationGroupClient + + +@pytest.mark.asyncio +async def test_structure_default(): + client = ServiceClient(endpoint="http://localhost:3000", client=ClientType.DEFAULT) + await client.one() + await client.two() + await client.foo.three() + await client.foo.four() + await client.bar.five() + await client.bar.six() + await client.baz.foo.seven() + await client.qux.eight() + await client.qux.bar.nine() + + +@pytest.mark.asyncio +async def test_structure_multiclient(): + client_a = ClientAClient(endpoint="http://localhost:3000", client=ClientType.MULTI_CLIENT) + await client_a.renamed_one() + await client_a.renamed_three() + await client_a.renamed_five() + + client_b = ClientBClient(endpoint="http://localhost:3000", client=ClientType.MULTI_CLIENT) + await client_b.renamed_two() + await client_b.renamed_four() + await client_b.renamed_six() + + +@pytest.mark.asyncio +async def test_structure_renamed_operation(): + client = RenamedOperationClient(endpoint="http://localhost:3000", client=ClientType.RENAMED_OPERATION) + await client.renamed_one() + await client.renamed_three() + await client.renamed_five() + + await client.group.renamed_two() + await client.group.renamed_four() + await client.group.renamed_six() + + +@pytest.mark.asyncio +async def test_structure_two_operation_group(): + client = TwoOperationGroupClient(endpoint="http://localhost:3000", client=ClientType.RENAMED_OPERATION) + await client.group1.one() + await client.group1.three() + await client.group1.four() + + await client.group2.two() + await client.group2.five() + await client.group2.six() diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_bytes_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_bytes_async.py new file mode 100644 index 0000000000..db7c3a30db --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_bytes_async.py @@ -0,0 +1,133 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from pathlib import Path +from encode.bytes.aio import BytesClient +from encode.bytes.models import ( + DefaultBytesProperty, + Base64urlBytesProperty, + Base64BytesProperty, + Base64urlArrayBytesProperty, +) + + +FILE_FOLDER = Path(__file__).parent.parent + + +@pytest.fixture +async def client(): + async with BytesClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_query(client: BytesClient): + await client.query.default( + value=bytes("test", "utf-8"), + ) + await client.query.base64( + value=bytes("test", "utf-8"), + ) + await client.query.base64_url( + value=bytes("test", "utf-8"), + ) + await client.query.base64_url_array( + value=[ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ], + ) + + +@pytest.mark.asyncio +async def test_property(client: BytesClient): + result = await client.property.default( + DefaultBytesProperty( + value=bytes("test", "utf-8"), + ) + ) + assert result.value == bytes("test", "utf-8") + + result = await client.property.base64( + Base64BytesProperty( + value=bytes("test", "utf-8"), + ) + ) + assert result.value == bytes("test", "utf-8") + + result = await client.property.base64_url( + Base64urlBytesProperty( + value=bytes("test", "utf-8"), + ) + ) + assert result.value == bytes("test", "utf-8") + + result = await client.property.base64_url_array( + Base64urlArrayBytesProperty( + value=[ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ], + ) + ) + assert result.value == [ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ] + + +@pytest.mark.asyncio +async def test_header(client: BytesClient): + await client.header.default( + value=bytes("test", "utf-8"), + ) + await client.header.base64( + value=bytes("test", "utf-8"), + ) + await client.header.base64_url( + value=bytes("test", "utf-8"), + ) + await client.header.base64_url_array( + value=[ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ], + ) + + +@pytest.fixture +def png_data() -> bytes: + with open(str(FILE_FOLDER / "data/image.png"), "rb") as file_in: + return file_in.read() + + +@pytest.mark.asyncio +async def test_request_body(client: BytesClient, png_data: bytes): + await client.request_body.default( + value=bytes("test", "utf-8"), + ) + await client.request_body.octet_stream( + value=png_data, + ) + await client.request_body.custom_content_type( + value=png_data, + ) + await client.request_body.base64( + value=bytes("test", "utf-8"), + ) + await client.request_body.base64_url( + value=bytes("test", "utf-8"), + ) + + +@pytest.mark.asyncio +async def test_response_body(client: BytesClient, png_data: bytes): + expected = b"test" + assert expected == await client.response_body.default() + assert expected == await client.response_body.base64() + assert expected == await client.response_body.base64_url() + assert b"".join([d async for d in (await client.response_body.octet_stream())]) == png_data + assert b"".join([d async for d in (await client.response_body.custom_content_type())]) == png_data diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_datetime_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_datetime_async.py new file mode 100644 index 0000000000..1249b8e8f3 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_datetime_async.py @@ -0,0 +1,127 @@ +# ------------------------------------------------------------------------- +# Copyright 
(c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import datetime + +import pytest +from encode.datetime.aio import DatetimeClient +from encode.datetime.models import ( + DefaultDatetimeProperty, + Rfc3339DatetimeProperty, + Rfc7231DatetimeProperty, + UnixTimestampDatetimeProperty, + UnixTimestampArrayDatetimeProperty, +) + + +@pytest.fixture +async def client(): + async with DatetimeClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_query(client: DatetimeClient): + await client.query.default( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + await client.query.rfc3339( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + await client.query.rfc7231( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + await client.query.unix_timestamp( + value=datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + ) + await client.query.unix_timestamp_array( + value=[ + datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ], + ) + + +@pytest.mark.asyncio +async def test_property(client: DatetimeClient): + result = await client.property.default( + DefaultDatetimeProperty( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc) + + result = await client.property.rfc3339( + Rfc3339DatetimeProperty( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc) + + result = await client.property.rfc7231( + Rfc7231DatetimeProperty( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc) + + result = await client.property.unix_timestamp( + UnixTimestampDatetimeProperty( + value=datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc) + + result = await client.property.unix_timestamp_array( + UnixTimestampArrayDatetimeProperty( + value=[ + datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ], + ) + ) + assert result.value == [ + datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ] + + +@pytest.mark.asyncio +async def test_header(client: DatetimeClient): + await client.header.default( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + await client.header.rfc3339( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + await client.header.rfc7231( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + await client.header.unix_timestamp( + value=datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + ) + await client.header.unix_timestamp_array( + value=[ + datetime.datetime(2023, 6, 12, 
10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ] + ) + + +@pytest.mark.asyncio +async def test_response_header(client: DatetimeClient): + cls = lambda x, y, z: z + assert (await client.response_header.default(cls=cls))["value"] == datetime.datetime( + 2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc + ) + assert (await client.response_header.rfc3339(cls=cls))["value"] == datetime.datetime( + 2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc + ) + assert (await client.response_header.rfc7231(cls=cls))["value"] == datetime.datetime( + 2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc + ) + assert (await client.response_header.unix_timestamp(cls=cls))["value"] == datetime.datetime( + 2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc + ) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_duration_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_duration_async.py new file mode 100644 index 0000000000..0fca037194 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_duration_async.py @@ -0,0 +1,63 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import datetime + +import pytest +from encode.duration.aio import DurationClient +from encode.duration.models import ( + Int32SecondsDurationProperty, + ISO8601DurationProperty, + FloatSecondsDurationProperty, + DefaultDurationProperty, + FloatSecondsDurationArrayProperty, +) + + +@pytest.fixture +async def client(): + async with DurationClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_query(client: DurationClient): + await client.query.default(input=datetime.timedelta(days=40)) + await client.query.iso8601(input=datetime.timedelta(days=40)) + await client.query.int32_seconds(input=36) + await client.query.int32_seconds_array(input=[36, 47]) + await client.query.float_seconds(input=35.625) + await client.query.float64_seconds(input=35.625) + + +@pytest.mark.asyncio +async def test_property(client: DurationClient): + result = await client.property.default(DefaultDurationProperty(value=datetime.timedelta(days=40))) + assert result.value == datetime.timedelta(days=40) + result = await client.property.default(DefaultDurationProperty(value="P40D")) + assert result.value == datetime.timedelta(days=40) + result = await client.property.iso8601(ISO8601DurationProperty(value=datetime.timedelta(days=40))) + assert result.value == datetime.timedelta(days=40) + result = await client.property.iso8601(ISO8601DurationProperty(value="P40D")) + assert result.value == datetime.timedelta(days=40) + result = await client.property.int32_seconds(Int32SecondsDurationProperty(value=36)) + assert result.value == 36 + result = await client.property.float_seconds(FloatSecondsDurationProperty(value=35.625)) + assert abs(result.value - 35.625) < 0.0001 + result = await client.property.float64_seconds(FloatSecondsDurationProperty(value=35.625)) + assert abs(result.value - 35.625) < 0.0001 + result = await client.property.float_seconds_array(FloatSecondsDurationArrayProperty(value=[35.625, 46.75])) + assert abs(result.value[0] - 35.625) < 0.0001 + assert abs(result.value[1] - 46.75) 
< 0.0001 + + +@pytest.mark.asyncio +async def test_header(client: DurationClient): + await client.header.default(duration=datetime.timedelta(days=40)) + await client.header.iso8601(duration=datetime.timedelta(days=40)) + await client.header.iso8601_array(duration=[datetime.timedelta(days=40), datetime.timedelta(days=50)]) + await client.header.int32_seconds(duration=36) + await client.header.float_seconds(duration=35.625) + await client.header.float64_seconds(duration=35.625) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_numeric_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_numeric_async.py new file mode 100644 index 0000000000..64fcd4dfba --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_encode_numeric_async.py @@ -0,0 +1,28 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from encode.numeric.aio import NumericClient +from encode.numeric import models + + +@pytest.fixture +async def client(): + async with NumericClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_safeint_as_string(client: NumericClient): + result = await client.property.safeint_as_string(models.SafeintAsStringProperty(value=10000000000)) + assert result.value == 10000000000 + assert result["value"] == "10000000000" + + +@pytest.mark.asyncio +async def test_uint32_as_string_optional(client: NumericClient): + result = await client.property.uint32_as_string_optional(models.Uint32AsStringProperty(value=1)) + assert result.value == 1 + assert result["value"] == "1" diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_headasboolean_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_headasboolean_async.py new file mode 100644 index 0000000000..0ec3574286 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_headasboolean_async.py @@ -0,0 +1,35 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from headasbooleantrue.aio import VisibilityClient as HeadAsBooleanTrueClient +from headasbooleantrue import models as models_true + +from headasbooleanfalse.aio import VisibilityClient as HeadAsBooleanFalseClient +from headasbooleanfalse import models as models_false + + +@pytest.fixture +async def client_true(): + async with HeadAsBooleanTrueClient() as client: + yield client + + +@pytest.fixture +async def client_false(): + async with HeadAsBooleanFalseClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_head_true(client_true): + body = models_true.VisibilityModel(query_prop=123) + assert await client_true.head_model(body) == True + + +@pytest.mark.asyncio +async def test_head_false(client_false): + body = models_false.VisibilityModel(query_prop=123) + assert await client_false.head_model(body) is None diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_basic_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_basic_async.py new file mode 100644 index 0000000000..969e73a6c8 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_basic_async.py @@ -0,0 +1,24 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from parameters.basic.aio import BasicClient +from parameters.basic.models import User + + +@pytest.fixture +async def client(): + async with BasicClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_explicit_simple(client: BasicClient): + await client.explicit_body.simple(User(name="foo")) + + +@pytest.mark.asyncio +async def test_implicit_simple(client: BasicClient): + await client.implicit_body.simple(name="foo") diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_body_optionality_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_body_optionality_async.py new file mode 100644 index 0000000000..7c6bbe82c8 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_body_optionality_async.py @@ -0,0 +1,30 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from parameters.bodyoptionality.aio import BodyOptionalityClient +from parameters.bodyoptionality.models import BodyModel + + +@pytest.fixture +async def client(): + async with BodyOptionalityClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_required_explicit(client: BodyOptionalityClient): + await client.required_explicit(BodyModel(name="foo")) + + +@pytest.mark.asyncio +async def test_required_implicit(client: BodyOptionalityClient): + await client.required_implicit(name="foo") + + +@pytest.mark.asyncio +async def test_optional_explicit(client: BodyOptionalityClient): + await client.optional_explicit.set(BodyModel(name="foo")) + await client.optional_explicit.omit() diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_collection_format_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_collection_format_async.py new file mode 100644 index 0000000000..b57a24eb4c --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_collection_format_async.py @@ -0,0 +1,44 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from parameters.collectionformat.aio import CollectionFormatClient + + +@pytest.fixture +async def client(): + async with CollectionFormatClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_query_multi(client: CollectionFormatClient): + await client.query.multi(colors=["blue", "red", "green"]) + + +@pytest.mark.asyncio +async def test_query_csv(client: CollectionFormatClient): + await client.query.csv(colors=["blue", "red", "green"]) + + +@pytest.mark.asyncio +async def test_query_pipes(client: CollectionFormatClient): + await client.query.pipes(colors=["blue", "red", "green"]) + + +@pytest.mark.asyncio +async def test_query_ssv(client: CollectionFormatClient): + await client.query.ssv(colors=["blue", "red", "green"]) + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="https://github.com/aio-libs/aiohttp/issues/5904") +async def test_query_tsv(client: CollectionFormatClient): + await client.query.tsv(colors=["blue", "red", "green"]) + + +@pytest.mark.asyncio +async def test_csv_header(client: CollectionFormatClient): + await client.header.csv(colors=["blue", "red", "green"]) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_spread_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_spread_async.py new file mode 100644 index 0000000000..28d811eade --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_parameters_spread_async.py @@ -0,0 +1,81 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from parameters.spread.aio import SpreadClient +from parameters.spread.models import BodyParameter + + +@pytest.fixture +async def client(): + async with SpreadClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_model_body(client: SpreadClient): + await client.model.spread_as_request_body(name="foo") + + +@pytest.mark.asyncio +async def test_model_composite_request_only_with_body(client: SpreadClient): + await client.model.spread_composite_request_only_with_body(BodyParameter(name="foo")) + + +@pytest.mark.asyncio +async def test_model_composite_request_without_body(client: SpreadClient): + await client.model.spread_composite_request_without_body(name="foo", test_header="bar") + + +@pytest.mark.asyncio +async def test_model_composite_request(client: SpreadClient): + await client.model.spread_composite_request(name="foo", body=BodyParameter(name="foo"), test_header="bar") + + +@pytest.mark.asyncio +async def test_model_composite_request_mix(client: SpreadClient): + await client.model.spread_composite_request_mix(name="foo", prop="foo", test_header="bar") + + +@pytest.mark.asyncio +async def test_alias_body(client: SpreadClient): + await client.alias.spread_as_request_body(name="foo") + + +@pytest.mark.asyncio +async def test_alias_parameter(client: SpreadClient): + await client.alias.spread_as_request_parameter("1", x_ms_test_header="bar", name="foo") + + +@pytest.mark.asyncio +async def test_alias_multiple_parameter(client: SpreadClient): + await client.alias.spread_with_multiple_parameters( + "1", + x_ms_test_header="bar", + required_string="foo", + required_int_list=[1, 2], + optional_string_list=["foo", "bar"], + optional_int=1, + ) + await client.alias.spread_with_multiple_parameters( + "1", + { + "requiredString": "foo", + "optionalInt": 1, + "requiredIntList": [1, 2], + "optionalStringList": ["foo", "bar"], + }, + x_ms_test_header="bar", + ) + + +@pytest.mark.asyncio +async def test_inner_model(client: SpreadClient): + await client.alias.spread_parameter_with_inner_model(id="1", x_ms_test_header="bar", body={"name": "foo"}) + + +@pytest.mark.asyncio +async def test_inner_alias(client: SpreadClient): + await client.alias.spread_parameter_with_inner_alias(id="1", x_ms_test_header="bar", body={"name": "foo", "age": 1}) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_content_negotiation_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_content_negotiation_async.py new file mode 100644 index 0000000000..4c2a11f61a --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_content_negotiation_async.py @@ -0,0 +1,37 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import base64 +import pytest +from payload.contentnegotiation.aio import ContentNegotiationClient +from payload.contentnegotiation.models import PngImageAsJson + + +@pytest.fixture +async def client(): + async with ContentNegotiationClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_get_avatar_as_png(client: ContentNegotiationClient, png_data: bytes): + assert b"".join([d async for d in (await client.same_body.get_avatar_as_png())]) == png_data + + +@pytest.mark.asyncio +async def test_get_avatar_as_jpeg(client: ContentNegotiationClient, jpg_data: bytes): + assert b"".join([d async for d in (await client.same_body.get_avatar_as_jpeg())]) == jpg_data + + +@pytest.mark.asyncio +async def test_different_body_get_avatar_as_png(client: ContentNegotiationClient, png_data: bytes): + assert b"".join([d async for d in (await client.different_body.get_avatar_as_png())]) == png_data + + +@pytest.mark.asyncio +async def test_different_body_get_avatar_as_json(client: ContentNegotiationClient, png_data: bytes): + result = await client.different_body.get_avatar_as_json() + expected = PngImageAsJson(content=base64.b64encode(png_data).decode()) + assert result == expected diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_json_merge_patch_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_json_merge_patch_async.py new file mode 100644 index 0000000000..601a64482b --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_json_merge_patch_async.py @@ -0,0 +1,98 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from payload.jsonmergepatch.aio import JsonMergePatchClient +from payload.jsonmergepatch.models import InnerModel, Resource, ResourcePatch + +try: + from azure.core.serialization import NULL +except ImportError: + from corehttp.serialization import NULL + + +@pytest.fixture +async def client(): + async with JsonMergePatchClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_create_resource(client: JsonMergePatchClient): + inner_madge = InnerModel(name="InnerMadge", description="innerDesc") + create_resource = Resource( + name="Madge", + description="desc", + map={"key": inner_madge}, + array=[inner_madge], + int_value=1, + float_value=1.1, + inner_model=inner_madge, + int_array=[1, 2, 3], + ) + response = await client.create_resource(create_resource) + assert response == create_resource + + +@pytest.mark.asyncio +async def test_update_resource_model_input(client: JsonMergePatchClient): + update_resource = ResourcePatch( + description=NULL, + map={"key": InnerModel(description=NULL), "key2": NULL}, + array=NULL, + int_value=NULL, + float_value=NULL, + inner_model=NULL, + int_array=NULL, + ) + response = await client.update_resource(update_resource) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) + + +@pytest.mark.asyncio +async def test_update_resource_raw_input(client: JsonMergePatchClient): + response = await client.update_resource( + { + "description": None, + "map": {"key": {"description": None}, "key2": None}, + "array": None, + "intValue": None, + "floatValue": None, + "innerModel": None, + "intArray": None, + } + ) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) + + +@pytest.mark.asyncio +async def test_update_optional_resource_model_input(client: JsonMergePatchClient): + update_resource = ResourcePatch( + description=NULL, + map={"key": InnerModel(description=NULL), "key2": NULL}, + array=NULL, + int_value=NULL, + float_value=NULL, + inner_model=NULL, + int_array=NULL, + ) + response = await client.update_optional_resource(update_resource) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) + + +@pytest.mark.asyncio +async def test_update_optional_resource_raw_input(client: JsonMergePatchClient): + response = await client.update_optional_resource( + { + "description": None, + "map": {"key": {"description": None}, "key2": None}, + "array": None, + "intValue": None, + "floatValue": None, + "innerModel": None, + "intArray": None, + } + ) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_media_type_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_media_type_async.py new file mode 100644 index 0000000000..d783e6ac61 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_media_type_async.py @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from payload.mediatype.aio import MediaTypeClient + + +@pytest.fixture +async def client(): + async with MediaTypeClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_json(client: MediaTypeClient): + data = "foo" + await client.string_body.send_as_json(data) + assert await client.string_body.get_as_json() == data + + +@pytest.mark.asyncio +async def test_text(client: MediaTypeClient): + data = "{cat}" + await client.string_body.send_as_text(data) + assert await client.string_body.get_as_text() == data diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_multipart_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_multipart_async.py new file mode 100644 index 0000000000..3563b6d486 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_multipart_async.py @@ -0,0 +1,157 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from pathlib import Path +import pytest +from payload.multipart import models +from payload.multipart.aio import MultiPartClient + +JPG = Path(__file__).parent.parent / "data/image.jpg" +PNG = Path(__file__).parent.parent / "data/image.png" + + +@pytest.fixture +async def client(): + async with MultiPartClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_anonymous_model(client: MultiPartClient): + await client.form_data.anonymous_model({"profileImage": open(str(JPG), "rb")}) + + +@pytest.mark.asyncio +async def test_basic(client: MultiPartClient): + await client.form_data.basic( + models.MultiPartRequest( + id="123", + profile_image=open(str(JPG), "rb"), + ) + ) + + +@pytest.mark.asyncio +async def test_binary_array_parts(client: MultiPartClient): + await client.form_data.binary_array_parts( + models.BinaryArrayPartsRequest( + id="123", + pictures=[ + open(str(PNG), "rb"), + open(str(PNG), "rb"), + ], + ) + ) + + +@pytest.mark.asyncio +async def test_check_file_name_and_content_type(client: MultiPartClient): + await client.form_data.check_file_name_and_content_type( + models.MultiPartRequest( + id="123", + profile_image=("hello.jpg", open(str(JPG), "rb"), "image/jpg"), + ) + ) + + +@pytest.mark.asyncio +async def test_complex(client: MultiPartClient): + await client.form_data.file_array_and_basic( + models.ComplexPartsRequest( + id="123", + address=models.Address(city="X"), + pictures=[ + open(str(PNG), "rb"), + open(str(PNG), "rb"), + ], + profile_image=open(str(JPG), "rb"), + ) + ) + + +@pytest.mark.asyncio +async def test_json_part(client: MultiPartClient): + await client.form_data.json_part( + models.JsonPartRequest( + address=models.Address(city="X"), + profile_image=open(str(JPG), "rb"), + ) + ) + + +@pytest.mark.asyncio +async def test_multi_binary_parts(client: MultiPartClient): + await client.form_data.multi_binary_parts( + models.MultiBinaryPartsRequest( + profile_image=open(str(JPG), "rb"), + picture=open(str(PNG), "rb"), + ) + ) + await client.form_data.multi_binary_parts( + models.MultiBinaryPartsRequest( + profile_image=open(str(JPG), "rb"), + ) + ) + + 
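+# The form_data.http_parts tests below cover multipart payloads modeled with HttpPart:
+# file parts are passed either as bare binary streams or as (filename, data, content_type)
+# tuples when the file name and content type need to be pinned explicitly.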
+@pytest.mark.asyncio +async def test_file_with_http_part_specific_content_type(client: MultiPartClient): + await client.form_data.http_parts.content_type.image_jpeg_content_type( + models.FileWithHttpPartSpecificContentTypeRequest( + profile_image=("hello.jpg", open(str(JPG), "rb"), "image/jpg"), + ) + ) + + +@pytest.mark.asyncio +async def test_file_with_http_part_required_content_type(client: MultiPartClient): + await client.form_data.http_parts.content_type.required_content_type( + models.FileWithHttpPartRequiredContentTypeRequest( + profile_image=open(str(JPG), "rb"), + ) + ) + + +@pytest.mark.asyncio +async def test_file_with_http_part_optional_content_type(client: MultiPartClient): + # call twice: one with content type, one without + await client.form_data.http_parts.content_type.optional_content_type( + models.FileWithHttpPartOptionalContentTypeRequest( + profile_image=("hello.jpg", open(str(JPG), "rb").read()), + ) + ) + await client.form_data.http_parts.content_type.optional_content_type( + models.FileWithHttpPartOptionalContentTypeRequest( + profile_image=( + "hello.jpg", + open(str(JPG), "rb").read(), + "application/octet-stream", + ), + ) + ) + + +@pytest.mark.asyncio +async def test_complex_with_http_part(client: MultiPartClient): + await client.form_data.http_parts.json_array_and_file_array( + models.ComplexHttpPartsModelRequest( + id="123", + previous_addresses=[ + models.Address(city="Y"), + models.Address(city="Z"), + ], + address=models.Address(city="X"), + pictures=[ + open(str(PNG), "rb"), + open(str(PNG), "rb"), + ], + profile_image=open(str(JPG), "rb"), + ) + ) + + +@pytest.mark.asyncio +async def test_http_parts_non_string_float(client: MultiPartClient): + await client.form_data.http_parts.non_string.float(models.FloatRequest(temperature=0.5)) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_pageable_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_pageable_async.py new file mode 100644 index 0000000000..b3b9c57c80 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_payload_pageable_async.py @@ -0,0 +1,19 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from payload.pageable.aio import PageableClient + + +@pytest.fixture +async def client(): + async with PageableClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_list(client: PageableClient): + result = [p async for p in client.list(maxpagesize=3)] + assert len(result) == 4 diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_resiliency_srv_driven_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_resiliency_srv_driven_async.py new file mode 100644 index 0000000000..a74e6805fb --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_resiliency_srv_driven_async.py @@ -0,0 +1,128 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from resiliency.srv.driven1.aio import ResiliencyServiceDrivenClient as V1Client +from resiliency.srv.driven2.aio import ResiliencyServiceDrivenClient as V2Client + + +def get_v1_client(service_deployment_version: str, api_version: str = "v1") -> V1Client: + return V1Client( + endpoint="http://localhost:3000", + service_deployment_version=service_deployment_version, + api_version=api_version, + ) + + +def get_v2_client(service_deployment_version: str, api_version: str = "v2") -> V2Client: + return V2Client( + endpoint="http://localhost:3000", + service_deployment_version=service_deployment_version, + api_version=api_version, + ) + + +@pytest.mark.asyncio +async def test_add_optional_param_from_none(): + # old client to old service with api version v1 + async with V1Client(endpoint="http://localhost:3000", service_deployment_version="v1") as client: + await client.from_none() + + # old client to new service with api version v1 + async with V1Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + await client.from_none() + + # new client to new service with api version v1 + async with V2Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v1", + ) as client: + await client.from_none() + + # new client to new service with api version v2 + async with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + await client.from_none(new_parameter="new") + + +@pytest.mark.asyncio +async def test_add_optional_param_from_one_required(): + # old client to old service with api version v1 + async with V1Client(endpoint="http://localhost:3000", service_deployment_version="v1") as client: + await client.from_one_required(parameter="required") + + # old client to new service with api version v1 + async with V1Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + await client.from_one_required(parameter="required") + + # new client to new service with api version v1 + async with V2Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v1", + ) as client: + await client.from_one_required(parameter="required") + + # new client to new service with api version v2 + async with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + await client.from_one_required(parameter="required", new_parameter="new") + + +@pytest.mark.asyncio +async def test_add_optional_param_from_one_optional(): + # old client to old service with api version v1 + async with V1Client(endpoint="http://localhost:3000", service_deployment_version="v1") as client: + await client.from_one_optional(parameter="optional") + + # old client to new service with api version v1 + async with V1Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + await client.from_one_optional(parameter="optional") + + # new client to new service with api version v1 + async with V2Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v1", + ) as client: + await client.from_one_optional(parameter="optional") + + # new client to new service with api version v2 + async with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + await client.from_one_optional(parameter="optional", new_parameter="new") + + +@pytest.mark.asyncio +async def 
test_break_the_glass(core_library): + request = core_library.rest.HttpRequest(method="DELETE", url="/add-operation") + async with V1Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v2", + ) as client: + response = await client.send_request(request) + response.raise_for_status() + + +@pytest.mark.asyncio +async def test_add_operation(): + async with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + await client.add_operation() + + +@pytest.mark.parametrize( + "func_name, params", + [ + ("from_none", {"new_parameter": "new"}), + ("from_one_optional", {"parameter": "optional", "new_parameter": "new"}), + ("from_one_required", {"parameter": "required", "new_parameter": "new"}), + ("add_operation", {}), + ], +) +@pytest.mark.asyncio +async def test_new_client_with_old_apiversion_call_new_parameter(func_name, params): + client = get_v2_client(service_deployment_version="v2", api_version="v1") + with pytest.raises(ValueError) as ex: + await getattr(client, func_name)(**params) + assert "is not available in API version" in str(ex.value) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_serialization_encoded_name_json_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_serialization_encoded_name_json_async.py new file mode 100644 index 0000000000..70bfcc77ad --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_serialization_encoded_name_json_async.py @@ -0,0 +1,24 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from serialization.encodedname.json.aio import JsonClient +from serialization.encodedname.json import models + + +@pytest.fixture +async def client(): + async with JsonClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_property_send(client: JsonClient): + await client.property.send(models.JsonEncodedNameModel(default_name=True)) + + +@pytest.mark.asyncio +async def test_property_get(client: JsonClient): + assert (await client.property.get()).default_name diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_endpoint_not_defined_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_endpoint_not_defined_async.py new file mode 100644 index 0000000000..148e61dd5c --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_endpoint_not_defined_async.py @@ -0,0 +1,18 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from server.endpoint.notdefined.aio import NotDefinedClient + + +@pytest.fixture +async def client(): + async with NotDefinedClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_valid(client: NotDefinedClient): + assert await client.valid() is True diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_path_multiple_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_path_multiple_async.py new file mode 100644 index 0000000000..bcdc53dd4f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_path_multiple_async.py @@ -0,0 +1,25 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from server.path.multiple.aio import MultipleClient + + +@pytest.fixture +async def client(): + async with MultipleClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_no_operation_params(client: MultipleClient): + # await client.no_operation_params() + pass + + +@pytest.mark.asyncio +async def test_with_operation_path_param(client: MultipleClient): + # await client.with_operation_path_param(keyword="test") + pass diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_path_single_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_path_single_async.py new file mode 100644 index 0000000000..d6ebe479f8 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_path_single_async.py @@ -0,0 +1,18 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from server.path.single.aio import SingleClient + + +@pytest.fixture +async def client(): + async with SingleClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_my_op(client): + assert await client.my_op() is True diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_versions_not_versioned_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_versions_not_versioned_async.py new file mode 100644 index 0000000000..cc17b0f3f2 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_versions_not_versioned_async.py @@ -0,0 +1,28 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from server.versions.notversioned.aio import NotVersionedClient + + +@pytest.fixture +async def client(): + async with NotVersionedClient(endpoint="http://localhost:3000", api_version="v1.0") as client: + yield client + + +@pytest.mark.asyncio +async def test_without_api_version(client: NotVersionedClient): + await client.without_api_version() + + +@pytest.mark.asyncio +async def test_with_query_api_version(client: NotVersionedClient): + await client.with_query_api_version() + + +@pytest.mark.asyncio +async def test_with_path_api_version(client: NotVersionedClient): + await client.with_path_api_version() diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_versions_versioned_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_versions_versioned_async.py new file mode 100644 index 0000000000..53e7d194f4 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_server_versions_versioned_async.py @@ -0,0 +1,34 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from server.versions.versioned.aio import VersionedClient + + +@pytest.fixture +async def client(): + async with VersionedClient(endpoint="http://localhost:3000") as client: + yield client + + +@pytest.mark.asyncio +async def test_without_api_version(client: VersionedClient): + await client.without_api_version() + + +@pytest.mark.asyncio +async def test_with_query_api_version(client: VersionedClient): + await client.with_query_api_version() + + +@pytest.mark.asyncio +async def test_with_path_api_version(client: VersionedClient): + await client.with_path_api_version() + + +@pytest.mark.asyncio +async def test_with_query_old_api_version(): + async with VersionedClient(endpoint="http://localhost:3000", api_version="2021-01-01-preview") as client: + await client.with_query_old_api_version() diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_headers_conditional_request_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_headers_conditional_request_async.py new file mode 100644 index 0000000000..e65b9d4e33 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_headers_conditional_request_async.py @@ -0,0 +1,38 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +import datetime +from specialheaders.conditionalrequest.aio import ConditionalRequestClient + + +@pytest.fixture +async def client(): + async with ConditionalRequestClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_post_if_match(core_library, client: ConditionalRequestClient): + await client.post_if_match(etag="valid", match_condition=core_library.MatchConditions.IfNotModified) + + +@pytest.mark.asyncio +async def test_post_if_none_match(core_library, client: ConditionalRequestClient): + await client.post_if_none_match(etag="invalid", match_condition=core_library.MatchConditions.IfModified) + + +@pytest.mark.asyncio +async def test_head_if_modified_since(client: ConditionalRequestClient): + await client.head_if_modified_since( + if_modified_since=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc) + ) + + +@pytest.mark.asyncio +async def test_post_if_unmodified_since(client: ConditionalRequestClient): + await client.post_if_unmodified_since( + if_unmodified_since=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc) + ) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_headers_repeatability_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_headers_repeatability_async.py new file mode 100644 index 0000000000..dbf74414e1 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_headers_repeatability_async.py @@ -0,0 +1,19 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specialheaders.repeatability.aio import RepeatabilityClient + + +@pytest.fixture +async def client(): + async with RepeatabilityClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_immediate_success(client: RepeatabilityClient): + cls = lambda x, y, z: z + assert (await client.immediate_success(cls=cls))["Repeatability-Result"] == "accepted" diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_words_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_words_async.py new file mode 100644 index 0000000000..52cc99289e --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_special_words_async.py @@ -0,0 +1,42 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specialwords.aio import SpecialWordsClient +from specialwords import models + + +@pytest.fixture +async def client(): + async with SpecialWordsClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_operations(client: SpecialWordsClient, special_words): + for sw in special_words: + suffix = "" if sw == "constructor" else "_method" + await getattr(client.operations, sw + suffix)() + + +@pytest.mark.asyncio +async def test_parameter(client: SpecialWordsClient, special_words): + for sw in special_words: + suffix = "" if sw == "constructor" else "_parameter" + await getattr(client.parameters, "with_" + sw)(**{sw + suffix: "ok"}) + await client.parameters.with_cancellation_token(cancellation_token="ok") + + +@pytest.mark.asyncio +async def test_model(client: SpecialWordsClient, special_words): + for sw in special_words: + suffix = "" if sw == "constructor" else "Model" + model = getattr(models, sw.capitalize() + suffix) + await getattr(client.models, "with_" + sw)(model(name="ok")) + + +@pytest.mark.asyncio +async def test_model_properties(client: SpecialWordsClient): + await client.model_properties.same_as_model(models.SameAsModel(same_as_model="ok")) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_array_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_array_async.py new file mode 100644 index 0000000000..26573428b2 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_array_async.py @@ -0,0 +1,118 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import pytest +import isodate +from typetest.array.aio import ArrayClient +from typetest.array import models + + +@pytest.fixture +async def client(): + async with ArrayClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_boolean_value(client: ArrayClient): + assert await client.boolean_value.get() == [True, False] + await client.boolean_value.put([True, False]) + + +@pytest.mark.asyncio +async def test_datetime_value(client: ArrayClient): + assert await client.datetime_value.get() == [isodate.parse_datetime("2022-08-26T18:38:00Z")] + await client.datetime_value.put([isodate.parse_datetime("2022-08-26T18:38:00Z")]) + + +@pytest.mark.asyncio +async def test_duration_value(client: ArrayClient): + assert await client.duration_value.get() == [isodate.parse_duration("P123DT22H14M12.011S")] + await client.duration_value.put([isodate.parse_duration("P123DT22H14M12.011S")]) + + +@pytest.mark.asyncio +async def test_float32_value(client: ArrayClient): + assert await client.float32_value.get() == [43.125] + await client.float32_value.put([43.125]) + + +@pytest.mark.asyncio +async def test_int32_value(client: ArrayClient): + assert await client.int32_value.get() == [1, 2] + await client.int32_value.put([1, 2]) + + +@pytest.mark.asyncio +async def test_int64_value(client: ArrayClient): + assert await client.int64_value.get() == [2**53 - 1, -(2**53 - 1)] + await client.int64_value.put([2**53 - 1, -(2**53 - 1)]) + + +@pytest.mark.asyncio +async def test_model_value(client: ArrayClient): + assert await client.model_value.get() == [ + models.InnerModel(property="hello"), + models.InnerModel(property="world"), + ] + await client.model_value.put( + [ + models.InnerModel(property="hello"), + models.InnerModel(property="world"), + ] + ) + + +@pytest.mark.asyncio +async def test_nullable_boolean_value(client: ArrayClient): + assert await client.nullable_boolean_value.get() == [True, None, False] + await client.nullable_boolean_value.put([True, None, False]) + + +@pytest.mark.asyncio +async def test_nullable_float_value(client: ArrayClient): + assert await client.nullable_float_value.get() == [1.25, None, 3.0] + await client.nullable_float_value.put([1.25, None, 3.0]) + + +@pytest.mark.asyncio +async def test_nullable_int32_value(client: ArrayClient): + assert await client.nullable_int32_value.get() == [1, None, 3] + await client.nullable_int32_value.put([1, None, 3]) + + +@pytest.mark.asyncio +async def test_nullable_model_value(client: ArrayClient): + assert await client.nullable_model_value.get() == [ + models.InnerModel(property="hello"), + None, + models.InnerModel(property="world"), + ] + await client.nullable_model_value.put( + [ + models.InnerModel(property="hello"), + None, + models.InnerModel(property="world"), + ] + ) + + +@pytest.mark.asyncio +async def test_nullable_string_value(client: ArrayClient): + assert await client.nullable_string_value.get() == ["hello", None, "world"] + await client.nullable_string_value.put(["hello", None, "world"]) + + +@pytest.mark.asyncio +async def test_string_value(client: ArrayClient): + assert await client.string_value.get() == ["hello", ""] + await client.string_value.put(["hello", ""]) + + +@pytest.mark.asyncio +async def test_unknown_value(client: ArrayClient): + assert await client.unknown_value.get() == [1, "hello", None] + await client.unknown_value.put([1, "hello", None]) diff --git 
a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_dictionary_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_dictionary_async.py new file mode 100644 index 0000000000..9549207957 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_dictionary_async.py @@ -0,0 +1,101 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.dictionary import models +from typetest.dictionary.aio import DictionaryClient +import isodate + + +@pytest.fixture +async def client(): + async with DictionaryClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_boolean_value(client: DictionaryClient): + value = {"k1": True, "k2": False} + assert await client.boolean_value.get() == value + await client.boolean_value.put(value) + + +@pytest.mark.asyncio +async def test_datetime_value(client: DictionaryClient): + value = {"k1": isodate.parse_datetime("2022-08-26T18:38:00Z")} + assert await client.datetime_value.get() == value + await client.datetime_value.put(value) + + +@pytest.mark.asyncio +async def test_duration_value(client: DictionaryClient): + value = {"k1": isodate.parse_duration("P123DT22H14M12.011S")} + assert await client.duration_value.get() == value + await client.duration_value.put(value) + + +@pytest.mark.asyncio +async def test_float32_value(client: DictionaryClient): + value = {"k1": 43.125} + assert await client.float32_value.get() == value + await client.float32_value.put(value) + + +@pytest.mark.asyncio +async def test_int32_value(client: DictionaryClient): + value = {"k1": 1, "k2": 2} + assert await client.int32_value.get() == value + await client.int32_value.put(value) + + +@pytest.mark.asyncio +async def test_int64_value(client: DictionaryClient): + value = {"k1": 2**53 - 1, "k2": -(2**53 - 1)} + assert await client.int64_value.get() == value + await client.int64_value.put(value) + + +@pytest.mark.asyncio +async def test_model_value(client: DictionaryClient): + value = { + "k1": models.InnerModel(property="hello"), + "k2": models.InnerModel(property="world"), + } + assert await client.model_value.get() == value + await client.model_value.put(value) + + +@pytest.mark.asyncio +async def test_nullable_float_value(client: DictionaryClient): + value = {"k1": 1.25, "k2": 0.5, "k3": None} + assert await client.nullable_float_value.get() == value + await client.nullable_float_value.put(value) + + +@pytest.mark.asyncio +async def test_recursive_model_value(client: DictionaryClient): + value = { + "k1": models.InnerModel(property="hello", children={}), + "k2": models.InnerModel( + property="world", + children={"k2.1": models.InnerModel(property="inner world")}, + ), + } + assert await client.recursive_model_value.get() == value + await client.recursive_model_value.put(value) + + +@pytest.mark.asyncio +async def test_string_value(client: DictionaryClient): + value = {"k1": "hello", "k2": ""} + assert await client.string_value.get() == value + await client.string_value.put(value) + + +@pytest.mark.asyncio +async def test_unknown_value(client: DictionaryClient): + value = {"k1": 1, "k2": "hello", "k3": None} + assert await client.unknown_value.get() == value + await 
client.unknown_value.put(value) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_enum_extensible_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_enum_extensible_async.py new file mode 100644 index 0000000000..75fca822dd --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_enum_extensible_async.py @@ -0,0 +1,25 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.enum.extensible import models, aio + + +@pytest.fixture +async def client(): + async with aio.ExtensibleClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_known_value(client): + assert await client.string.get_known_value() == models.DaysOfWeekExtensibleEnum.MONDAY + await client.string.put_known_value(models.DaysOfWeekExtensibleEnum.MONDAY) + + +@pytest.mark.asyncio +async def test_unknown_value(client): + assert await client.string.get_unknown_value() == "Weekend" + await client.string.put_unknown_value("Weekend") diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_enum_fixed_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_enum_fixed_async.py new file mode 100644 index 0000000000..de17f194b6 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_enum_fixed_async.py @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.enum.fixed import aio, models + + +@pytest.fixture +async def client(): + async with aio.FixedClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_known_value(client): + assert await client.string.get_known_value() == models.DaysOfWeekEnum.MONDAY + await client.string.put_known_value(models.DaysOfWeekEnum.MONDAY) + + +@pytest.mark.asyncio +async def test_unknown_value(client: aio.FixedClient, core_library): + try: + await client.string.put_unknown_value("Weekend") + except core_library.exceptions.HttpResponseError as err: + assert err.status_code == 500 diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_empty_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_empty_async.py new file mode 100644 index 0000000000..b5518c5e90 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_empty_async.py @@ -0,0 +1,32 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.model.empty.aio import EmptyClient +from typetest.model.empty.models import EmptyInput, EmptyOutput, EmptyInputOutput + + +@pytest.fixture +async def client(): + async with EmptyClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_put(client: EmptyClient): + await client.put_empty(EmptyInput()) + await client.put_empty({}) + + +@pytest.mark.asyncio +async def test_get(client: EmptyClient): + assert await client.get_empty() == EmptyOutput() + assert await client.get_empty() == {} + + +@pytest.mark.asyncio +async def test_post_round(client: EmptyClient): + assert await client.post_round_trip_empty(EmptyInputOutput()) == EmptyInputOutput() + assert await client.post_round_trip_empty({}) == {} diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_enum_discriminator_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_enum_discriminator_async.py new file mode 100644 index 0000000000..0a72d5465e --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_enum_discriminator_async.py @@ -0,0 +1,70 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.enumdiscriminator.aio import EnumDiscriminatorClient +from typetest.model.enumdiscriminator import models + + +@pytest.fixture +async def client(): + async with EnumDiscriminatorClient() as client: + yield client + + +@pytest.fixture +def valid_body(): + return models.Golden(weight=10) + + +@pytest.fixture +def valid_fixed_body(): + return models.Cobra(length=10) + + +@pytest.mark.asyncio +async def test_get_extensible_model(client: EnumDiscriminatorClient, valid_body: models.Dog): + assert await client.get_extensible_model() == valid_body + assert isinstance(await client.get_extensible_model(), models.Golden) + + +@pytest.mark.asyncio +async def test_put_extensible_model(client: EnumDiscriminatorClient, valid_body: models.Dog): + await client.put_extensible_model(valid_body) + + +@pytest.mark.asyncio +async def test_get_extensible_model_missing_discriminator( + client: EnumDiscriminatorClient, +): + assert await client.get_extensible_model_missing_discriminator() == models.Dog(weight=10) + + +@pytest.mark.asyncio +async def test_get_extensible_model_wrong_discriminator( + client: EnumDiscriminatorClient, +): + assert await client.get_extensible_model_wrong_discriminator() == models.Dog(weight=8, kind="wrongKind") + + +@pytest.mark.asyncio +async def test_get_fixed_model(client: EnumDiscriminatorClient, valid_fixed_body: models.Snake): + assert await client.get_fixed_model() == valid_fixed_body + assert isinstance(await client.get_fixed_model(), models.Cobra) + + +@pytest.mark.asyncio +async def test_put_fixed_model(client: EnumDiscriminatorClient, valid_fixed_body: models.Snake): + await client.put_fixed_model(valid_fixed_body) + + +@pytest.mark.asyncio +async def test_get_fixed_model_missing_discriminator(client: EnumDiscriminatorClient): + assert await client.get_fixed_model_missing_discriminator() == models.Snake(length=10) + + +@pytest.mark.asyncio 
+async def test_get_fixed_model_wrong_discriminator(client: EnumDiscriminatorClient): + assert await client.get_fixed_model_wrong_discriminator() == models.Snake(length=8, kind="wrongKind") diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_nested_discriminator_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_nested_discriminator_async.py new file mode 100644 index 0000000000..c641f25eb8 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_nested_discriminator_async.py @@ -0,0 +1,85 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.nesteddiscriminator.aio import NestedDiscriminatorClient +from typetest.model.nesteddiscriminator.models import GoblinShark, Salmon, Fish + + +@pytest.fixture +async def client(): + async with NestedDiscriminatorClient() as client: + yield client + + +@pytest.fixture +async def valid_body(): + return GoblinShark(age=1) + + +@pytest.mark.asyncio +async def test_get_model(client, valid_body): + assert await client.get_model() == valid_body + assert isinstance(await client.get_model(), GoblinShark) + + +@pytest.mark.asyncio +async def test_put_model(client, valid_body): + await client.put_model(valid_body) + + +@pytest.fixture +async def valid_recursive_body(): + return Salmon( + { + "age": 1, + "kind": "salmon", + "partner": {"age": 2, "kind": "shark", "sharktype": "saw"}, + "friends": [ + { + "age": 2, + "kind": "salmon", + "partner": {"age": 3, "kind": "salmon"}, + "hate": { + "key1": {"age": 4, "kind": "salmon"}, + "key2": {"age": 2, "kind": "shark", "sharktype": "goblin"}, + }, + }, + {"age": 3, "kind": "shark", "sharktype": "goblin"}, + ], + "hate": { + "key3": {"age": 3, "kind": "shark", "sharktype": "saw"}, + "key4": { + "age": 2, + "kind": "salmon", + "friends": [ + {"age": 1, "kind": "salmon"}, + {"age": 4, "kind": "shark", "sharktype": "goblin"}, + ], + }, + }, + } + ) + + +@pytest.mark.asyncio +async def test_get_recursive_model(client, valid_recursive_body): + assert valid_recursive_body == await client.get_recursive_model() + assert isinstance(await client.get_recursive_model(), Salmon) + + +@pytest.mark.asyncio +async def test_put_recursive_model(client, valid_recursive_body): + await client.put_recursive_model(valid_recursive_body) + + +@pytest.mark.asyncio +async def test_get_missing_discriminator(client): + assert await client.get_missing_discriminator() == Fish(age=1) + + +@pytest.mark.asyncio +async def test_get_wrong_discriminator(client): + assert await client.get_wrong_discriminator() == Fish(age=1, kind="wrongKind") diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_not_discriminated_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_not_discriminated_async.py new file mode 100644 index 0000000000..7e7ce09695 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_not_discriminated_async.py @@ -0,0 +1,34 @@ +# 
------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.notdiscriminated.aio import NotDiscriminatedClient +from typetest.model.notdiscriminated.models import Siamese + + +@pytest.fixture +async def client(): + async with NotDiscriminatedClient() as client: + yield client + + +@pytest.fixture +async def valid_body(): + return Siamese(name="abc", age=32, smart=True) + + +@pytest.mark.asyncio +async def test_get_valid(client, valid_body): + assert await client.get_valid() == valid_body + + +@pytest.mark.asyncio +async def test_post_valid(client, valid_body): + await client.post_valid(valid_body) + + +@pytest.mark.asyncio +async def test_put_valid(client, valid_body): + assert valid_body == await client.put_valid(valid_body) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_recursive_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_recursive_async.py new file mode 100644 index 0000000000..aea6f1bb9e --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_recursive_async.py @@ -0,0 +1,34 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.recursive.aio import RecursiveClient +from typetest.model.recursive.models import Extension + + +@pytest.fixture +async def client(): + async with RecursiveClient() as client: + yield client + + +@pytest.fixture +async def expected(): + return Extension( + { + "level": 0, + "extension": [{"level": 1, "extension": [{"level": 2}]}, {"level": 1}], + } + ) + + +@pytest.mark.asyncio +async def test_put(client: RecursiveClient, expected: Extension): + await client.put(expected) + + +@pytest.mark.asyncio +async def test_get(client: RecursiveClient, expected: Extension): + assert await client.get() == expected diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_single_discriminator_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_single_discriminator_async.py new file mode 100644 index 0000000000..dc98cd81c9 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_inheritance_single_discriminator_async.py @@ -0,0 +1,67 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.model.singlediscriminator.aio import SingleDiscriminatorClient +from typetest.model.singlediscriminator.models import Sparrow, Eagle, Bird, Dinosaur + + +@pytest.fixture +async def client(): + async with SingleDiscriminatorClient() as client: + yield client + + +@pytest.fixture +async def valid_body(): + return Sparrow(wingspan=1) + + +@pytest.mark.asyncio +async def test_get_model(client, valid_body): + assert await client.get_model() == valid_body + + +@pytest.mark.asyncio +async def test_put_model(client, valid_body): + await client.put_model(valid_body) + + +@pytest.fixture +async def recursive_body(): + return Eagle( + { + "wingspan": 5, + "kind": "eagle", + "partner": {"wingspan": 2, "kind": "goose"}, + "friends": [{"wingspan": 2, "kind": "seagull"}], + "hate": {"key3": {"wingspan": 1, "kind": "sparrow"}}, + } + ) + + +@pytest.mark.asyncio +async def test_get_recursive_model(client, recursive_body): + assert await client.get_recursive_model() == recursive_body + + +@pytest.mark.asyncio +async def test_put_recursive_model(client, recursive_body): + await client.put_recursive_model(recursive_body) + + +@pytest.mark.asyncio +async def test_get_missing_discriminator(client): + assert await client.get_missing_discriminator() == Bird(wingspan=1) + + +@pytest.mark.asyncio +async def test_get_wrong_discriminator(client): + assert await client.get_wrong_discriminator() == Bird(wingspan=1, kind="wrongKind") + + +@pytest.mark.asyncio +async def test_get_legacy_model(client): + assert await client.get_legacy_model() == Dinosaur(size=20, kind="t-rex") diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_usage_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_usage_async.py new file mode 100644 index 0000000000..bcebeaec20 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_usage_async.py @@ -0,0 +1,32 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+import pytest
+from typetest.model.usage import models
+from typetest.model.usage.aio import UsageClient
+
+
+@pytest.fixture
+async def client():
+    async with UsageClient() as client:
+        yield client
+
+
+@pytest.mark.asyncio
+async def test_input(client: UsageClient):
+    input = models.InputRecord(required_prop="example-value")
+    assert await client.input(input) is None
+
+
+@pytest.mark.asyncio
+async def test_output(client: UsageClient):
+    output = models.OutputRecord(required_prop="example-value")
+    assert output == await client.output()
+
+
+@pytest.mark.asyncio
+async def test_input_and_output(client: UsageClient):
+    input_output = models.InputOutputRecord(required_prop="example-value")
+    assert input_output == await client.input_and_output(input_output)
diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_visibility_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_visibility_async.py
new file mode 100644
index 0000000000..e73d7f3030
--- /dev/null
+++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_model_visibility_async.py
@@ -0,0 +1,47 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import pytest
+from typetest.model.visibility.aio import VisibilityClient
+from typetest.model.visibility import models
+
+
+@pytest.fixture
+async def client():
+    async with VisibilityClient() as client:
+        yield client
+
+
+@pytest.mark.asyncio
+async def test_get_model(client):
+    result = await client.get_model(models.VisibilityModel(query_prop=123))
+    assert result == models.VisibilityModel(read_prop="abc")
+
+
+@pytest.mark.asyncio
+async def test_put_model(client):
+    await client.put_model(models.VisibilityModel(create_prop=["foo", "bar"], update_prop=[1, 2]))
+
+
+@pytest.mark.asyncio
+async def test_patch_model(client):
+    await client.patch_model(models.VisibilityModel(update_prop=[1, 2]))
+
+
+@pytest.mark.asyncio
+async def test_post_model(client):
+    await client.post_model(models.VisibilityModel(create_prop=["foo", "bar"]))
+
+
+@pytest.mark.asyncio
+async def test_delete_model(client):
+    await client.delete_model(models.VisibilityModel(delete_prop=True))
+
+
+@pytest.mark.asyncio
+async def test_put_read_only_model(client):
+    await client.put_read_only_model(
+        models.ReadOnlyModel(optional_nullable_int_list=[1, 2], optional_string_record={"foo": "bar"})
+    )
diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_additionalproperties_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_additionalproperties_async.py
new file mode 100644
index 0000000000..bd773f6a47
--- /dev/null
+++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_additionalproperties_async.py
@@ -0,0 +1,352 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- +import pytest +from typetest.property.additionalproperties import models +from typetest.property.additionalproperties.aio import AdditionalPropertiesClient + + +@pytest.fixture +async def client(): + async with AdditionalPropertiesClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_extends_different_spread_float(client: AdditionalPropertiesClient): + body = models.DifferentSpreadFloatDerived({"name": "abc", "prop": 43.125, "derivedProp": 43.125}) + assert await client.extends_different_spread_float.get() == body + await client.extends_different_spread_float.put(body) + + +@pytest.mark.asyncio +async def test_extends_different_spread_model(client: AdditionalPropertiesClient): + body = models.DifferentSpreadModelDerived( + {"knownProp": "abc", "prop": {"state": "ok"}, "derivedProp": {"state": "ok"}} + ) + assert await client.extends_different_spread_model.get() == body + await client.extends_different_spread_model.put(body) + + +@pytest.mark.asyncio +async def test_extends_different_spread_model_array(client: AdditionalPropertiesClient): + body = models.DifferentSpreadModelArrayDerived( + { + "knownProp": "abc", + "prop": [{"state": "ok"}, {"state": "ok"}], + "derivedProp": [{"state": "ok"}, {"state": "ok"}], + } + ) + assert await client.extends_different_spread_model_array.get() == body + await client.extends_different_spread_model_array.put(body) + + +@pytest.mark.asyncio +async def test_extends_different_spread_string(client: AdditionalPropertiesClient): + body = models.DifferentSpreadStringDerived({"id": 43.125, "prop": "abc", "derivedProp": "abc"}) + assert await client.extends_different_spread_string.get() == body + await client.extends_different_spread_string.put(body) + + +@pytest.mark.asyncio +async def test_extends_float(client: AdditionalPropertiesClient): + body = models.ExtendsFloatAdditionalProperties({"id": 43.125, "prop": 43.125}) + assert await client.extends_float.get() == body + await client.extends_float.put(body) + + +@pytest.mark.asyncio +async def test_extends_model(client: AdditionalPropertiesClient): + body = models.ExtendsModelAdditionalProperties({"knownProp": {"state": "ok"}, "prop": {"state": "ok"}}) + assert await client.extends_model.get() == body + await client.extends_model.put(body) + + +@pytest.mark.asyncio +async def test_extends_model_array(client: AdditionalPropertiesClient): + body = models.ExtendsModelArrayAdditionalProperties( + { + "knownProp": [{"state": "ok"}, {"state": "ok"}], + "prop": [{"state": "ok"}, {"state": "ok"}], + } + ) + assert await client.extends_model_array.get() == body + await client.extends_model_array.put(body) + + +@pytest.mark.asyncio +async def test_extends_string(client: AdditionalPropertiesClient): + body = models.ExtendsStringAdditionalProperties({"name": "ExtendsStringAdditionalProperties", "prop": "abc"}) + assert await client.extends_string.get() == body + await client.extends_string.put(body) + + +@pytest.mark.asyncio +async def test_extends_unknown(client: AdditionalPropertiesClient): + body = models.ExtendsUnknownAdditionalProperties( + { + "name": "ExtendsUnknownAdditionalProperties", + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert await client.extends_unknown.get() == body + await client.extends_unknown.put(body) + + +@pytest.mark.asyncio +async def test_extends_unknown_derived(client: AdditionalPropertiesClient): + body = models.ExtendsUnknownAdditionalPropertiesDerived( + { + "name": 
"ExtendsUnknownAdditionalProperties", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert await client.extends_unknown_derived.get() == body + await client.extends_unknown_derived.put(body) + + +@pytest.mark.asyncio +async def test_extends_unknown_discriminated(client: AdditionalPropertiesClient): + body = models.ExtendsUnknownAdditionalPropertiesDiscriminatedDerived( + { + "kind": "derived", + "name": "Derived", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert await client.extends_unknown_discriminated.get() == body + await client.extends_unknown_discriminated.put(body) + + +@pytest.mark.asyncio +async def test_is_float(client: AdditionalPropertiesClient): + body = models.IsFloatAdditionalProperties({"id": 43.125, "prop": 43.125}) + assert await client.is_float.get() == body + await client.is_float.put(body) + + +@pytest.mark.asyncio +async def test_is_model(client: AdditionalPropertiesClient): + body = models.IsModelAdditionalProperties({"knownProp": {"state": "ok"}, "prop": {"state": "ok"}}) + assert await client.is_model.get() == body + await client.is_model.put(body) + + +@pytest.mark.asyncio +async def test_is_model_array(client: AdditionalPropertiesClient): + body = models.IsModelArrayAdditionalProperties( + { + "knownProp": [{"state": "ok"}, {"state": "ok"}], + "prop": [{"state": "ok"}, {"state": "ok"}], + } + ) + assert await client.is_model_array.get() == body + await client.is_model_array.put(body) + + +@pytest.mark.asyncio +async def test_is_string(client: AdditionalPropertiesClient): + body = models.IsStringAdditionalProperties({"name": "IsStringAdditionalProperties", "prop": "abc"}) + assert await client.is_string.get() == body + await client.is_string.put(body) + + +@pytest.mark.asyncio +async def test_is_unknown(client: AdditionalPropertiesClient): + body = models.IsUnknownAdditionalProperties( + { + "name": "IsUnknownAdditionalProperties", + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert await client.is_unknown.get() == body + await client.is_unknown.put(body) + + +@pytest.mark.asyncio +async def test_is_unknown_derived(client: AdditionalPropertiesClient): + body = models.IsUnknownAdditionalPropertiesDerived( + { + "name": "IsUnknownAdditionalProperties", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert await client.is_unknown_derived.get() == body + await client.is_unknown_derived.put(body) + + +@pytest.mark.asyncio +async def test_is_unknown_discriminated(client: AdditionalPropertiesClient): + body = models.IsUnknownAdditionalPropertiesDiscriminatedDerived( + { + "kind": "derived", + "name": "Derived", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert await client.is_unknown_discriminated.get() == body + await client.is_unknown_discriminated.put(body) + + +@pytest.mark.asyncio +async def test_multiple_spread(client: AdditionalPropertiesClient): + body = {"flag": True, "prop1": "abc", "prop2": 43.125} + assert await client.multiple_spread.get() == body + await client.multiple_spread.put(body) + + +@pytest.mark.asyncio +async def test_spread_different_float(client: AdditionalPropertiesClient): + body = {"name": "abc", "prop": 43.125} + assert await client.spread_different_float.get() == body + await client.spread_different_float.put(body) + + +@pytest.mark.asyncio +async def test_spread_different_model(client: AdditionalPropertiesClient): + body = 
{"knownProp": "abc", "prop": {"state": "ok"}} + assert await client.spread_different_model.get() == body + await client.spread_different_model.put(body) + + +@pytest.mark.asyncio +async def test_spread_different_model_array(client: AdditionalPropertiesClient): + body = {"knownProp": "abc", "prop": [{"state": "ok"}, {"state": "ok"}]} + assert await client.spread_different_model_array.get() == body + await client.spread_different_model_array.put(body) + + +@pytest.mark.asyncio +async def test_spread_different_string(client: AdditionalPropertiesClient): + body = {"id": 43.125, "prop": "abc"} + assert await client.spread_different_string.get() == body + await client.spread_different_string.put(body) + + +@pytest.mark.asyncio +async def test_spread_model(client: AdditionalPropertiesClient): + body = {"knownProp": {"state": "ok"}, "prop": {"state": "ok"}} + assert await client.spread_model.get() == body + await client.spread_model.put(body) + + +@pytest.mark.asyncio +async def test_spread_model_array(client: AdditionalPropertiesClient): + body = { + "knownProp": [{"state": "ok"}, {"state": "ok"}], + "prop": [{"state": "ok"}, {"state": "ok"}], + } + assert await client.spread_model_array.get() == body + await client.spread_model_array.put(body) + + +@pytest.mark.asyncio +async def test_spread_record_discriminated_union(client: AdditionalPropertiesClient): + body = { + "name": "abc", + "prop1": {"fooProp": "abc", "kind": "kind0"}, + "prop2": { + "end": "2021-01-02T00:00:00Z", + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + }, + } + assert await client.spread_record_discriminated_union.get() == body + await client.spread_record_discriminated_union.put(body) + + +@pytest.mark.asyncio +async def test_spread_record_non_discriminated_union( + client: AdditionalPropertiesClient, +): + body = { + "name": "abc", + "prop1": {"kind": "kind0", "fooProp": "abc"}, + "prop2": { + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + "end": "2021-01-02T00:00:00Z", + }, + } + assert await client.spread_record_non_discriminated_union.get() == body + await client.spread_record_non_discriminated_union.put(body) + + +@pytest.mark.asyncio +async def test_spread_record_non_discriminated_union2( + client: AdditionalPropertiesClient, +): + body = { + "name": "abc", + "prop1": {"kind": "kind1", "start": "2021-01-01T00:00:00Z"}, + "prop2": { + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + "end": "2021-01-02T00:00:00Z", + }, + } + assert await client.spread_record_non_discriminated_union2.get() == body + await client.spread_record_non_discriminated_union2.put(body) + + +@pytest.mark.asyncio +async def test_spread_record_non_discriminated_union3( + client: AdditionalPropertiesClient, +): + body = { + "name": "abc", + "prop1": [ + {"kind": "kind1", "start": "2021-01-01T00:00:00Z"}, + {"kind": "kind1", "start": "2021-01-01T00:00:00Z"}, + ], + "prop2": { + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + "end": "2021-01-02T00:00:00Z", + }, + } + assert await client.spread_record_non_discriminated_union3.get() == body + await client.spread_record_non_discriminated_union3.put(body) + + +@pytest.mark.asyncio +async def test_spread_record_union(client: AdditionalPropertiesClient): + body = {"flag": True, "prop1": "abc", "prop2": 43.125} + assert await client.spread_record_union.get() == body + await client.spread_record_union.put(body) + + +@pytest.mark.asyncio +async def test_spread_string(client: AdditionalPropertiesClient): + body = {"name": "SpreadSpringRecord", "prop": "abc"} + assert await 
client.spread_string.get() == body + await client.spread_string.put(body) + + +@pytest.mark.asyncio +async def test_spread_float(client: AdditionalPropertiesClient): + body = {"id": 43.125, "prop": 43.125} + assert await client.spread_float.get() == body + await client.spread_float.put(body) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_nullable_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_nullable_async.py new file mode 100644 index 0000000000..77c6aed8b4 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_nullable_async.py @@ -0,0 +1,110 @@ +# cspell: ignore Hdvcmxk +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import json +import pytest +from typetest.property.nullable import models +from typetest.property.nullable.aio import NullableClient +from typetest.property.nullable._model_base import ( # pylint: disable=protected-access + SdkJSONEncoder, +) + +try: + from corehttp.serialization import NULL +except ImportError: + from azure.core.serialization import NULL + + +@pytest.fixture +async def client(): + async with NullableClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_bytes(client: NullableClient): + non_null_model = models.BytesProperty(required_property="foo", nullable_property="aGVsbG8sIHdvcmxkIQ==") + non_model = models.BytesProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert await client.bytes.get_non_null() == non_null_model + assert (await client.bytes.get_null())["nullableProperty"] is None + await client.bytes.patch_non_null(body=non_null_model) + await client.bytes.patch_null(body=non_model) + + +@pytest.mark.asyncio +async def test_collections_byte(client: NullableClient): + non_null_model = models.CollectionsByteProperty( + required_property="foo", + nullable_property=["aGVsbG8sIHdvcmxkIQ==", "aGVsbG8sIHdvcmxkIQ=="], + ) + non_model = models.CollectionsByteProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert await client.collections_byte.get_non_null() == non_null_model + assert (await client.collections_byte.get_null())["nullableProperty"] is None + await client.collections_byte.patch_non_null(body=non_null_model) + await client.collections_byte.patch_null(body=non_model) + + +@pytest.mark.asyncio +async def test_collections_model(client: NullableClient): + non_null_model = models.CollectionsModelProperty( + required_property="foo", + nullable_property=[ + models.InnerModel(property="hello"), + models.InnerModel(property="world"), + ], + ) + non_model = models.CollectionsModelProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert await client.collections_model.get_non_null() == non_null_model + assert (await client.collections_model.get_null())["nullableProperty"] is None + await client.collections_model.patch_non_null(body=non_null_model) + await 
client.collections_model.patch_null(body=non_model) + + +@pytest.mark.asyncio +async def test_collections_string(client: NullableClient): + non_null_model = models.CollectionsStringProperty(required_property="foo", nullable_property=["hello", "world"]) + non_model = models.CollectionsStringProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert await client.collections_string.get_non_null() == non_null_model + assert (await client.collections_string.get_null())["nullableProperty"] is None + await client.collections_string.patch_non_null(body=non_null_model) + await client.collections_string.patch_null(body=non_model) + + +@pytest.mark.asyncio +async def test_datetime(client: NullableClient): + non_null_model = models.DatetimeProperty(required_property="foo", nullable_property="2022-08-26T18:38:00Z") + non_model = models.DatetimeProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert await client.datetime.get_non_null() == non_null_model + assert (await client.datetime.get_null())["nullableProperty"] is None + await client.datetime.patch_non_null(body=non_null_model) + await client.datetime.patch_null(body=non_model) + + +@pytest.mark.asyncio +async def test_duration(client: NullableClient): + non_null_model = models.DurationProperty(required_property="foo", nullable_property="P123DT22H14M12.011S") + non_model = models.DurationProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert await client.duration.get_non_null() == non_null_model + assert (await client.duration.get_null())["nullableProperty"] is None + await client.duration.patch_non_null(body=non_null_model) + await client.duration.patch_null(body=non_model) + + +@pytest.mark.asyncio +async def test_string(client: NullableClient): + non_null_model = models.StringProperty(required_property="foo", nullable_property="hello") + non_model = models.StringProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert await client.string.get_non_null() == non_null_model + assert (await client.string.get_null())["nullableProperty"] is None + await client.string.patch_non_null(body=non_null_model) + await client.string.patch_null(body=non_model) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_optional_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_optional_async.py new file mode 100644 index 0000000000..e7ec09d005 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_optional_async.py @@ -0,0 +1,197 @@ +# cspell: ignore Hdvcmxk +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Any +import pytest +from typetest.property.optional import models +from typetest.property.optional.aio import OptionalClient + + +@pytest.fixture +async def client(): + async with OptionalClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_boolean_literal(client): + body = models.BooleanLiteralProperty(property=True) + assert await client.boolean_literal.get_all() == body + assert await client.boolean_literal.get_default() == models.BooleanLiteralProperty() + await client.boolean_literal.put_all(body) + await client.boolean_literal.put_default(models.BooleanLiteralProperty()) + + +@pytest.mark.asyncio +async def test_bytes(client): + body = models.BytesProperty(property="aGVsbG8sIHdvcmxkIQ==") + assert await client.bytes.get_all() == body + assert await client.bytes.get_default() == models.BytesProperty() + await client.bytes.put_all(body) + await client.bytes.put_default(models.BytesProperty()) + + +@pytest.mark.asyncio +async def test_collections_byte(client): + body = models.CollectionsByteProperty(property=["aGVsbG8sIHdvcmxkIQ==", "aGVsbG8sIHdvcmxkIQ=="]) + assert await client.collections_byte.get_all() == body + assert await client.collections_byte.get_default() == models.CollectionsByteProperty() + await client.collections_byte.put_all(body) + await client.collections_byte.put_default(models.CollectionsByteProperty()) + + +@pytest.mark.asyncio +async def test_collections_model(client): + body = models.CollectionsModelProperty( + property=[ + models.StringProperty(property="hello"), + models.StringProperty(property="world"), + ] + ) + assert await client.collections_model.get_all() == body + assert await client.collections_model.get_default() == models.CollectionsModelProperty() + await client.collections_model.put_all(body) + await client.collections_model.put_default(models.CollectionsModelProperty()) + + +@pytest.mark.asyncio +async def test_datetime(client): + body = models.DatetimeProperty(property="2022-08-26T18:38:00Z") + assert await client.datetime.get_all() == body + assert await client.datetime.get_default() == models.DatetimeProperty() + await client.datetime.put_all(body) + await client.datetime.put_default(models.DatetimeProperty()) + + +@pytest.mark.asyncio +async def test_duration(client): + body = models.DurationProperty(property="P123DT22H14M12.011S") + assert await client.duration.get_all() == body + assert await client.duration.get_default() == models.DurationProperty() + await client.duration.put_all(body) + await client.duration.put_default(models.DurationProperty()) + + +@pytest.mark.asyncio +async def test_float_literal(client): + body = models.FloatLiteralProperty(property=1.25) + assert await client.float_literal.get_all() == body + assert await client.float_literal.get_default() == models.FloatLiteralProperty() + await client.float_literal.put_all(body) + await client.float_literal.put_default(models.FloatLiteralProperty()) + + +@pytest.mark.asyncio +async def test_int_literal(client): + body = models.IntLiteralProperty(property=1) + assert await client.int_literal.get_all() == body + assert await client.int_literal.get_default() == models.IntLiteralProperty() + await client.int_literal.put_all(body) + await client.int_literal.put_default(models.IntLiteralProperty()) + + +@pytest.mark.asyncio +async def test_plaindate(client): + body = models.PlainDateProperty(property="2022-12-12") + assert await client.plain_date.get_all() == body + + 
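+# plainDate/plainTime values travel on the wire as strings ("2022-12-12",
+# "13:06:12"); model equality compares the underlying JSON mapping, so these
+# tests can pass and assert the wire-format strings directly.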
+@pytest.mark.asyncio
+async def test_plaindate_get_default(client):
+    assert await client.plain_date.get_default() == models.PlainDateProperty()
+
+
+@pytest.mark.asyncio
+async def test_plaindate_put_all(client):
+    body = models.PlainDateProperty(property="2022-12-12")
+    await client.plain_date.put_all(body)
+
+
+@pytest.mark.asyncio
+async def test_plaindate_put_default(client):
+    await client.plain_date.put_default(models.PlainDateProperty())
+
+
+@pytest.mark.asyncio
+async def test_plaintime_get_all(client):
+    body = models.PlainTimeProperty(property="13:06:12")
+    assert await client.plain_time.get_all() == body
+
+
+@pytest.mark.asyncio
+async def test_plaintime_get_default(client):
+    assert await client.plain_time.get_default() == models.PlainTimeProperty()
+
+
+@pytest.mark.asyncio
+async def test_plaintime_put_all(client):
+    body = models.PlainTimeProperty(property="13:06:12")
+    await client.plain_time.put_all(body)
+
+
+@pytest.mark.asyncio
+async def test_plaintime_put_default(client):
+    await client.plain_time.put_default(models.PlainTimeProperty())
+
+
+@pytest.mark.asyncio
+async def test_required_and_optional(client):
+    all_body = {
+        "optionalProperty": "hello",
+        "requiredProperty": 42,
+    }
+    required_only_body = {
+        "requiredProperty": 42,
+    }
+    assert await client.required_and_optional.get_all() == all_body
+    assert await client.required_and_optional.get_required_only() == required_only_body
+    await client.required_and_optional.put_all(all_body)
+    await client.required_and_optional.put_required_only(required_only_body)
+
+
+@pytest.mark.asyncio
+async def test_string(client):
+    body = models.StringProperty(property="hello")
+    assert await client.string.get_all() == body
+    assert await client.string.get_default() == models.StringProperty()
+    await client.string.put_all(body)
+    await client.string.put_default(models.StringProperty())
+
+
+@pytest.mark.asyncio
+async def test_string_literal(client):
+    body = models.StringLiteralProperty(property="hello")
+    assert await client.string_literal.get_all() == body
+    assert await client.string_literal.get_default() == models.StringLiteralProperty()
+    await client.string_literal.put_all(body)
+    await client.string_literal.put_default(models.StringLiteralProperty())
+
+
+@pytest.mark.asyncio
+async def test_union_float_literal(client):
+    body = models.UnionFloatLiteralProperty(property=2.375)
+    assert await client.union_float_literal.get_all() == body
+    assert await client.union_float_literal.get_default() == models.UnionFloatLiteralProperty()
+    await client.union_float_literal.put_all(body)
+    await client.union_float_literal.put_default(models.UnionFloatLiteralProperty())
+
+
+@pytest.mark.asyncio
+async def test_union_int_literal(client):
+    body = models.UnionIntLiteralProperty(property=2)
+    assert await client.union_int_literal.get_all() == body
+    assert await client.union_int_literal.get_default() == models.UnionIntLiteralProperty()
+    await client.union_int_literal.put_all(body)
+    await client.union_int_literal.put_default(models.UnionIntLiteralProperty())
+
+
+@pytest.mark.asyncio
+async def test_union_string_literal(client):
+    body = models.UnionStringLiteralProperty(property="world")
+    assert await client.union_string_literal.get_all() == body
+    assert await client.union_string_literal.get_default() == models.UnionStringLiteralProperty()
+    await client.union_string_literal.put_all(body)
+    await client.union_string_literal.put_default(models.UnionStringLiteralProperty())
diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_valuetypes_async.py
b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_valuetypes_async.py new file mode 100644 index 0000000000..1b7566c573 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_property_valuetypes_async.py @@ -0,0 +1,315 @@ +# cspell: ignore Hdvcmxk +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import decimal + +import pytest +import datetime +from typetest.property.valuetypes import models +from typetest.property.valuetypes.aio import ValueTypesClient + + +@pytest.fixture +async def client(): + async with ValueTypesClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_boolean(client: ValueTypesClient): + body = models.BooleanProperty(property=True) + assert body.property == body["property"] + await client.boolean.put(body) + + resp = await client.boolean.get() + assert resp.property == resp["property"] == True + + +@pytest.mark.asyncio +async def test_boolean_literal(client: ValueTypesClient): + body = models.BooleanLiteralProperty(property=True) + assert body.property == body["property"] + await client.boolean_literal.put(body) + + resp = await client.boolean_literal.get() + assert resp.property == resp["property"] == True + + +@pytest.mark.asyncio +async def test_bytes(client: ValueTypesClient): + body = models.BytesProperty(property=b"hello, world!") + assert body.property == b"hello, world!" + assert body["property"] == "aGVsbG8sIHdvcmxkIQ==" + await client.bytes.put(body) + + resp = await client.bytes.get() + assert resp.property == b"hello, world!" 
+ assert resp["property"] == "aGVsbG8sIHdvcmxkIQ==" + + +@pytest.mark.asyncio +async def test_collections_int(client: ValueTypesClient): + body = models.CollectionsIntProperty(property=[1, 2]) + assert body.property == body["property"] + await client.collections_int.put(body) + + resp = await client.collections_int.get() + assert resp.property == resp["property"] == [1, 2] + + +@pytest.mark.asyncio +async def test_collections_model(client: ValueTypesClient): + body = models.CollectionsModelProperty(property=[{"property": "hello"}, {"property": "world"}]) + assert body.property[0].property == body["property"][0]["property"] + await client.collections_model.put(body) + + resp = await client.collections_model.get() + assert resp.property[1].property == resp["property"][1]["property"] + + +@pytest.mark.asyncio +async def test_collections_string(client: ValueTypesClient): + body = models.CollectionsStringProperty(property=["hello", "world"]) + assert body.property == body["property"] + await client.collections_string.put(body) + + resp = await client.collections_string.get() + assert resp.property == resp["property"] == ["hello", "world"] + + +@pytest.mark.asyncio +async def test_datetime(client): + received_body = await client.datetime.get() + assert received_body == {"property": "2022-08-26T18:38:00Z"} + assert received_body.property.year == 2022 + assert received_body.property.month == 8 + assert received_body.property.day == 26 + assert received_body.property.hour == 18 + assert received_body.property.minute == 38 + + await client.datetime.put(models.DatetimeProperty(property=datetime.datetime(2022, 8, 26, hour=18, minute=38))) + + +@pytest.mark.asyncio +async def test_decimal(client: ValueTypesClient): + body = models.DecimalProperty(property=decimal.Decimal("0.33333")) + assert body.property == decimal.Decimal("0.33333") + assert body["property"] == 0.33333 + await client.decimal.put(body) + + resp = await client.decimal.get() + assert resp.property == decimal.Decimal("0.33333") + assert resp["property"] == 0.33333 + + +@pytest.mark.asyncio +async def test_decimal128(client: ValueTypesClient): + body = models.Decimal128Property(property=decimal.Decimal("0.33333")) + assert body.property == decimal.Decimal("0.33333") + assert body["property"] == 0.33333 + await client.decimal128.put(body) + + resp = await client.decimal128.get() + assert resp.property == decimal.Decimal("0.33333") + assert resp["property"] == 0.33333 + + +@pytest.mark.asyncio +async def test_dictionary_string(client: ValueTypesClient): + body = models.DictionaryStringProperty(property={"k1": "hello", "k2": "world"}) + assert body.property == body["property"] + await client.dictionary_string.put(body) + + resp = await client.dictionary_string.get() + assert resp.property == resp["property"] == {"k1": "hello", "k2": "world"} + + +@pytest.mark.asyncio +async def test_duration(client: ValueTypesClient): + body = models.DurationProperty(property="P123DT22H14M12.011S") + assert body.property == datetime.timedelta(days=123, seconds=80052, microseconds=11000) + assert body["property"] == "P123DT22H14M12.011S" + await client.duration.put(body) + + resp = await client.duration.get() + assert resp.property == datetime.timedelta(days=123, seconds=80052, microseconds=11000) + assert resp["property"] == "P123DT22H14M12.011S" + + +@pytest.mark.asyncio +async def test_enum(client: ValueTypesClient): + body = models.EnumProperty(property=models.InnerEnum.VALUE_ONE) + assert body.property == body["property"] + await client.enum.put(body) + 
+ resp = await client.enum.get() + assert resp.property == resp["property"] == "ValueOne" + + +@pytest.mark.asyncio +async def test_extensible_enum(client: ValueTypesClient): + body = models.ExtensibleEnumProperty(property="UnknownValue") + assert body.property == body["property"] + await client.extensible_enum.put(body) + + resp = await client.extensible_enum.get() + assert resp.property == resp["property"] == "UnknownValue" + + +@pytest.mark.asyncio +async def test_float(client: ValueTypesClient): + body = models.FloatProperty(property=43.125) + assert body.property == body["property"] + await client.float.put(body) + + resp = await client.float.get() + assert resp.property == resp["property"] == 43.125 + + +@pytest.mark.asyncio +async def test_float_literal(client: ValueTypesClient): + body = models.FloatLiteralProperty(property=43.125) + assert body.property == body["property"] + await client.float_literal.put(body) + + resp = await client.float_literal.get() + assert resp.property == resp["property"] == 43.125 + + +@pytest.mark.asyncio +async def test_int(client: ValueTypesClient): + body = models.IntProperty(property=42) + assert body.property == body["property"] + await client.int_operations.put(body) + + resp = await client.int_operations.get() + assert resp.property == resp["property"] == 42 + + +@pytest.mark.asyncio +async def test_int_literal(client: ValueTypesClient): + body = models.IntLiteralProperty(property=42) + assert body.property == body["property"] + await client.int_literal.put(body) + + resp = await client.int_literal.get() + assert resp.property == resp["property"] == 42 + + +@pytest.mark.asyncio +async def test_model(client: ValueTypesClient): + body = models.ModelProperty(property={"property": "hello"}) + assert body.property.property == body["property"]["property"] + await client.model.put(body) + + resp = await client.model.get() + assert resp.property.property == resp["property"]["property"] + + +@pytest.mark.asyncio +async def test_never(client: ValueTypesClient): + assert await client.never.get() == models.NeverProperty() + await client.never.put(models.NeverProperty()) + + +@pytest.mark.asyncio +async def test_string(client: ValueTypesClient): + body = models.StringProperty(property="hello") + assert body.property == body["property"] + await client.string.put(body) + + resp = await client.string.get() + assert resp.property == resp["property"] == "hello" + + +@pytest.mark.asyncio +async def test_string_literal(client: ValueTypesClient): + body = models.StringLiteralProperty(property="hello") + assert body.property == body["property"] + await client.string_literal.put(body) + + resp = await client.string_literal.get() + assert resp.property == resp["property"] == "hello" + + +@pytest.mark.asyncio +async def test_union_enum_value(client: ValueTypesClient): + body = models.UnionEnumValueProperty(property=models.ExtendedEnum.ENUM_VALUE2) + assert body.property == body["property"] + await client.union_enum_value.put(body) + + resp = await client.union_enum_value.get() + assert resp.property == resp["property"] == "value2" + + +@pytest.mark.asyncio +async def test_union_float_literal(client: ValueTypesClient): + body = models.UnionFloatLiteralProperty(property=46.875) + assert body.property == body["property"] + await client.union_float_literal.put(body) + + resp = await client.union_float_literal.get() + assert resp.property == resp["property"] == 46.875 + + +@pytest.mark.asyncio +async def test_union_int_literal(client: ValueTypesClient): + body = 
models.UnionIntLiteralProperty(property=42) + assert body.property == body["property"] + await client.union_int_literal.put(body) + + resp = await client.union_int_literal.get() + assert resp.property == resp["property"] == 42 + + +@pytest.mark.asyncio +async def test_union_string_literal(client: ValueTypesClient): + body = models.UnionStringLiteralProperty(property="world") + assert body.property == body["property"] + await client.union_string_literal.put(body) + + resp = await client.union_string_literal.get() + assert resp.property == resp["property"] == "world" + + +@pytest.mark.asyncio +async def test_unknown_array(client: ValueTypesClient): + body = models.UnknownArrayProperty(property=["hello", "world"]) + assert body.property == body["property"] + await client.unknown_array.put(body) + + resp = await client.unknown_array.get() + assert resp.property == resp["property"] == ["hello", "world"] + + +@pytest.mark.asyncio +async def test_unknown_dict(client: ValueTypesClient): + body = models.UnknownDictProperty(property={"k1": "hello", "k2": 42}) + assert body.property == body["property"] + await client.unknown_dict.put(body) + + resp = await client.unknown_dict.get() + assert resp.property == resp["property"] == {"k1": "hello", "k2": 42} + + +@pytest.mark.asyncio +async def test_unknown_int(client: ValueTypesClient): + body = models.UnknownIntProperty(property=42) + assert body.property == body["property"] + await client.unknown_int.put(body) + + resp = await client.unknown_int.get() + assert resp.property == resp["property"] == 42 + + +@pytest.mark.asyncio +async def test_unknown_string(client: ValueTypesClient): + body = models.UnknownStringProperty(property="hello") + assert body.property == body["property"] + await client.unknown_string.put(body) + + resp = await client.unknown_string.get() + assert resp.property == resp["property"] == "hello" diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_scalar_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_scalar_async.py new file mode 100644 index 0000000000..3e2b308f8b --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_scalar_async.py @@ -0,0 +1,60 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import decimal +from functools import reduce + +import pytest +from typetest.scalar.aio import ScalarClient + + +@pytest.fixture +async def client(): + async with ScalarClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_scalar_string(client: ScalarClient): + assert await client.string.get() == "test" + await client.string.put("test") + + +@pytest.mark.asyncio +async def test_scalar_boolean(client: ScalarClient): + assert await client.boolean.get() == True + await client.boolean.put(True) + + +@pytest.mark.asyncio +async def test_scalar_unknown(client: ScalarClient): + assert await client.unknown.get() == "test" + await client.unknown.put("test") + + +@pytest.mark.asyncio +async def test_decimal128_type(client: ScalarClient): + assert await client.decimal128_type.response_body() == decimal.Decimal("0.33333") + await client.decimal128_type.request_body(decimal.Decimal("0.33333")) + await client.decimal128_type.request_parameter(value=decimal.Decimal("0.33333")) + + +@pytest.mark.asyncio +async def test_decimal_type(client: ScalarClient): + assert await client.decimal_type.response_body() == decimal.Decimal("0.33333") + await client.decimal_type.request_body(decimal.Decimal("0.33333")) + await client.decimal_type.request_parameter(value=decimal.Decimal("0.33333")) + + +@pytest.mark.asyncio +async def test_decimal128_verify(client: ScalarClient): + prepare = await client.decimal128_verify.prepare_verify() + await client.decimal128_verify.verify(reduce(lambda x, y: x + y, prepare)) + + +@pytest.mark.asyncio +async def test_decimal_verify(client: ScalarClient): + prepare = await client.decimal_verify.prepare_verify() + await client.decimal_verify.verify(reduce(lambda x, y: x + y, prepare)) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_union_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_union_async.py new file mode 100644 index 0000000000..9e738701f4 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_typetest_union_async.py @@ -0,0 +1,90 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.union.aio import UnionClient +from typetest.union import models + + +@pytest.fixture +async def client(): + async with UnionClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_enums_only(client: UnionClient): + value = models.EnumsOnlyCases(lr="right", ud="up") + assert (await client.enums_only.get()) == {"prop": value} + await client.enums_only.send(prop=value) + + +@pytest.mark.asyncio +async def test_floats_only(client: UnionClient): + value = 2.2 + assert (await client.floats_only.get()) == {"prop": value} + await client.floats_only.send(prop=value) + + +@pytest.mark.asyncio +async def test_ints_only(client: UnionClient): + value = 2 + assert (await client.ints_only.get()) == {"prop": value} + await client.ints_only.send(prop=value) + + +@pytest.mark.asyncio +async def test_mixed_literals(client: UnionClient): + value = models.MixedLiteralsCases(string_literal="a", int_literal=2, float_literal=3.3, boolean_literal=True) + assert (await client.mixed_literals.get()) == {"prop": value} + await client.mixed_literals.send(prop=value) + + +@pytest.mark.asyncio +async def test_mixed_types(client: UnionClient): + value = models.MixedTypesCases( + model=models.Cat(name="test"), + literal="a", + int_property=2, + boolean=True, + array=[models.Cat(name="test"), "a", 2, True], + ) + assert (await client.mixed_types.get()) == {"prop": value} + await client.mixed_types.send(prop=value) + + +@pytest.mark.asyncio +async def test_models_only(client: UnionClient): + value = models.Cat(name="test") + assert (await client.models_only.get()) == {"prop": value} + await client.models_only.send(prop=value) + + +@pytest.mark.asyncio +async def test_string_and_array(client: UnionClient): + value = models.StringAndArrayCases(string="test", array=["test1", "test2"]) + assert (await client.string_and_array.get()) == {"prop": value} + await client.string_and_array.send(prop=value) + + +@pytest.mark.asyncio +async def test_string_extensible(client: UnionClient): + value = "custom" + assert (await client.string_extensible.get()) == {"prop": value} + await client.string_extensible.send(prop=value) + + +@pytest.mark.asyncio +async def test_string_extensible_named(client: UnionClient): + value = "custom" + assert (await client.string_extensible_named.get()) == {"prop": value} + await client.string_extensible_named.send(prop=value) + + +@pytest.mark.asyncio +async def test_strings_only(client: UnionClient): + value = "b" + assert (await client.strings_only.get()) == {"prop": value} + await client.strings_only.send(prop=value) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_added_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_added_async.py new file mode 100644 index 0000000000..27e78a8892 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_added_async.py @@ -0,0 +1,36 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from versioning.added.aio import AddedClient +from versioning.added.models import ModelV1, ModelV2, EnumV1, EnumV2 + + +@pytest.fixture +async def client(): + async with AddedClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +@pytest.mark.asyncio +async def test_v1(client: AddedClient): + assert await client.v1( + ModelV1(prop="foo", enum_prop=EnumV1.ENUM_MEMBER_V2, union_prop=10), + header_v2="bar", + ) == ModelV1(prop="foo", enum_prop=EnumV1.ENUM_MEMBER_V2, union_prop=10) + + +@pytest.mark.asyncio +async def test_v2(client: AddedClient): + assert await client.v2(ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar")) == ModelV2( + prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar" + ) + + +@pytest.mark.asyncio +async def test_interface_v2(client: AddedClient): + assert await client.interface_v2.v2_in_interface( + ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar") + ) == ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar") diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_made_optional_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_made_optional_async.py new file mode 100644 index 0000000000..27ec811cae --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_made_optional_async.py @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from versioning.madeoptional.aio import MadeOptionalClient +from versioning.madeoptional.models import TestModel + + +@pytest.fixture +async def client(): + async with MadeOptionalClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +@pytest.mark.asyncio +async def test(client: MadeOptionalClient): + assert await client.test( + TestModel(prop="foo"), + ) == TestModel(prop="foo") diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_removed_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_removed_async.py new file mode 100644 index 0000000000..1ec75c5229 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_removed_async.py @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from versioning.removed.aio import RemovedClient +from versioning.removed.models import ModelV2, EnumV2 + + +@pytest.fixture +async def client(): + async with RemovedClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +@pytest.mark.asyncio +async def test_v2(client: RemovedClient): + assert await client.v2(ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER_V2, union_prop="bar")) == ModelV2( + prop="foo", enum_prop=EnumV2.ENUM_MEMBER_V2, union_prop="bar" + ) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_renamed_from_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_renamed_from_async.py new file mode 100644 index 0000000000..46ef588f65 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_renamed_from_async.py @@ -0,0 +1,29 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from versioning.renamedfrom.aio import RenamedFromClient +from versioning.renamedfrom.models import NewModel, NewEnum + + +@pytest.fixture +async def client(): + async with RenamedFromClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +@pytest.mark.asyncio +async def test_new_op(client: RenamedFromClient): + assert await client.new_op( + NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10), + new_query="bar", + ) == NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10) + + +@pytest.mark.asyncio +async def test_new_interface_test(client: RenamedFromClient): + assert await client.new_interface.new_op_in_new_interface( + NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10) + ) == NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10) diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_return_type_changed_from_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_return_type_changed_from_async.py new file mode 100644 index 0000000000..80eefecbe2 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_return_type_changed_from_async.py @@ -0,0 +1,18 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from versioning.returntypechangedfrom.aio import ReturnTypeChangedFromClient + + +@pytest.fixture +async def client(): + async with ReturnTypeChangedFromClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +@pytest.mark.asyncio +async def test(client: ReturnTypeChangedFromClient): + assert await client.test("test") == "test" diff --git a/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_type_changed_from_async.py b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_type_changed_from_async.py new file mode 100644 index 0000000000..2374bd1f55 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/asynctests/test_versioning_type_changed_from_async.py @@ -0,0 +1,22 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from versioning.typechangedfrom.aio import TypeChangedFromClient +from versioning.typechangedfrom.models import TestModel + + +@pytest.fixture +async def client(): + async with TypeChangedFromClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +@pytest.mark.asyncio +async def test(client: TypeChangedFromClient): + assert await client.test( + TestModel(prop="foo", changed_prop="bar"), + param="baz", + ) == TestModel(prop="foo", changed_prop="bar") diff --git a/packages/http-client-python/test/generic_mock_api_tests/conftest.py b/packages/http-client-python/test/generic_mock_api_tests/conftest.py new file mode 100644 index 0000000000..53eb50e2bb --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/conftest.py @@ -0,0 +1,113 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import os +import subprocess +import signal +import pytest +import importlib +from pathlib import Path +from typing import List + +FILE_FOLDER = Path(__file__).parent + + +def start_server_process(): + path = Path(os.path.dirname(__file__)) / Path("../../node_modules/@azure-tools/cadl-ranch-specs") + os.chdir(path.resolve()) + cmd = "cadl-ranch serve ./http" + if os.name == "nt": + return subprocess.Popen(cmd, shell=True) + return subprocess.Popen(cmd, shell=True, preexec_fn=os.setsid) + + +def terminate_server_process(process): + if os.name == "nt": + process.kill() + else: + os.killpg(os.getpgid(process.pid), signal.SIGTERM) # Send the signal to all the process groups + + +@pytest.fixture(scope="session", autouse=True) +def testserver(): + """Start cadl ranch mock api tests""" + server = start_server_process() + yield + terminate_server_process(server) + + +""" +Use to disambiguate the core library we use +""" + + +@pytest.fixture +def core_library(): + try: + return importlib.import_module("azure.core") + except ModuleNotFoundError: + return importlib.import_module("corehttp") + + +@pytest.fixture +def key_credential(core_library): + try: + return core_library.credentials.AzureKeyCredential + except AttributeError: + return core_library.credentials.ServiceKeyCredential + + +SPECIAL_WORDS = [ + "and", + "as", + "assert", + "async", + "await", + "break", + "class", + "constructor", + "continue", + "def", + "del", + "elif", + "else", + "except", + "exec", + "finally", + "for", + "from", + "global", + "if", + "import", + "in", + "is", + "lambda", + "not", + "or", + "pass", + "raise", + "return", + "try", + "while", + "with", + "yield", +] + + +@pytest.fixture +def special_words() -> List[str]: + return SPECIAL_WORDS + + +@pytest.fixture +def png_data() -> bytes: + with open(str(FILE_FOLDER / "data/image.png"), "rb") as file_in: + return file_in.read() + + +@pytest.fixture +def jpg_data() -> bytes: + with open(str(FILE_FOLDER / "data/image.jpg"), "rb") as file_in: + return file_in.read() diff --git a/packages/http-client-python/test/generic_mock_api_tests/data/image.jpg b/packages/http-client-python/test/generic_mock_api_tests/data/image.jpg new file mode 100644 index 0000000000..b95b3e7b58 Binary files /dev/null and b/packages/http-client-python/test/generic_mock_api_tests/data/image.jpg differ diff --git a/packages/http-client-python/test/generic_mock_api_tests/data/image.png b/packages/http-client-python/test/generic_mock_api_tests/data/image.png new file mode 100644 index 0000000000..42fe8dc145 Binary files /dev/null and b/packages/http-client-python/test/generic_mock_api_tests/data/image.png differ diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_authentication.py b/packages/http-client-python/test/generic_mock_api_tests/test_authentication.py new file mode 100644 index 0000000000..079bb38b1e --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_authentication.py @@ -0,0 +1,113 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+import pytest
+from authentication.apikey import ApiKeyClient
+from authentication.oauth2 import OAuth2Client
+from authentication.union import UnionClient
+from authentication.http.custom import CustomClient
+
+
+# Utility functions
+
+
+@pytest.fixture
+def api_key_client(key_credential):
+    client = None
+
+    def _build_client(client_type):
+        nonlocal client
+        client = client_type(key_credential("valid-key"))
+        return client
+
+    yield _build_client
+    if client:
+        client.close()
+
+
+@pytest.fixture
+def token_credential(core_library):
+    class FakeCredential:
+        @staticmethod
+        def get_token(*scopes):
+            return core_library.credentials.AccessToken(token="".join(scopes), expires_on=1800)
+
+    return FakeCredential()
+
+
+@pytest.fixture
+def oauth2_client(token_credential):
+    client = None
+
+    def _build_client(client_type):
+        nonlocal client
+        client = client_type(token_credential)
+        return client
+
+    yield _build_client
+    if client:
+        client.close()
+
+
+@pytest.fixture
+def http_custom_client(key_credential):
+    client = None
+
+    def _build_client():
+        nonlocal client
+        client = CustomClient(key_credential("valid-key"))
+        return client
+
+    yield _build_client
+    if client:
+        client.close()
+
+
+# Tests
+
+
+def test_api_key_valid(api_key_client):
+    client = api_key_client(ApiKeyClient)
+    client.valid()
+
+
+def test_api_key_invalid(api_key_client, core_library):
+    client = api_key_client(ApiKeyClient)
+    with pytest.raises(core_library.exceptions.HttpResponseError) as ex:
+        client.invalid()
+    assert ex.value.status_code == 403
+    assert ex.value.reason == "Forbidden"
+
+
+def test_oauth2_valid(oauth2_client):
+    client = oauth2_client(OAuth2Client)
+    client.valid(enforce_https=False)
+
+
+def test_oauth2_invalid(oauth2_client, core_library):
+    client = oauth2_client(OAuth2Client)
+    with pytest.raises(core_library.exceptions.HttpResponseError) as ex:
+        client.invalid(enforce_https=False)
+    assert ex.value.status_code == 403
+
+
+def test_union_keyvalid(api_key_client):
+    client = api_key_client(UnionClient)
+    client.valid_key()
+
+
+def test_union_tokenvalid(oauth2_client):
+    client = oauth2_client(UnionClient)
+    client.valid_token(enforce_https=False)
+
+
+def test_http_custom_valid(http_custom_client):
+    client = http_custom_client()
+    client.valid()
+
+
+def test_http_custom_invalid(http_custom_client, core_library):
+    client = http_custom_client()
+    with pytest.raises(core_library.exceptions.HttpResponseError) as ex:
+        client.invalid()
+    assert ex.value.status_code == 403
+    assert ex.value.reason == "Forbidden"
diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_client_naming.py b/packages/http-client-python/test/generic_mock_api_tests/test_client_naming.py
new file mode 100644
index 0000000000..023c2f3aa8
--- /dev/null
+++ b/packages/http-client-python/test/generic_mock_api_tests/test_client_naming.py
@@ -0,0 +1,57 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- +import pytest +from client.naming import NamingClient, models + + +@pytest.fixture +def client(): + with NamingClient() as client: + yield client + + +def test_client(client: NamingClient): + client.client(models.ClientNameModel(client_name=True)) + + +def test_language(client: NamingClient): + client.language(models.LanguageClientNameModel(python_name=True)) + + +def test_compatible_with_encoded_name(client: NamingClient): + client.compatible_with_encoded_name(models.ClientNameAndJsonEncodedNameModel(client_name=True)) + + +def test_operation(client: NamingClient): + client.client_name() + + +def test_parameter(client: NamingClient): + client.parameter(client_name="true") + + +def test_header_request(client: NamingClient): + client.request(client_name="true") + + +def test_header_response(client: NamingClient): + assert client.response(cls=lambda x, y, z: z)["default-name"] == "true" + + +def test_model_client(client: NamingClient): + client.client_model.client(models.ClientModel(default_name=True)) + + +def test_model_language(client: NamingClient): + client.client_model.language(models.PythonModel(default_name=True)) + + +def test_union_enum_member_name(client: NamingClient): + client.union_enum.union_enum_member_name(models.ExtensibleEnum.CLIENT_ENUM_VALUE1) + + +def test_union_enum_name(client: NamingClient): + client.union_enum.union_enum_name(models.ClientExtensibleEnum.ENUM_VALUE1) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_client_structure.py b/packages/http-client-python/test/generic_mock_api_tests/test_client_structure.py new file mode 100644 index 0000000000..d28827376c --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_client_structure.py @@ -0,0 +1,57 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from client.structure.service.models import ClientType +from client.structure.service import ServiceClient +from client.structure.multiclient import ClientAClient, ClientBClient +from client.structure.renamedoperation import RenamedOperationClient +from client.structure.twooperationgroup import TwoOperationGroupClient + + +def test_structure_default(): + client = ServiceClient(endpoint="http://localhost:3000", client=ClientType.DEFAULT) + client.one() + client.two() + client.foo.three() + client.foo.four() + client.bar.five() + client.bar.six() + client.baz.foo.seven() + client.qux.eight() + client.qux.bar.nine() + + +def test_structure_multiclient(): + client_a = ClientAClient(endpoint="http://localhost:3000", client=ClientType.MULTI_CLIENT) + client_a.renamed_one() + client_a.renamed_three() + client_a.renamed_five() + + client_b = ClientBClient(endpoint="http://localhost:3000", client=ClientType.MULTI_CLIENT) + client_b.renamed_two() + client_b.renamed_four() + client_b.renamed_six() + + +def test_structure_renamed_operation(): + client = RenamedOperationClient(endpoint="http://localhost:3000", client=ClientType.RENAMED_OPERATION) + client.renamed_one() + client.renamed_three() + client.renamed_five() + + client.group.renamed_two() + client.group.renamed_four() + client.group.renamed_six() + + +def test_structure_two_operation_group(): + client = TwoOperationGroupClient(endpoint="http://localhost:3000", client=ClientType.TWO_OPERATION_GROUP) + client.group1.one() + client.group1.three() + client.group1.four() + + client.group2.two() + client.group2.five() + client.group2.six() diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_encode_bytes.py b/packages/http-client-python/test/generic_mock_api_tests/test_encode_bytes.py new file mode 100644 index 0000000000..b398952f6f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_encode_bytes.py @@ -0,0 +1,128 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from pathlib import Path + +import pytest +from encode.bytes import BytesClient +from encode.bytes.models import ( + DefaultBytesProperty, + Base64urlBytesProperty, + Base64BytesProperty, + Base64urlArrayBytesProperty, +) + +FILE_FOLDER = Path(__file__).parent + + +@pytest.fixture +def client(): + with BytesClient() as client: + yield client + + +def test_query(client: BytesClient): + client.query.default( + value=bytes("test", "utf-8"), + ) + client.query.base64( + value=bytes("test", "utf-8"), + ) + client.query.base64_url( + value=bytes("test", "utf-8"), + ) + client.query.base64_url_array( + value=[ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ], + ) + + +def test_property(client: BytesClient): + result = client.property.default( + DefaultBytesProperty( + value=bytes("test", "utf-8"), + ) + ) + assert result.value == bytes("test", "utf-8") + + result = client.property.base64( + Base64BytesProperty( + value=bytes("test", "utf-8"), + ) + ) + assert result.value == bytes("test", "utf-8") + + result = client.property.base64_url( + Base64urlBytesProperty( + value=bytes("test", "utf-8"), + ) + ) + assert result.value == bytes("test", "utf-8") + + result = client.property.base64_url_array( + Base64urlArrayBytesProperty( + value=[ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ], + ) + ) + assert result.value == [ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ] + + +def test_header(client: BytesClient): + client.header.default( + value=bytes("test", "utf-8"), + ) + client.header.base64( + value=bytes("test", "utf-8"), + ) + client.header.base64_url( + value=bytes("test", "utf-8"), + ) + client.header.base64_url_array( + value=[ + bytes("test", "utf-8"), + bytes("test", "utf-8"), + ], + ) + + +@pytest.fixture +def png_data() -> bytes: + with open(str(FILE_FOLDER / "data/image.png"), "rb") as file_in: + return file_in.read() + + +def test_request_body(client: BytesClient, png_data: bytes): + client.request_body.default( + value=bytes("test", "utf-8"), + ) + client.request_body.octet_stream( + value=png_data, + ) + client.request_body.custom_content_type( + value=png_data, + ) + client.request_body.base64( + value=bytes("test", "utf-8"), + ) + client.request_body.base64_url( + value=bytes("test", "utf-8"), + ) + + +def test_response_body(client: BytesClient, png_data: bytes): + expected = b"test" + assert expected == client.response_body.default() + assert expected == client.response_body.base64() + assert expected == client.response_body.base64_url() + assert b"".join(client.response_body.octet_stream()) == png_data + assert b"".join(client.response_body.custom_content_type()) == png_data diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_encode_datetime.py b/packages/http-client-python/test/generic_mock_api_tests/test_encode_datetime.py new file mode 100644 index 0000000000..46b32f9734 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_encode_datetime.py @@ -0,0 +1,123 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import datetime + +import pytest +from encode.datetime import DatetimeClient +from encode.datetime.models import ( + DefaultDatetimeProperty, + Rfc3339DatetimeProperty, + Rfc7231DatetimeProperty, + UnixTimestampDatetimeProperty, + UnixTimestampArrayDatetimeProperty, +) + + +@pytest.fixture +def client(): + with DatetimeClient() as client: + yield client + + +def test_query(client: DatetimeClient): + client.query.default( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + client.query.rfc3339( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + client.query.rfc7231( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + client.query.unix_timestamp( + value=datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + ) + client.query.unix_timestamp_array( + value=[ + datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ], + ) + + +def test_property(client: DatetimeClient): + result = client.property.default( + DefaultDatetimeProperty( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc) + + result = client.property.rfc3339( + Rfc3339DatetimeProperty( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc) + + result = client.property.rfc7231( + Rfc7231DatetimeProperty( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc) + + result = client.property.unix_timestamp( + UnixTimestampDatetimeProperty( + value=datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + ) + ) + assert result.value == datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc) + + result = client.property.unix_timestamp_array( + UnixTimestampArrayDatetimeProperty( + value=[ + datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ], + ) + ) + assert result.value == [ + datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ] + + +def test_header(client: DatetimeClient): + client.header.default( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + client.header.rfc3339( + value=datetime.datetime(2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc), + ) + client.header.rfc7231( + value=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc), + ) + client.header.unix_timestamp( + value=datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + ) + client.header.unix_timestamp_array( + value=[ + datetime.datetime(2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc), + datetime.datetime(2023, 6, 14, 9, 17, 36, tzinfo=datetime.timezone.utc), + ] + ) + + +def test_response_header(client: DatetimeClient): + cls = lambda x, y, z: z + assert client.response_header.default(cls=cls)["value"] == datetime.datetime( + 2022, 8, 26, 14, 38, 0, 
tzinfo=datetime.timezone.utc + ) + assert client.response_header.rfc3339(cls=cls)["value"] == datetime.datetime( + 2022, 8, 26, 18, 38, 0, tzinfo=datetime.timezone.utc + ) + assert client.response_header.rfc7231(cls=cls)["value"] == datetime.datetime( + 2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc + ) + assert client.response_header.unix_timestamp(cls=cls)["value"] == datetime.datetime( + 2023, 6, 12, 10, 47, 44, tzinfo=datetime.timezone.utc + ) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_encode_duration.py b/packages/http-client-python/test/generic_mock_api_tests/test_encode_duration.py new file mode 100644 index 0000000000..db0e29416f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_encode_duration.py @@ -0,0 +1,60 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import datetime + +import pytest +from encode.duration import DurationClient +from encode.duration.models import ( + Int32SecondsDurationProperty, + ISO8601DurationProperty, + FloatSecondsDurationProperty, + DefaultDurationProperty, + FloatSecondsDurationArrayProperty, +) + + +@pytest.fixture +def client(): + with DurationClient() as client: + yield client + + +def test_query(client: DurationClient): + client.query.default(input=datetime.timedelta(days=40)) + client.query.iso8601(input=datetime.timedelta(days=40)) + client.query.int32_seconds(input=36) + client.query.int32_seconds_array(input=[36, 47]) + client.query.float_seconds(input=35.625) + client.query.float64_seconds(input=35.625) + + +def test_property(client: DurationClient): + result = client.property.default(DefaultDurationProperty(value=datetime.timedelta(days=40))) + assert result.value == datetime.timedelta(days=40) + result = client.property.default(DefaultDurationProperty(value="P40D")) + assert result.value == datetime.timedelta(days=40) + result = client.property.iso8601(ISO8601DurationProperty(value=datetime.timedelta(days=40))) + assert result.value == datetime.timedelta(days=40) + result = client.property.iso8601(ISO8601DurationProperty(value="P40D")) + assert result.value == datetime.timedelta(days=40) + result = client.property.int32_seconds(Int32SecondsDurationProperty(value=36)) + assert result.value == 36 + result = client.property.float_seconds(FloatSecondsDurationProperty(value=35.625)) + assert abs(result.value - 35.625) < 0.0001 + result = client.property.float64_seconds(FloatSecondsDurationProperty(value=35.625)) + assert abs(result.value - 35.625) < 0.0001 + result = client.property.float_seconds_array(FloatSecondsDurationArrayProperty(value=[35.625, 46.75])) + assert abs(result.value[0] - 35.625) < 0.0001 + assert abs(result.value[1] - 46.75) < 0.0001 + + +def test_header(client: DurationClient): + client.header.default(duration=datetime.timedelta(days=40)) + client.header.iso8601(duration=datetime.timedelta(days=40)) + client.header.iso8601_array(duration=[datetime.timedelta(days=40), datetime.timedelta(days=50)]) + client.header.int32_seconds(duration=36) + client.header.float_seconds(duration=35.625) + client.header.float64_seconds(duration=35.625) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_encode_numeric.py 
b/packages/http-client-python/test/generic_mock_api_tests/test_encode_numeric.py new file mode 100644 index 0000000000..397b1e69f6 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_encode_numeric.py @@ -0,0 +1,25 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from encode.numeric import NumericClient, models + + +@pytest.fixture +def client(): + with NumericClient() as client: + yield client + + +def test_safeint_as_string(client: NumericClient): + result = client.property.safeint_as_string(models.SafeintAsStringProperty(value=10000000000)) + assert result.value == 10000000000 + assert result["value"] == "10000000000" + + +def test_uint32_as_string_optional(client: NumericClient): + result = client.property.uint32_as_string_optional(models.Uint32AsStringProperty(value=1)) + assert result.value == 1 + assert result["value"] == "1" diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_headasboolean.py b/packages/http-client-python/test/generic_mock_api_tests/test_headasboolean.py new file mode 100644 index 0000000000..a8c5db102b --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_headasboolean.py @@ -0,0 +1,33 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from headasbooleantrue import VisibilityClient as HeadAsBooleanTrueClient +from headasbooleantrue import models as models_true + +from headasbooleanfalse import VisibilityClient as HeadAsBooleanFalseClient +from headasbooleanfalse import models as models_false + + +@pytest.fixture +def client_true(): + with HeadAsBooleanTrueClient() as client: + yield client + + +@pytest.fixture +def client_false(): + with HeadAsBooleanFalseClient() as client: + yield client + + +def test_head_true(client_true): + body = models_true.VisibilityModel(query_prop=123) + assert client_true.head_model(body) == True + + +def test_head_false(client_false): + body = models_false.VisibilityModel(query_prop=123) + assert client_false.head_model(body) is None diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_parameters_basic.py b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_basic.py new file mode 100644 index 0000000000..922bc1d629 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_basic.py @@ -0,0 +1,22 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from parameters.basic import BasicClient +from parameters.basic.models import User + + +@pytest.fixture +def client(): + with BasicClient() as client: + yield client + + +def test_explicit_simple(client: BasicClient): + client.explicit_body.simple(User(name="foo")) + + +def test_implicit_simple(client: BasicClient): + client.implicit_body.simple(name="foo") diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_parameters_body_optionality.py b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_body_optionality.py new file mode 100644 index 0000000000..66ea0a9fea --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_body_optionality.py @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from parameters.bodyoptionality import BodyOptionalityClient +from parameters.bodyoptionality.models import BodyModel + + +@pytest.fixture +def client(): + with BodyOptionalityClient() as client: + yield client + + +def test_required_explicit(client: BodyOptionalityClient): + client.required_explicit(BodyModel(name="foo")) + + +def test_required_implicit(client: BodyOptionalityClient): + client.required_implicit(name="foo") + + +def test_optional_explicit(client: BodyOptionalityClient): + client.optional_explicit.set(BodyModel(name="foo")) + client.optional_explicit.omit() diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_parameters_collection_format.py b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_collection_format.py new file mode 100644 index 0000000000..ab55144e85 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_collection_format.py @@ -0,0 +1,37 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from parameters.collectionformat import CollectionFormatClient + + +@pytest.fixture +def client(): + with CollectionFormatClient() as client: + yield client + + +def test_query_multi(client: CollectionFormatClient): + client.query.multi(colors=["blue", "red", "green"]) + + +def test_query_csv(client: CollectionFormatClient): + client.query.csv(colors=["blue", "red", "green"]) + + +def test_query_pipes(client: CollectionFormatClient): + client.query.pipes(colors=["blue", "red", "green"]) + + +def test_query_ssv(client: CollectionFormatClient): + client.query.ssv(colors=["blue", "red", "green"]) + + +def test_query_tsv(client: CollectionFormatClient): + client.query.tsv(colors=["blue", "red", "green"]) + + +def test_csv_header(client: CollectionFormatClient): + client.header.csv(colors=["blue", "red", "green"]) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_parameters_spread.py b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_spread.py new file mode 100644 index 0000000000..17a50f461f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_parameters_spread.py @@ -0,0 +1,71 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from parameters.spread import SpreadClient +from parameters.spread.models import BodyParameter + + +@pytest.fixture +def client(): + with SpreadClient() as client: + yield client + + +def test_model_body(client: SpreadClient): + client.model.spread_as_request_body(name="foo") + + +def test_model_composite_request_only_with_body(client: SpreadClient): + client.model.spread_composite_request_only_with_body(BodyParameter(name="foo")) + + +def test_model_composite_request_without_body(client: SpreadClient): + client.model.spread_composite_request_without_body(name="foo", test_header="bar") + + +def test_model_composite_request(client: SpreadClient): + client.model.spread_composite_request(name="foo", body=BodyParameter(name="foo"), test_header="bar") + + +def test_model_composite_request_mix(client: SpreadClient): + client.model.spread_composite_request_mix(name="foo", prop="foo", test_header="bar") + + +def test_alias_body(client: SpreadClient): + client.alias.spread_as_request_body(name="foo") + + +def test_alias_parameter(client: SpreadClient): + client.alias.spread_as_request_parameter("1", x_ms_test_header="bar", name="foo") + + +def test_alias_multiple_parameter(client: SpreadClient): + client.alias.spread_with_multiple_parameters( + "1", + x_ms_test_header="bar", + required_string="foo", + required_int_list=[1, 2], + optional_string_list=["foo", "bar"], + optional_int=1, + ) + client.alias.spread_with_multiple_parameters( + "1", + { + "requiredString": "foo", + "optionalInt": 1, + "requiredIntList": [1, 2], + "optionalStringList": ["foo", "bar"], + }, + x_ms_test_header="bar", + ) + + +def test_inner_model(client: SpreadClient): + client.alias.spread_parameter_with_inner_model(id="1", x_ms_test_header="bar", body={"name": "foo"}) + + +def test_inner_alias(client: SpreadClient): + client.alias.spread_parameter_with_inner_alias(id="1", x_ms_test_header="bar", body={"name": "foo", "age": 1}) diff --git 
a/packages/http-client-python/test/generic_mock_api_tests/test_payload_content_negotiation.py b/packages/http-client-python/test/generic_mock_api_tests/test_payload_content_negotiation.py new file mode 100644 index 0000000000..27fa1ba25f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_payload_content_negotiation.py @@ -0,0 +1,33 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import base64 +import pytest +from payload.contentnegotiation import ContentNegotiationClient +from payload.contentnegotiation.models import PngImageAsJson + + +@pytest.fixture +def client(): + with ContentNegotiationClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_get_avatar_as_png(client: ContentNegotiationClient, png_data: bytes): + assert b"".join(client.same_body.get_avatar_as_png()) == png_data + + +def test_get_avatar_as_jpeg(client: ContentNegotiationClient, jpg_data: bytes): + assert b"".join(client.same_body.get_avatar_as_jpeg()) == jpg_data + + +def test_different_body_get_avatar_as_png(client: ContentNegotiationClient, png_data: bytes): + assert b"".join(client.different_body.get_avatar_as_png()) == png_data + + +def test_different_body_get_avatar_as_json(client: ContentNegotiationClient, png_data: bytes): + result = client.different_body.get_avatar_as_json() + expected = PngImageAsJson(content=base64.b64encode(png_data).decode()) + assert result == expected diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_payload_json_merge_patch.py b/packages/http-client-python/test/generic_mock_api_tests/test_payload_json_merge_patch.py new file mode 100644 index 0000000000..af0a9afea8 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_payload_json_merge_patch.py @@ -0,0 +1,93 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from payload.jsonmergepatch import JsonMergePatchClient +from payload.jsonmergepatch.models import InnerModel, Resource, ResourcePatch + +try: + from azure.core.serialization import NULL +except ImportError: + from corehttp.serialization import NULL + + +@pytest.fixture +def client(): + with JsonMergePatchClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_create_resource(client: JsonMergePatchClient): + inner_madge = InnerModel(name="InnerMadge", description="innerDesc") + create_resource = Resource( + name="Madge", + description="desc", + map={"key": inner_madge}, + array=[inner_madge], + int_value=1, + float_value=1.1, + inner_model=inner_madge, + int_array=[1, 2, 3], + ) + response = client.create_resource(create_resource) + assert response == create_resource + + +def test_update_resource_model_input(client: JsonMergePatchClient): + update_resource = ResourcePatch( + description=NULL, + map={"key": InnerModel(description=NULL), "key2": NULL}, + array=NULL, + int_value=NULL, + float_value=NULL, + inner_model=NULL, + int_array=NULL, + ) + response = client.update_resource(update_resource) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) + + +def test_update_resource_raw_input(client: JsonMergePatchClient): + response = client.update_resource( + { + "description": None, + "map": {"key": {"description": None}, "key2": None}, + "array": None, + "intValue": None, + "floatValue": None, + "innerModel": None, + "intArray": None, + } + ) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) + + +def test_update_optional_resource_model_input(client: JsonMergePatchClient): + update_resource = ResourcePatch( + description=NULL, + map={"key": InnerModel(description=NULL), "key2": NULL}, + array=NULL, + int_value=NULL, + float_value=NULL, + inner_model=NULL, + int_array=NULL, + ) + response = client.update_optional_resource(update_resource) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) + + +def test_update_optional_resource_raw_input(client: JsonMergePatchClient): + response = client.update_optional_resource( + { + "description": None, + "map": {"key": {"description": None}, "key2": None}, + "array": None, + "intValue": None, + "floatValue": None, + "innerModel": None, + "intArray": None, + } + ) + assert response == Resource(name="Madge", map={"key": InnerModel(name="InnerMadge")}) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_payload_media_type.py b/packages/http-client-python/test/generic_mock_api_tests/test_payload_media_type.py new file mode 100644 index 0000000000..72bc9425fc --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_payload_media_type.py @@ -0,0 +1,25 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from payload.mediatype import MediaTypeClient + + +@pytest.fixture +def client(): + with MediaTypeClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_json(client: MediaTypeClient): + data = "foo" + client.string_body.send_as_json(data) + assert client.string_body.get_as_json() == data + + +def test_text(client: MediaTypeClient): + data = "{cat}" + client.string_body.send_as_text(data) + assert client.string_body.get_as_text() == data diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_payload_multipart.py b/packages/http-client-python/test/generic_mock_api_tests/test_payload_multipart.py new file mode 100644 index 0000000000..49e3c6085f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_payload_multipart.py @@ -0,0 +1,144 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from pathlib import Path +import pytest +from payload.multipart import MultiPartClient, models + +JPG = Path(__file__).parent / "data/image.jpg" +PNG = Path(__file__).parent / "data/image.png" + + +@pytest.fixture +def client(): + with MultiPartClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_anonymous_model(client: MultiPartClient): + client.form_data.anonymous_model({"profileImage": open(str(JPG), "rb")}) + + +def test_basic(client: MultiPartClient): + client.form_data.basic( + models.MultiPartRequest( + id="123", + profile_image=open(str(JPG), "rb"), + ) + ) + + +def test_binary_array_parts(client: MultiPartClient): + client.form_data.binary_array_parts( + models.BinaryArrayPartsRequest( + id="123", + pictures=[ + open(str(PNG), "rb"), + open(str(PNG), "rb"), + ], + ) + ) + + +def test_check_file_name_and_content_type(client: MultiPartClient): + client.form_data.check_file_name_and_content_type( + models.MultiPartRequest( + id="123", + profile_image=("hello.jpg", open(str(JPG), "rb"), "image/jpg"), + ) + ) + + +def test_complex(client: MultiPartClient): + client.form_data.file_array_and_basic( + models.ComplexPartsRequest( + id="123", + address=models.Address(city="X"), + pictures=[ + open(str(PNG), "rb"), + open(str(PNG), "rb"), + ], + profile_image=open(str(JPG), "rb"), + ) + ) + + +def test_json_part(client: MultiPartClient): + client.form_data.json_part( + models.JsonPartRequest( + address=models.Address(city="X"), + profile_image=open(str(JPG), "rb"), + ) + ) + + +def test_multi_binary_parts(client: MultiPartClient): + client.form_data.multi_binary_parts( + models.MultiBinaryPartsRequest( + profile_image=open(str(JPG), "rb"), + picture=open(str(PNG), "rb"), + ) + ) + client.form_data.multi_binary_parts( + models.MultiBinaryPartsRequest( + profile_image=open(str(JPG), "rb"), + ) + ) + + +def test_file_with_http_part_specific_content_type(client: MultiPartClient): + client.form_data.http_parts.content_type.image_jpeg_content_type( + models.FileWithHttpPartSpecificContentTypeRequest( + profile_image=("hello.jpg", open(str(JPG), "rb"), "image/jpg"), + ) + ) + + +def test_file_with_http_part_required_content_type(client: MultiPartClient): + client.form_data.http_parts.content_type.required_content_type( + 
models.FileWithHttpPartRequiredContentTypeRequest( + profile_image=open(str(JPG), "rb"), + ) + ) + + +def test_file_with_http_part_optional_content_type(client: MultiPartClient): + # call twice: one with content type, one without + client.form_data.http_parts.content_type.optional_content_type( + models.FileWithHttpPartOptionalContentTypeRequest( + profile_image=("hello.jpg", open(str(JPG), "rb").read()), + ) + ) + client.form_data.http_parts.content_type.optional_content_type( + models.FileWithHttpPartOptionalContentTypeRequest( + profile_image=( + "hello.jpg", + open(str(JPG), "rb").read(), + "application/octet-stream", + ), + ) + ) + + +def test_complex_with_http_part(client: MultiPartClient): + client.form_data.http_parts.json_array_and_file_array( + models.ComplexHttpPartsModelRequest( + id="123", + previous_addresses=[ + models.Address(city="Y"), + models.Address(city="Z"), + ], + address=models.Address(city="X"), + pictures=[ + open(str(PNG), "rb"), + open(str(PNG), "rb"), + ], + profile_image=open(str(JPG), "rb"), + ) + ) + + +def test_http_parts_non_string_float(client: MultiPartClient): + client.form_data.http_parts.non_string.float(models.FloatRequest(temperature=0.5)) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_payload_pageable.py b/packages/http-client-python/test/generic_mock_api_tests/test_payload_pageable.py new file mode 100644 index 0000000000..a13109a38c --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_payload_pageable.py @@ -0,0 +1,18 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from payload.pageable import PageableClient + + +@pytest.fixture +def client(): + with PageableClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_list(client: PageableClient): + result = list(client.list(maxpagesize=3)) + assert len(result) == 4 diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_payload_xml.py b/packages/http-client-python/test/generic_mock_api_tests/test_payload_xml.py new file mode 100644 index 0000000000..901beb181c --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_payload_xml.py @@ -0,0 +1,93 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from payload.xml import XmlClient +from payload.xml.models import ( + SimpleModel, + ModelWithSimpleArrays, + ModelWithArrayOfModel, + ModelWithAttributes, + ModelWithUnwrappedArray, + ModelWithRenamedFields, + ModelWithEmptyArray, + ModelWithText, + ModelWithDictionary, + ModelWithEncodedNames, +) + + +@pytest.fixture +def client(): + with XmlClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_simple_model(client: XmlClient): + model = SimpleModel(name="foo", age=123) + assert client.simple_model_value.get() == model + client.simple_model_value.put(model) + + +def test_model_with_simple_arrays(client: XmlClient): + model = ModelWithSimpleArrays(colors=["red", "green", "blue"], counts=[1, 2]) + assert client.model_with_simple_arrays_value.get() == model + client.model_with_simple_arrays_value.put(model) + + +def test_model_with_array_of_model(client: XmlClient): + model = ModelWithArrayOfModel( + items_property=[ + SimpleModel(name="foo", age=123), + SimpleModel(name="bar", age=456), + ] + ) + assert client.model_with_array_of_model_value.get() == model + client.model_with_array_of_model_value.put(model) + + +def test_model_with_attributes(client: XmlClient): + model = ModelWithAttributes(id1=123, id2="foo", enabled=True) + assert client.model_with_attributes_value.get() == model + client.model_with_attributes_value.put(model) + + +def test_model_with_unwrapped_array(client: XmlClient): + model = ModelWithUnwrappedArray(colors=["red", "green", "blue"], counts=[1, 2]) + assert client.model_with_unwrapped_array_value.get() == model + client.model_with_unwrapped_array_value.put(model) + + +def test_model_with_renamed_fields(client: XmlClient): + model = ModelWithRenamedFields( + input_data=SimpleModel(name="foo", age=123), + output_data=SimpleModel(name="bar", age=456), + ) + assert client.model_with_renamed_fields_value.get() == model + client.model_with_renamed_fields_value.put(model) + + +def test_model_with_empty_array(client: XmlClient): + model = ModelWithEmptyArray(items_property=[]) + assert client.model_with_empty_array_value.get() == model + client.model_with_empty_array_value.put(model) + + +def test_model_with_text(client: XmlClient): + model = ModelWithText(language="foo", content="\n This is some text.\n") + assert client.model_with_text_value.get() == model + client.model_with_text_value.put(model) + + +def test_model_with_dictionary(client: XmlClient): + model = ModelWithDictionary(metadata={"Color": "blue", "Count": "123", "Enabled": "false"}) + assert client.model_with_dictionary_value.get() == model + client.model_with_dictionary_value.put(model) + + +def test_model_with_encoded_names(client: XmlClient): + model = ModelWithEncodedNames(model_data=SimpleModel(name="foo", age=123), colors=["red", "green", "blue"]) + assert client.model_with_encoded_names_value.get() == model + client.model_with_encoded_names_value.put(model) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_resiliency_srv_driven.py b/packages/http-client-python/test/generic_mock_api_tests/test_resiliency_srv_driven.py new file mode 100644 index 0000000000..ec411ab1b0 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_resiliency_srv_driven.py @@ -0,0 +1,122 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from resiliency.srv.driven1 import ResiliencyServiceDrivenClient as V1Client +from resiliency.srv.driven2 import ResiliencyServiceDrivenClient as V2Client + + +def get_v1_client(service_deployment_version: str, api_version: str = "v1") -> V1Client: + return V1Client( + endpoint="http://localhost:3000", + service_deployment_version=service_deployment_version, + api_version=api_version, + ) + + +def get_v2_client(service_deployment_version: str, api_version: str = "v2") -> V2Client: + return V2Client( + endpoint="http://localhost:3000", + service_deployment_version=service_deployment_version, + api_version=api_version, + ) + + +def test_add_optional_param_from_none(): + # old client to old service with api version v1 + with V1Client(endpoint="http://localhost:3000", service_deployment_version="v1") as client: + client.from_none() + + # old client to new service with api version v1 + with V1Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + client.from_none() + + # new client to new service with api version v1 + with V2Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v1", + ) as client: + client.from_none() + + # new client to new service with api version v2 + with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + client.from_none(new_parameter="new") + + +def test_add_optional_param_from_one_required(): + # old client to old service with api version v1 + with V1Client(endpoint="http://localhost:3000", service_deployment_version="v1") as client: + client.from_one_required(parameter="required") + + # old client to new service with api version v1 + with V1Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + client.from_one_required(parameter="required") + + # new client to new service with api version v1 + with V2Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v1", + ) as client: + client.from_one_required(parameter="required") + + # new client to new service with api version v2 + with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + client.from_one_required(parameter="required", new_parameter="new") + + +def test_add_optional_param_from_one_optional(): + # old client to old service with api version v1 + with V1Client(endpoint="http://localhost:3000", service_deployment_version="v1") as client: + client.from_one_optional(parameter="optional") + + # old client to new service with api version v1 + with V1Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + client.from_one_optional(parameter="optional") + + # new client to new service with api version v1 + with V2Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v1", + ) as client: + client.from_one_optional(parameter="optional") + + # new client to new service with api version v2 + with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + client.from_one_optional(parameter="optional", new_parameter="new") + + +def test_break_the_glass(core_library): + request = core_library.rest.HttpRequest(method="DELETE", url="/add-operation") + with V1Client( + endpoint="http://localhost:3000", + service_deployment_version="v2", + api_version="v2", + ) as 
client: + response = client.send_request(request) + response.raise_for_status() + + +def test_add_operation(): + with V2Client(endpoint="http://localhost:3000", service_deployment_version="v2") as client: + client.add_operation() + + +@pytest.mark.parametrize( + "func_name, params", + [ + ("from_none", {"new_parameter": "new"}), + ("from_one_optional", {"parameter": "optional", "new_parameter": "new"}), + ("from_one_required", {"parameter": "required", "new_parameter": "new"}), + ("add_operation", {}), + ], +) +def test_new_client_with_old_apiversion_call_new_parameter(func_name, params): + client = get_v2_client(service_deployment_version="v2", api_version="v1") + with pytest.raises(ValueError) as ex: + getattr(client, func_name)(**params) + assert "is not available in API version" in str(ex.value) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_serialization_encoded_name_json.py b/packages/http-client-python/test/generic_mock_api_tests/test_serialization_encoded_name_json.py new file mode 100644 index 0000000000..0f61ce86f2 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_serialization_encoded_name_json.py @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from serialization.encodedname.json import JsonClient, models + + +@pytest.fixture +def client(): + with JsonClient() as client: + yield client + + +def test_property_send(client: JsonClient): + client.property.send(models.JsonEncodedNameModel(default_name=True)) + + +def test_property_get(client: JsonClient): + assert client.property.get().default_name diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_server_endpoint_not_defined.py b/packages/http-client-python/test/generic_mock_api_tests/test_server_endpoint_not_defined.py new file mode 100644 index 0000000000..432542dd95 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_server_endpoint_not_defined.py @@ -0,0 +1,17 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from server.endpoint.notdefined import NotDefinedClient + + +@pytest.fixture +def client(): + with NotDefinedClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_valid(client: NotDefinedClient): + assert client.valid() is True diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_server_path_multiple.py b/packages/http-client-python/test/generic_mock_api_tests/test_server_path_multiple.py new file mode 100644 index 0000000000..2a697f142f --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_server_path_multiple.py @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from server.path.multiple import MultipleClient + + +@pytest.fixture +def client(): + with MultipleClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_no_operation_params(client: MultipleClient): + client.no_operation_params() + + +def test_with_operation_path_param(client: MultipleClient): + client.with_operation_path_param(keyword="test") diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_server_path_single.py b/packages/http-client-python/test/generic_mock_api_tests/test_server_path_single.py new file mode 100644 index 0000000000..0ef66c1d0b --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_server_path_single.py @@ -0,0 +1,17 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from server.path.single import SingleClient + + +@pytest.fixture +def client(): + with SingleClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_my_op(client): + assert client.my_op() is True diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_server_versions_not_versioned.py b/packages/http-client-python/test/generic_mock_api_tests/test_server_versions_not_versioned.py new file mode 100644 index 0000000000..47528ab8fc --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_server_versions_not_versioned.py @@ -0,0 +1,25 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from server.versions.notversioned import NotVersionedClient + + +@pytest.fixture +def client(): + with NotVersionedClient(endpoint="http://localhost:3000", api_version="v1.0") as client: + yield client + + +def test_without_api_version(client: NotVersionedClient): + client.without_api_version() + + +def test_with_query_api_version(client: NotVersionedClient): + client.with_query_api_version() + + +def test_with_path_api_version(client: NotVersionedClient): + client.with_path_api_version() diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_server_versions_versioned.py b/packages/http-client-python/test/generic_mock_api_tests/test_server_versions_versioned.py new file mode 100644 index 0000000000..00b7fcd192 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_server_versions_versioned.py @@ -0,0 +1,30 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from server.versions.versioned import VersionedClient + + +@pytest.fixture +def client(): + with VersionedClient(endpoint="http://localhost:3000") as client: + yield client + + +def test_without_api_version(client: VersionedClient): + client.without_api_version() + + +def test_with_query_api_version(client: VersionedClient): + client.with_query_api_version() + + +def test_with_path_api_version(client: VersionedClient): + client.with_path_api_version() + + +def test_with_query_old_api_version(): + with VersionedClient(endpoint="http://localhost:3000", api_version="2021-01-01-preview") as client: + client.with_query_old_api_version() diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_special_headers_conditional_request.py b/packages/http-client-python/test/generic_mock_api_tests/test_special_headers_conditional_request.py new file mode 100644 index 0000000000..f476a1db9d --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_special_headers_conditional_request.py @@ -0,0 +1,34 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +import datetime +from specialheaders.conditionalrequest import ConditionalRequestClient + + +@pytest.fixture +def client(): + with ConditionalRequestClient() as client: + yield client + + +def test_post_if_match(core_library, client: ConditionalRequestClient): + client.post_if_match(etag="valid", match_condition=core_library.MatchConditions.IfNotModified) + + +def test_post_if_none_match(core_library, client: ConditionalRequestClient): + client.post_if_none_match(etag="invalid", match_condition=core_library.MatchConditions.IfModified) + + +def test_head_if_modified_since(client: ConditionalRequestClient): + client.head_if_modified_since( + if_modified_since=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc) + ) + + +def test_post_if_unmodified_since(client: ConditionalRequestClient): + client.post_if_unmodified_since( + if_unmodified_since=datetime.datetime(2022, 8, 26, 14, 38, 0, tzinfo=datetime.timezone.utc) + ) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_special_headers_repeatability.py b/packages/http-client-python/test/generic_mock_api_tests/test_special_headers_repeatability.py new file mode 100644 index 0000000000..b35c8a1f16 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_special_headers_repeatability.py @@ -0,0 +1,18 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from specialheaders.repeatability import RepeatabilityClient + + +@pytest.fixture +def client(): + with RepeatabilityClient() as client: + yield client + + +def test_immediate_success(client: RepeatabilityClient): + cls = lambda x, y, z: z + assert client.immediate_success(cls=cls)["Repeatability-Result"] == "accepted" diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_special_words.py b/packages/http-client-python/test/generic_mock_api_tests/test_special_words.py new file mode 100644 index 0000000000..2b4dcb7491 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_special_words.py @@ -0,0 +1,37 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from specialwords import SpecialWordsClient, models + + +@pytest.fixture +def client(): + with SpecialWordsClient() as client: + yield client + + +def test_operations(client: SpecialWordsClient, special_words): + for sw in special_words: + suffix = "" if sw == "constructor" else "_method" + getattr(client.operations, sw + suffix)() + + +def test_parameter(client: SpecialWordsClient, special_words): + for sw in special_words: + suffix = "" if sw == "constructor" else "_parameter" + getattr(client.parameters, "with_" + sw)(**{sw + suffix: "ok"}) + client.parameters.with_cancellation_token(cancellation_token="ok") + + +def test_model(client: SpecialWordsClient, special_words): + for sw in special_words: + suffix = "" if sw == "constructor" else "Model" + model = getattr(models, sw.capitalize() + suffix) + getattr(client.models, "with_" + sw)(model(name="ok")) + + +def test_model_properties(client: SpecialWordsClient): + client.model_properties.same_as_model(models.SameAsModel(same_as_model="ok")) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_array.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_array.py new file mode 100644 index 0000000000..0ab85a86f8 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_array.py @@ -0,0 +1,103 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import pytest +import isodate +from typetest.array import ArrayClient, models + + +@pytest.fixture +def client(): + with ArrayClient() as client: + yield client + + +def test_boolean_value(client: ArrayClient): + assert client.boolean_value.get() == [True, False] + client.boolean_value.put([True, False]) + + +def test_datetime_value(client: ArrayClient): + assert client.datetime_value.get() == [isodate.parse_datetime("2022-08-26T18:38:00Z")] + client.datetime_value.put([isodate.parse_datetime("2022-08-26T18:38:00Z")]) + + +def test_duration_value(client: ArrayClient): + assert client.duration_value.get() == [isodate.parse_duration("P123DT22H14M12.011S")] + client.duration_value.put([isodate.parse_duration("P123DT22H14M12.011S")]) + + +def test_float32_value(client: ArrayClient): + assert client.float32_value.get() == [43.125] + client.float32_value.put([43.125]) + + +def test_int32_value(client: ArrayClient): + assert client.int32_value.get() == [1, 2] + client.int32_value.put([1, 2]) + + +def test_int64_value(client: ArrayClient): + assert client.int64_value.get() == [2**53 - 1, -(2**53 - 1)] + client.int64_value.put([2**53 - 1, -(2**53 - 1)]) + + +def test_model_value(client: ArrayClient): + assert client.model_value.get() == [ + models.InnerModel(property="hello"), + models.InnerModel(property="world"), + ] + client.model_value.put( + [ + models.InnerModel(property="hello"), + models.InnerModel(property="world"), + ] + ) + + +def test_nullable_boolean_value(client: ArrayClient): + assert client.nullable_boolean_value.get() == [True, None, False] + client.nullable_boolean_value.put([True, None, False]) + + +def test_nullable_float_value(client: ArrayClient): + assert client.nullable_float_value.get() == [1.25, None, 3.0] + client.nullable_float_value.put([1.25, None, 3.0]) + + +def test_nullable_int32_value(client: ArrayClient): + assert client.nullable_int32_value.get() == [1, None, 3] + client.nullable_int32_value.put([1, None, 3]) + + +def test_nullable_model_value(client: ArrayClient): + assert client.nullable_model_value.get() == [ + models.InnerModel(property="hello"), + None, + models.InnerModel(property="world"), + ] + client.nullable_model_value.put( + [ + models.InnerModel(property="hello"), + None, + models.InnerModel(property="world"), + ] + ) + + +def test_nullable_string_value(client: ArrayClient): + assert client.nullable_string_value.get() == ["hello", None, "world"] + client.nullable_string_value.put(["hello", None, "world"]) + + +def test_string_value(client: ArrayClient): + assert client.string_value.get() == ["hello", ""] + client.string_value.put(["hello", ""]) + + +def test_unknown_value(client: ArrayClient): + assert client.unknown_value.get() == [1, "hello", None] + client.unknown_value.put([1, "hello", None]) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_dictionary.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_dictionary.py new file mode 100644 index 0000000000..c9eadd9eba --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_dictionary.py @@ -0,0 +1,89 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.dictionary import DictionaryClient, models +import isodate + + +@pytest.fixture +def client(): + with DictionaryClient() as client: + yield client + + +def test_boolean_value(client: DictionaryClient): + value = {"k1": True, "k2": False} + assert client.boolean_value.get() == value + client.boolean_value.put(value) + + +def test_datetime_value(client: DictionaryClient): + value = {"k1": isodate.parse_datetime("2022-08-26T18:38:00Z")} + assert client.datetime_value.get() == value + client.datetime_value.put(value) + + +def test_duration_value(client: DictionaryClient): + value = {"k1": isodate.parse_duration("P123DT22H14M12.011S")} + assert client.duration_value.get() == value + client.duration_value.put(value) + + +def test_float32_value(client: DictionaryClient): + value = {"k1": 43.125} + assert client.float32_value.get() == value + client.float32_value.put(value) + + +def test_int32_value(client: DictionaryClient): + value = {"k1": 1, "k2": 2} + assert client.int32_value.get() == value + client.int32_value.put(value) + + +def test_int64_value(client: DictionaryClient): + value = {"k1": 2**53 - 1, "k2": -(2**53 - 1)} + assert client.int64_value.get() == value + client.int64_value.put(value) + + +def test_model_value(client: DictionaryClient): + value = { + "k1": models.InnerModel(property="hello"), + "k2": models.InnerModel(property="world"), + } + assert client.model_value.get() == value + client.model_value.put(value) + + +def test_nullable_float_value(client: DictionaryClient): + value = {"k1": 1.25, "k2": 0.5, "k3": None} + assert client.nullable_float_value.get() == value + client.nullable_float_value.put(value) + + +def test_recursive_model_value(client: DictionaryClient): + value = { + "k1": models.InnerModel(property="hello", children={}), + "k2": models.InnerModel( + property="world", + children={"k2.1": models.InnerModel(property="inner world")}, + ), + } + assert client.recursive_model_value.get() == value + client.recursive_model_value.put(value) + + +def test_string_value(client: DictionaryClient): + value = {"k1": "hello", "k2": ""} + assert client.string_value.get() == value + client.string_value.put(value) + + +def test_unknown_value(client: DictionaryClient): + value = {"k1": 1, "k2": "hello", "k3": None} + assert client.unknown_value.get() == value + client.unknown_value.put(value) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_enum_extensible.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_enum_extensible.py new file mode 100644 index 0000000000..b6bcf593e6 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_enum_extensible.py @@ -0,0 +1,23 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+import pytest
+from typetest.enum.extensible import ExtensibleClient, models
+
+
+@pytest.fixture
+def client():
+    with ExtensibleClient() as client:
+        yield client
+
+
+def test_known_value(client):
+    assert client.string.get_known_value() == models.DaysOfWeekExtensibleEnum.MONDAY
+    client.string.put_known_value(models.DaysOfWeekExtensibleEnum.MONDAY)
+
+
+def test_unknown_value(client):
+    assert client.string.get_unknown_value() == "Weekend"
+    client.string.put_unknown_value("Weekend")
diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_enum_fixed.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_enum_fixed.py
new file mode 100644
index 0000000000..0d085d0200
--- /dev/null
+++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_enum_fixed.py
@@ -0,0 +1,25 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+import pytest
+from typetest.enum.fixed import FixedClient, models
+
+
+@pytest.fixture
+def client():
+    with FixedClient() as client:
+        yield client
+
+
+def test_known_value(client):
+    assert client.string.get_known_value() == models.DaysOfWeekEnum.MONDAY
+    client.string.put_known_value(models.DaysOfWeekEnum.MONDAY)
+
+
+def test_unknown_value(client: FixedClient, core_library):
+    # sending a value outside the fixed enum must fail; assert the error is raised
+    with pytest.raises(core_library.exceptions.HttpResponseError) as exc_info:
+        client.string.put_unknown_value("Weekend")
+    assert exc_info.value.status_code == 500
diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_empty.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_empty.py
new file mode 100644
index 0000000000..1ef7fcb5e2
--- /dev/null
+++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_empty.py
@@ -0,0 +1,29 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- +import pytest +from typetest.model.empty import EmptyClient +from typetest.model.empty.models import EmptyInput, EmptyOutput, EmptyInputOutput + + +@pytest.fixture +def client(): + with EmptyClient() as client: + yield client + + +def test_put(client: EmptyClient): + client.put_empty(EmptyInput()) + client.put_empty({}) + + +def test_get(client: EmptyClient): + assert client.get_empty() == EmptyOutput() + assert client.get_empty() == {} + + +def test_post_round(client: EmptyClient): + assert client.post_round_trip_empty(EmptyInputOutput()) == EmptyInputOutput() + assert client.post_round_trip_empty({}) == {} diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_enum_discriminator.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_enum_discriminator.py new file mode 100644 index 0000000000..6d752166b4 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_enum_discriminator.py @@ -0,0 +1,58 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.enumdiscriminator import EnumDiscriminatorClient +from typetest.model.enumdiscriminator import models + + +@pytest.fixture +def client(): + with EnumDiscriminatorClient() as client: + yield client + + +@pytest.fixture +def valid_body(): + return models.Golden(weight=10) + + +@pytest.fixture +def valid_fixed_body(): + return models.Cobra(length=10) + + +def test_get_extensible_model(client: EnumDiscriminatorClient, valid_body: models.Dog): + assert client.get_extensible_model() == valid_body + assert isinstance(client.get_extensible_model(), models.Golden) + + +def test_put_extensible_model(client: EnumDiscriminatorClient, valid_body: models.Dog): + client.put_extensible_model(valid_body) + + +def test_get_extensible_model_missing_discriminator(client: EnumDiscriminatorClient): + assert client.get_extensible_model_missing_discriminator() == models.Dog(weight=10) + + +def test_get_extensible_model_wrong_discriminator(client: EnumDiscriminatorClient): + assert client.get_extensible_model_wrong_discriminator() == models.Dog(weight=8, kind="wrongKind") + + +def test_get_fixed_model(client: EnumDiscriminatorClient, valid_fixed_body: models.Snake): + assert client.get_fixed_model() == valid_fixed_body + assert isinstance(client.get_fixed_model(), models.Cobra) + + +def test_put_fixed_model(client: EnumDiscriminatorClient, valid_fixed_body: models.Snake): + client.put_fixed_model(valid_fixed_body) + + +def test_get_fixed_model_missing_discriminator(client: EnumDiscriminatorClient): + assert client.get_fixed_model_missing_discriminator() == models.Snake(length=10) + + +def test_get_fixed_model_wrong_discriminator(client: EnumDiscriminatorClient): + assert client.get_fixed_model_wrong_discriminator() == models.Snake(length=8, kind="wrongKind") diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_nested_discriminator.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_nested_discriminator.py new file mode 100644 index 0000000000..8ea1366afa --- /dev/null +++ 
b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_nested_discriminator.py @@ -0,0 +1,79 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.nesteddiscriminator import NestedDiscriminatorClient +from typetest.model.nesteddiscriminator.models import GoblinShark, Salmon, Fish + + +@pytest.fixture +def client(): + with NestedDiscriminatorClient() as client: + yield client + + +@pytest.fixture +def valid_body(): + return GoblinShark(age=1) + + +def test_get_model(client, valid_body): + assert client.get_model() == valid_body + assert isinstance(client.get_model(), GoblinShark) + + +def test_put_model(client, valid_body): + client.put_model(valid_body) + + +@pytest.fixture +def valid_recursive_body(): + return Salmon( + { + "age": 1, + "kind": "salmon", + "partner": {"age": 2, "kind": "shark", "sharktype": "saw"}, + "friends": [ + { + "age": 2, + "kind": "salmon", + "partner": {"age": 3, "kind": "salmon"}, + "hate": { + "key1": {"age": 4, "kind": "salmon"}, + "key2": {"age": 2, "kind": "shark", "sharktype": "goblin"}, + }, + }, + {"age": 3, "kind": "shark", "sharktype": "goblin"}, + ], + "hate": { + "key3": {"age": 3, "kind": "shark", "sharktype": "saw"}, + "key4": { + "age": 2, + "kind": "salmon", + "friends": [ + {"age": 1, "kind": "salmon"}, + {"age": 4, "kind": "shark", "sharktype": "goblin"}, + ], + }, + }, + } + ) + + +def test_get_recursive_model(client, valid_recursive_body): + assert valid_recursive_body == client.get_recursive_model() + assert isinstance(client.get_recursive_model(), Salmon) + + +def test_put_recursive_model(client, valid_recursive_body): + client.put_recursive_model(valid_recursive_body) + + +def test_get_missing_discriminator(client): + assert client.get_missing_discriminator() == Fish(age=1) + + +def test_get_wrong_discriminator(client): + assert client.get_wrong_discriminator() == Fish(age=1, kind="wrongKind") diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_not_discriminated.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_not_discriminated.py new file mode 100644 index 0000000000..e9a1fa4752 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_not_discriminated.py @@ -0,0 +1,31 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.model.notdiscriminated import NotDiscriminatedClient +from typetest.model.notdiscriminated.models import Siamese + + +@pytest.fixture +def client(): + with NotDiscriminatedClient() as client: + yield client + + +@pytest.fixture +def valid_body(): + return Siamese(name="abc", age=32, smart=True) + + +def test_get_valid(client, valid_body): + assert client.get_valid() == valid_body + + +def test_post_valid(client, valid_body): + client.post_valid(valid_body) + + +def test_put_valid(client, valid_body): + assert valid_body == client.put_valid(valid_body) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_recursive.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_recursive.py new file mode 100644 index 0000000000..1aa0758e34 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_recursive.py @@ -0,0 +1,32 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.recursive import RecursiveClient +from typetest.model.recursive.models import Extension + + +@pytest.fixture +def client(): + with RecursiveClient() as client: + yield client + + +@pytest.fixture +def expected(): + return Extension( + { + "level": 0, + "extension": [{"level": 1, "extension": [{"level": 2}]}, {"level": 1}], + } + ) + + +def test_put(client: RecursiveClient, expected: Extension): + client.put(expected) + + +def test_get(client: RecursiveClient, expected: Extension): + assert client.get() == expected diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_single_discriminator.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_single_discriminator.py new file mode 100644 index 0000000000..86435d5486 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_inheritance_single_discriminator.py @@ -0,0 +1,60 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.model.singlediscriminator import SingleDiscriminatorClient +from typetest.model.singlediscriminator.models import Sparrow, Eagle, Bird, Dinosaur + + +@pytest.fixture +def client(): + with SingleDiscriminatorClient() as client: + yield client + + +@pytest.fixture +def valid_body(): + return Sparrow(wingspan=1) + + +def test_get_model(client, valid_body): + assert client.get_model() == valid_body + + +def test_put_model(client, valid_body): + client.put_model(valid_body) + + +@pytest.fixture +def recursive_body(): + return Eagle( + { + "wingspan": 5, + "kind": "eagle", + "partner": {"wingspan": 2, "kind": "goose"}, + "friends": [{"wingspan": 2, "kind": "seagull"}], + "hate": {"key3": {"wingspan": 1, "kind": "sparrow"}}, + } + ) + + +def test_get_recursive_model(client, recursive_body): + assert client.get_recursive_model() == recursive_body + + +def test_put_recursive_model(client, recursive_body): + client.put_recursive_model(recursive_body) + + +def test_get_missing_discriminator(client): + assert client.get_missing_discriminator() == Bird(wingspan=1) + + +def test_get_wrong_discriminator(client): + assert client.get_wrong_discriminator() == Bird(wingspan=1, kind="wrongKind") + + +def test_get_legacy_model(client): + assert client.get_legacy_model() == Dinosaur(size=20, kind="t-rex") diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_usage.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_usage.py new file mode 100644 index 0000000000..c9ef0e63e7 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_usage.py @@ -0,0 +1,28 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.model.usage import UsageClient, models + + +@pytest.fixture +def client(): + with UsageClient() as client: + yield client + + +def test_input(client: UsageClient): + input = models.InputRecord(required_prop="example-value") + assert client.input(input) is None + + +def test_output(client: UsageClient): + output = models.OutputRecord(required_prop="example-value") + assert output == client.output() + + +def test_input_and_output(client: UsageClient): + input_output = models.InputOutputRecord(required_prop="example-value") + assert input_output == client.input_and_output(input_output) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_visibility.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_visibility.py new file mode 100644 index 0000000000..8f06c07662 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_model_visibility.py @@ -0,0 +1,40 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.model.visibility import VisibilityClient, models + + +@pytest.fixture +def client(): + with VisibilityClient() as client: + yield client + + +def test_get_model(client): + result = client.get_model(models.VisibilityModel(query_prop=123)) + assert result == models.VisibilityModel(read_prop="abc") + + +def test_put_model(client): + client.put_model(models.VisibilityModel(create_prop=["foo", "bar"], update_prop=[1, 2])) + + +def test_patch_model(client): + client.patch_model(models.VisibilityModel(update_prop=[1, 2])) + + +def test_post_model(client): + client.post_model(models.VisibilityModel(create_prop=["foo", "bar"])) + + +def test_delete_model(client): + client.delete_model(models.VisibilityModel(delete_prop=True)) + + +def test_put_read_only_model(client): + client.put_read_only_model( + models.ReadOnlyModel(optional_nullable_int_list=[1, 2], optional_string_record={"foo", "bar"}) + ) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_additionalproperties.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_additionalproperties.py new file mode 100644 index 0000000000..b32a662599 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_additionalproperties.py @@ -0,0 +1,313 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from typetest.property.additionalproperties import AdditionalPropertiesClient, models + + +@pytest.fixture +def client(): + with AdditionalPropertiesClient() as client: + yield client + + +def test_extends_different_spread_float(client: AdditionalPropertiesClient): + body = models.DifferentSpreadFloatDerived({"name": "abc", "prop": 43.125, "derivedProp": 43.125}) + assert client.extends_different_spread_float.get() == body + client.extends_different_spread_float.put(body) + + +def test_extends_different_spread_model(client: AdditionalPropertiesClient): + body = models.DifferentSpreadModelDerived( + {"knownProp": "abc", "prop": {"state": "ok"}, "derivedProp": {"state": "ok"}} + ) + assert client.extends_different_spread_model.get() == body + client.extends_different_spread_model.put(body) + + +def test_extends_different_spread_model_array(client: AdditionalPropertiesClient): + body = models.DifferentSpreadModelArrayDerived( + { + "knownProp": "abc", + "prop": [{"state": "ok"}, {"state": "ok"}], + "derivedProp": [{"state": "ok"}, {"state": "ok"}], + } + ) + assert client.extends_different_spread_model_array.get() == body + client.extends_different_spread_model_array.put(body) + + +def test_extends_different_spread_string(client: AdditionalPropertiesClient): + body = models.DifferentSpreadStringDerived({"id": 43.125, "prop": "abc", "derivedProp": "abc"}) + assert client.extends_different_spread_string.get() == body + client.extends_different_spread_string.put(body) + + +def test_extends_float(client: AdditionalPropertiesClient): + body = models.ExtendsFloatAdditionalProperties({"id": 43.125, "prop": 43.125}) + assert client.extends_float.get() == body + client.extends_float.put(body) + + +def test_extends_model(client: AdditionalPropertiesClient): + body = 
models.ExtendsModelAdditionalProperties({"knownProp": {"state": "ok"}, "prop": {"state": "ok"}}) + assert client.extends_model.get() == body + client.extends_model.put(body) + + +def test_extends_model_array(client: AdditionalPropertiesClient): + body = models.ExtendsModelArrayAdditionalProperties( + { + "knownProp": [{"state": "ok"}, {"state": "ok"}], + "prop": [{"state": "ok"}, {"state": "ok"}], + } + ) + assert client.extends_model_array.get() == body + client.extends_model_array.put(body) + + +def test_extends_string(client: AdditionalPropertiesClient): + body = models.ExtendsStringAdditionalProperties({"name": "ExtendsStringAdditionalProperties", "prop": "abc"}) + assert client.extends_string.get() == body + client.extends_string.put(body) + + +def test_extends_unknown(client: AdditionalPropertiesClient): + body = models.ExtendsUnknownAdditionalProperties( + { + "name": "ExtendsUnknownAdditionalProperties", + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert client.extends_unknown.get() == body + client.extends_unknown.put(body) + + +def test_extends_unknown_derived(client: AdditionalPropertiesClient): + body = models.ExtendsUnknownAdditionalPropertiesDerived( + { + "name": "ExtendsUnknownAdditionalProperties", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert client.extends_unknown_derived.get() == body + client.extends_unknown_derived.put(body) + + +def test_extends_unknown_discriminated(client: AdditionalPropertiesClient): + body = models.ExtendsUnknownAdditionalPropertiesDiscriminatedDerived( + { + "kind": "derived", + "name": "Derived", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert client.extends_unknown_discriminated.get() == body + client.extends_unknown_discriminated.put(body) + + +def test_is_float(client: AdditionalPropertiesClient): + body = models.IsFloatAdditionalProperties({"id": 43.125, "prop": 43.125}) + assert client.is_float.get() == body + client.is_float.put(body) + + +def test_is_model(client: AdditionalPropertiesClient): + body = models.IsModelAdditionalProperties({"knownProp": {"state": "ok"}, "prop": {"state": "ok"}}) + assert client.is_model.get() == body + client.is_model.put(body) + + +def test_is_model_array(client: AdditionalPropertiesClient): + body = models.IsModelArrayAdditionalProperties( + { + "knownProp": [{"state": "ok"}, {"state": "ok"}], + "prop": [{"state": "ok"}, {"state": "ok"}], + } + ) + assert client.is_model_array.get() == body + client.is_model_array.put(body) + + +def test_is_string(client: AdditionalPropertiesClient): + body = models.IsStringAdditionalProperties({"name": "IsStringAdditionalProperties", "prop": "abc"}) + assert client.is_string.get() == body + client.is_string.put(body) + + +def test_is_unknown(client: AdditionalPropertiesClient): + body = models.IsUnknownAdditionalProperties( + { + "name": "IsUnknownAdditionalProperties", + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert client.is_unknown.get() == body + client.is_unknown.put(body) + + +def test_is_unknown_derived(client: AdditionalPropertiesClient): + body = models.IsUnknownAdditionalPropertiesDerived( + { + "name": "IsUnknownAdditionalProperties", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert client.is_unknown_derived.get() == body + client.is_unknown_derived.put(body) + + +def test_is_unknown_discriminated(client: AdditionalPropertiesClient): + body = 
models.IsUnknownAdditionalPropertiesDiscriminatedDerived( + { + "kind": "derived", + "name": "Derived", + "index": 314, + "age": 2.71875, + "prop1": 32, + "prop2": True, + "prop3": "abc", + } + ) + assert client.is_unknown_discriminated.get() == body + client.is_unknown_discriminated.put(body) + + +def test_multiple_spread(client: AdditionalPropertiesClient): + body = {"flag": True, "prop1": "abc", "prop2": 43.125} + assert client.multiple_spread.get() == body + client.multiple_spread.put(body) + + +def test_spread_different_float(client: AdditionalPropertiesClient): + body = {"name": "abc", "prop": 43.125} + assert client.spread_different_float.get() == body + client.spread_different_float.put(body) + + +def test_spread_different_model(client: AdditionalPropertiesClient): + body = {"knownProp": "abc", "prop": {"state": "ok"}} + assert client.spread_different_model.get() == body + client.spread_different_model.put(body) + + +def test_spread_different_model_array(client: AdditionalPropertiesClient): + body = {"knownProp": "abc", "prop": [{"state": "ok"}, {"state": "ok"}]} + assert client.spread_different_model_array.get() == body + client.spread_different_model_array.put(body) + + +def test_spread_different_string(client: AdditionalPropertiesClient): + body = {"id": 43.125, "prop": "abc"} + assert client.spread_different_string.get() == body + client.spread_different_string.put(body) + + +def test_spread_model(client: AdditionalPropertiesClient): + body = {"knownProp": {"state": "ok"}, "prop": {"state": "ok"}} + assert client.spread_model.get() == body + client.spread_model.put(body) + + +def test_spread_model_array(client: AdditionalPropertiesClient): + body = { + "knownProp": [{"state": "ok"}, {"state": "ok"}], + "prop": [{"state": "ok"}, {"state": "ok"}], + } + assert client.spread_model_array.get() == body + client.spread_model_array.put(body) + + +def test_spread_record_discriminated_union(client: AdditionalPropertiesClient): + body = { + "name": "abc", + "prop1": {"fooProp": "abc", "kind": "kind0"}, + "prop2": { + "end": "2021-01-02T00:00:00Z", + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + }, + } + assert client.spread_record_discriminated_union.get() == body + client.spread_record_discriminated_union.put(body) + + +def test_spread_record_non_discriminated_union(client: AdditionalPropertiesClient): + body = { + "name": "abc", + "prop1": {"kind": "kind0", "fooProp": "abc"}, + "prop2": { + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + "end": "2021-01-02T00:00:00Z", + }, + } + assert client.spread_record_non_discriminated_union.get() == body + client.spread_record_non_discriminated_union.put(body) + + +def test_spread_record_non_discriminated_union2(client: AdditionalPropertiesClient): + body = { + "name": "abc", + "prop1": {"kind": "kind1", "start": "2021-01-01T00:00:00Z"}, + "prop2": { + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + "end": "2021-01-02T00:00:00Z", + }, + } + assert client.spread_record_non_discriminated_union2.get() == body + client.spread_record_non_discriminated_union2.put(body) + + +def test_spread_record_non_discriminated_union3(client: AdditionalPropertiesClient): + body = { + "name": "abc", + "prop1": [ + {"kind": "kind1", "start": "2021-01-01T00:00:00Z"}, + {"kind": "kind1", "start": "2021-01-01T00:00:00Z"}, + ], + "prop2": { + "kind": "kind1", + "start": "2021-01-01T00:00:00Z", + "end": "2021-01-02T00:00:00Z", + }, + } + assert client.spread_record_non_discriminated_union3.get() == body + 
client.spread_record_non_discriminated_union3.put(body) + + +def test_spread_record_union(client: AdditionalPropertiesClient): + body = {"flag": True, "prop1": "abc", "prop2": 43.125} + assert client.spread_record_union.get() == body + client.spread_record_union.put(body) + + +def test_spread_string(client: AdditionalPropertiesClient): + body = {"name": "SpreadSpringRecord", "prop": "abc"} + assert client.spread_string.get() == body + client.spread_string.put(body) + + +def test_spread_float(client: AdditionalPropertiesClient): + body = {"id": 43.125, "prop": 43.125} + assert client.spread_float.get() == body + client.spread_float.put(body) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_nullable.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_nullable.py new file mode 100644 index 0000000000..c69f89d488 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_nullable.py @@ -0,0 +1,102 @@ +# cspell: ignore Hdvcmxk +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import json +import pytest +from typetest.property.nullable import NullableClient, models +from typetest.property.nullable._model_base import ( # pylint: disable=protected-access + SdkJSONEncoder, +) + +try: + from corehttp.serialization import NULL +except ImportError: + from azure.core.serialization import NULL + + +@pytest.fixture +def client(): + with NullableClient() as client: + yield client + + +def test_bytes(client: NullableClient): + non_null_model = models.BytesProperty(required_property="foo", nullable_property="aGVsbG8sIHdvcmxkIQ==") + non_model = models.BytesProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert client.bytes.get_non_null() == non_null_model + assert client.bytes.get_null()["nullableProperty"] is None + client.bytes.patch_non_null(body=non_null_model) + client.bytes.patch_null(body=non_model) + + +def test_collections_byte(client: NullableClient): + non_null_model = models.CollectionsByteProperty( + required_property="foo", + nullable_property=["aGVsbG8sIHdvcmxkIQ==", "aGVsbG8sIHdvcmxkIQ=="], + ) + non_model = models.CollectionsByteProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert client.collections_byte.get_non_null() == non_null_model + assert client.collections_byte.get_null()["nullableProperty"] is None + client.collections_byte.patch_non_null(body=non_null_model) + client.collections_byte.patch_null(body=non_model) + + +def test_collections_model(client: NullableClient): + non_null_model = models.CollectionsModelProperty( + required_property="foo", + nullable_property=[ + models.InnerModel(property="hello"), + models.InnerModel(property="world"), + ], + ) + non_model = models.CollectionsModelProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert client.collections_model.get_non_null() == non_null_model + assert 
client.collections_model.get_null()["nullableProperty"] is None + client.collections_model.patch_non_null(body=non_null_model) + client.collections_model.patch_null(body=non_model) + + +def test_collections_string(client: NullableClient): + non_null_model = models.CollectionsStringProperty(required_property="foo", nullable_property=["hello", "world"]) + non_model = models.CollectionsStringProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert client.collections_string.get_non_null() == non_null_model + assert client.collections_string.get_null()["nullableProperty"] is None + client.collections_string.patch_non_null(body=non_null_model) + client.collections_string.patch_null(body=non_model) + + +def test_datetime(client: NullableClient): + non_null_model = models.DatetimeProperty(required_property="foo", nullable_property="2022-08-26T18:38:00Z") + non_model = models.DatetimeProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert client.datetime.get_non_null() == non_null_model + assert client.datetime.get_null()["nullableProperty"] is None + client.datetime.patch_non_null(body=non_null_model) + client.datetime.patch_null(body=non_model) + + +def test_duration(client: NullableClient): + non_null_model = models.DurationProperty(required_property="foo", nullable_property="P123DT22H14M12.011S") + non_model = models.DurationProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert client.duration.get_non_null() == non_null_model + assert client.duration.get_null()["nullableProperty"] is None + client.duration.patch_non_null(body=non_null_model) + client.duration.patch_null(body=non_model) + + +def test_string(client: NullableClient): + non_null_model = models.StringProperty(required_property="foo", nullable_property="hello") + non_model = models.StringProperty(required_property="foo", nullable_property=NULL) + assert '{"requiredProperty": "foo", "nullableProperty": null}' == json.dumps(non_model, cls=SdkJSONEncoder) + assert client.string.get_non_null() == non_null_model + assert client.string.get_null()["nullableProperty"] is None + client.string.patch_non_null(body=non_null_model) + client.string.patch_null(body=non_model) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_optional.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_optional.py new file mode 100644 index 0000000000..84836538d0 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_optional.py @@ -0,0 +1,174 @@ +# cspell: ignore Hdvcmxk +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Any +import pytest +from typetest.property.optional import OptionalClient, models + + +@pytest.fixture +def client(): + with OptionalClient() as client: + yield client + + +def test_boolean_literal(client): + body = models.BooleanLiteralProperty(property=True) + assert client.boolean_literal.get_all() == body + assert client.boolean_literal.get_default() == models.BooleanLiteralProperty() + client.boolean_literal.put_all(body) + client.boolean_literal.put_default(models.BooleanLiteralProperty()) + + +def test_bytes(client): + body = models.BytesProperty(property="aGVsbG8sIHdvcmxkIQ==") + assert client.bytes.get_all() == body + assert client.bytes.get_default() == models.BytesProperty() + client.bytes.put_all(body) + client.bytes.put_default(models.BytesProperty()) + + +def test_collections_byte(client): + body = models.CollectionsByteProperty(property=["aGVsbG8sIHdvcmxkIQ==", "aGVsbG8sIHdvcmxkIQ=="]) + assert client.collections_byte.get_all() == body + assert client.collections_byte.get_default() == models.CollectionsByteProperty() + client.collections_byte.put_all(body) + client.collections_byte.put_default(models.CollectionsByteProperty()) + + +def test_collections_model(client): + body = models.CollectionsModelProperty( + property=[ + models.StringProperty(property="hello"), + models.StringProperty(property="world"), + ] + ) + assert client.collections_model.get_all() == body + assert client.collections_model.get_default() == models.CollectionsModelProperty() + client.collections_model.put_all(body) + client.collections_model.put_default(models.CollectionsModelProperty()) + + +def test_datetime(client): + body = models.DatetimeProperty(property="2022-08-26T18:38:00Z") + assert client.datetime.get_all() == body + assert client.datetime.get_default() == models.DatetimeProperty() + client.datetime.put_all(body) + client.datetime.put_default(models.DatetimeProperty()) + + +def test_duration(client): + body = models.DurationProperty(property="P123DT22H14M12.011S") + assert client.duration.get_all() == body + assert client.duration.get_default() == models.DurationProperty() + client.duration.put_all(body) + client.duration.put_default(models.DurationProperty()) + + +def test_float_literal(client): + body = models.FloatLiteralProperty(property=1.25) + assert client.float_literal.get_all() == body + assert client.float_literal.get_default() == models.FloatLiteralProperty() + client.float_literal.put_all(body) + client.float_literal.put_default(models.FloatLiteralProperty()) + + +def test_int_literal(client): + body = models.IntLiteralProperty(property=1) + assert client.int_literal.get_all() == body + assert client.int_literal.get_default() == models.IntLiteralProperty() + client.int_literal.put_all(body) + client.int_literal.put_default(models.IntLiteralProperty()) + + +def test_plaindate_get_all(client): + body = models.PlainDateProperty(property="2022-12-12") + assert client.plain_date.get_all() == body + + +def test_plaindate_get_default(client): + assert client.plain_date.get_default() == models.PlainDateProperty() + + +def test_plaindate_put_all(client): + body = models.PlainDateProperty(property="2022-12-12") + client.plain_date.put_all(body) + + +def test_plaindate_put_default(client): + client.plain_date.put_default(models.PlainDateProperty()) + + +def test_plaintime_get_all(client): + body = models.PlainTimeProperty(property="13:06:12") + assert client.plain_time.get_all() == body + + 
+def test_plaintime_get_default(client): + assert client.plain_time.get_default() == models.PlainTimeProperty() + + +def test_plaintime_put_all(client): + body = models.PlainTimeProperty(property="13:06:12") + client.plain_time.put_all(body) + + +def test_plaintime_put_default(client): + client.plain_time.put_default(models.PlainTimeProperty()) + + +def test_required_and_optional(client): + all_body = { + "optionalProperty": "hello", + "requiredProperty": 42, + } + required_only_body = { + "requiredProperty": 42, + } + assert client.required_and_optional.get_all() == all_body + assert client.required_and_optional.get_required_only() == required_only_body + client.required_and_optional.put_all(all_body) + client.required_and_optional.put_required_only(required_only_body) + + +def test_string(client): + body = models.StringProperty(property="hello") + assert client.string.get_all() == body + assert client.string.get_default() == models.StringProperty() + client.string.put_all(body) + client.string.put_default(models.StringProperty()) + + +def test_string_literal(client): + body = models.StringLiteralProperty(property="hello") + assert client.string_literal.get_all() == body + assert client.string_literal.get_default() == models.StringLiteralProperty() + client.string_literal.put_all(body) + client.string_literal.put_default(models.StringLiteralProperty()) + + +def test_union_float_literal(client): + body = models.UnionFloatLiteralProperty(property=2.375) + assert client.union_float_literal.get_all() == body + assert client.union_float_literal.get_default() == models.UnionFloatLiteralProperty() + client.union_float_literal.put_all(body) + client.union_float_literal.put_default(models.UnionFloatLiteralProperty()) + + +def test_union_int_literal(client): + body = models.UnionIntLiteralProperty(property=2) + assert client.union_int_literal.get_all() == body + assert client.union_int_literal.get_default() == models.UnionIntLiteralProperty() + client.union_int_literal.put_all(body) + client.union_int_literal.put_default(models.UnionIntLiteralProperty()) + + +def test_union_string_literal(client): + body = models.UnionStringLiteralProperty(property="world") + assert client.union_string_literal.get_all() == body + assert client.union_string_literal.get_default() == models.UnionStringLiteralProperty() + client.union_string_literal.put_all(body) + client.union_string_literal.put_default(models.UnionStringLiteralProperty()) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_valuetypes.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_valuetypes.py new file mode 100644 index 0000000000..faabc4c707 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_property_valuetypes.py @@ -0,0 +1,286 @@ +# cspell: ignore Hdvcmxk +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+import decimal
+
+import pytest
+import datetime
+from typetest.property.valuetypes import ValueTypesClient, models
+
+
+@pytest.fixture
+def client():
+    with ValueTypesClient() as client:
+        yield client
+
+
+def test_boolean(client: ValueTypesClient):
+    body = models.BooleanProperty(property=True)
+    assert body.property == body["property"]
+    client.boolean.put(body)
+
+    resp = client.boolean.get()
+    assert resp.property == resp["property"] == True
+
+
+def test_boolean_literal(client: ValueTypesClient):
+    body = models.BooleanLiteralProperty(property=True)
+    assert body.property == body["property"]
+    client.boolean_literal.put(body)
+
+    resp = client.boolean_literal.get()
+    assert resp.property == resp["property"] == True
+
+
+def test_bytes(client: ValueTypesClient):
+    body = models.BytesProperty(property=b"hello, world!")
+    assert body.property == b"hello, world!"
+    assert body["property"] == "aGVsbG8sIHdvcmxkIQ=="
+    client.bytes.put(body)
+
+    resp = client.bytes.get()
+    assert resp.property == b"hello, world!"
+    assert resp["property"] == "aGVsbG8sIHdvcmxkIQ=="
+
+
+def test_collections_int(client: ValueTypesClient):
+    body = models.CollectionsIntProperty(property=[1, 2])
+    assert body.property == body["property"]
+    client.collections_int.put(body)
+
+    resp = client.collections_int.get()
+    assert resp.property == resp["property"] == [1, 2]
+
+
+def test_collections_model(client: ValueTypesClient):
+    body = models.CollectionsModelProperty(property=[{"property": "hello"}, {"property": "world"}])
+    assert body.property[0].property == body["property"][0]["property"]
+    client.collections_model.put(body)
+
+    resp = client.collections_model.get()
+    assert resp.property[1].property == resp["property"][1]["property"]
+
+
+def test_collections_string(client: ValueTypesClient):
+    body = models.CollectionsStringProperty(property=["hello", "world"])
+    assert body.property == body["property"]
+    client.collections_string.put(body)
+
+    resp = client.collections_string.get()
+    assert resp.property == resp["property"] == ["hello", "world"]
+
+
+def test_datetime(client):
+    received_body = client.datetime.get()
+    assert received_body == {"property": "2022-08-26T18:38:00Z"}
+    assert received_body.property.year == 2022
+    assert received_body.property.month == 8
+    assert received_body.property.day == 26
+    assert received_body.property.hour == 18
+    assert received_body.property.minute == 38
+
+    client.datetime.put(models.DatetimeProperty(property=datetime.datetime(2022, 8, 26, hour=18, minute=38)))
+
+
+def test_decimal(client: ValueTypesClient):
+    body = models.DecimalProperty(property=decimal.Decimal("0.33333"))
+    assert body.property == decimal.Decimal("0.33333")
+    assert body["property"] == 0.33333
+    client.decimal.put(body)
+
+    resp = client.decimal.get()
+    assert resp.property == decimal.Decimal("0.33333")
+    assert resp["property"] == 0.33333
+
+
+def test_decimal128(client: ValueTypesClient):
+    body = models.Decimal128Property(property=decimal.Decimal("0.33333"))
+    assert body.property == decimal.Decimal("0.33333")
+    assert body["property"] == 0.33333
+    client.decimal128.put(body)
+
+    resp = client.decimal128.get()
+    assert resp.property == decimal.Decimal("0.33333")
+    assert resp["property"] == 0.33333
+
+
+def test_dictionary_string(client: ValueTypesClient):
+    body = models.DictionaryStringProperty(property={"k1": "hello", "k2": "world"})
+    assert body.property == body["property"]
+
client.dictionary_string.put(body) + + resp = client.dictionary_string.get() + assert resp.property == resp["property"] == {"k1": "hello", "k2": "world"} + + +def test_duration(client: ValueTypesClient): + body = models.DurationProperty(property="P123DT22H14M12.011S") + assert body.property == datetime.timedelta(days=123, seconds=80052, microseconds=11000) + assert body["property"] == "P123DT22H14M12.011S" + client.duration.put(body) + + resp = client.duration.get() + assert resp.property == datetime.timedelta(days=123, seconds=80052, microseconds=11000) + assert resp["property"] == "P123DT22H14M12.011S" + + +def test_enum(client: ValueTypesClient): + body = models.EnumProperty(property=models.InnerEnum.VALUE_ONE) + assert body.property == body["property"] + client.enum.put(body) + + resp = client.enum.get() + assert resp.property == resp["property"] == "ValueOne" + + +def test_extensible_enum(client: ValueTypesClient): + body = models.ExtensibleEnumProperty(property="UnknownValue") + assert body.property == body["property"] + client.extensible_enum.put(body) + + resp = client.extensible_enum.get() + assert resp.property == resp["property"] == "UnknownValue" + + +def test_float(client: ValueTypesClient): + body = models.FloatProperty(property=43.125) + assert body.property == body["property"] + client.float.put(body) + + resp = client.float.get() + assert resp.property == resp["property"] == 43.125 + + +def test_float_literal(client: ValueTypesClient): + body = models.FloatLiteralProperty(property=43.125) + assert body.property == body["property"] + client.float_literal.put(body) + + resp = client.float_literal.get() + assert resp.property == resp["property"] == 43.125 + + +def test_int(client: ValueTypesClient): + body = models.IntProperty(property=42) + assert body.property == body["property"] + client.int_operations.put(body) + + resp = client.int_operations.get() + assert resp.property == resp["property"] == 42 + + +def test_int_literal(client: ValueTypesClient): + body = models.IntLiteralProperty(property=42) + assert body.property == body["property"] + client.int_literal.put(body) + + resp = client.int_literal.get() + assert resp.property == resp["property"] == 42 + + +def test_model(client: ValueTypesClient): + body = models.ModelProperty(property={"property": "hello"}) + assert body.property.property == body["property"]["property"] + client.model.put(body) + + resp = client.model.get() + assert resp.property.property == resp["property"]["property"] + + +def test_never(client: ValueTypesClient): + assert client.never.get() == models.NeverProperty() + client.never.put(models.NeverProperty()) + + +def test_string(client: ValueTypesClient): + body = models.StringProperty(property="hello") + assert body.property == body["property"] + client.string.put(body) + + resp = client.string.get() + assert resp.property == resp["property"] == "hello" + + +def test_string_literal(client: ValueTypesClient): + body = models.StringLiteralProperty(property="hello") + assert body.property == body["property"] + client.string_literal.put(body) + + resp = client.string_literal.get() + assert resp.property == resp["property"] == "hello" + + +def test_union_enum_value(client: ValueTypesClient): + body = models.UnionEnumValueProperty(property=models.ExtendedEnum.ENUM_VALUE2) + assert body.property == body["property"] + client.union_enum_value.put(body) + + resp = client.union_enum_value.get() + assert resp.property == resp["property"] == "value2" + + +def test_union_float_literal(client: ValueTypesClient): + 
body = models.UnionFloatLiteralProperty(property=46.875) + assert body.property == body["property"] + client.union_float_literal.put(body) + + resp = client.union_float_literal.get() + assert resp.property == resp["property"] == 46.875 + + +def test_union_int_literal(client: ValueTypesClient): + body = models.UnionIntLiteralProperty(property=42) + assert body.property == body["property"] + client.union_int_literal.put(body) + + resp = client.union_int_literal.get() + assert resp.property == resp["property"] == 42 + + +def test_union_string_literal(client: ValueTypesClient): + body = models.UnionStringLiteralProperty(property="world") + assert body.property == body["property"] + client.union_string_literal.put(body) + + resp = client.union_string_literal.get() + assert resp.property == resp["property"] == "world" + + +def test_unknown_array(client: ValueTypesClient): + body = models.UnknownArrayProperty(property=["hello", "world"]) + assert body.property == body["property"] + client.unknown_array.put(body) + + resp = client.unknown_array.get() + assert resp.property == resp["property"] == ["hello", "world"] + + +def test_unknown_dict(client: ValueTypesClient): + body = models.UnknownDictProperty(property={"k1": "hello", "k2": 42}) + assert body.property == body["property"] + client.unknown_dict.put(body) + + resp = client.unknown_dict.get() + assert resp.property == resp["property"] == {"k1": "hello", "k2": 42} + + +def test_unknown_int(client: ValueTypesClient): + body = models.UnknownIntProperty(property=42) + assert body.property == body["property"] + client.unknown_int.put(body) + + resp = client.unknown_int.get() + assert resp.property == resp["property"] == 42 + + +def test_unknown_string(client: ValueTypesClient): + body = models.UnknownStringProperty(property="hello") + assert body.property == body["property"] + client.unknown_string.put(body) + + resp = client.unknown_string.get() + assert resp.property == resp["property"] == "hello" diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_scalar.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_scalar.py new file mode 100644 index 0000000000..32a1583ac1 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_scalar.py @@ -0,0 +1,53 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import decimal +from functools import reduce + +import pytest +from typetest.scalar import ScalarClient + + +@pytest.fixture +def client(): + with ScalarClient() as client: + yield client + + +def test_scalar_string(client: ScalarClient): + assert client.string.get() == "test" + client.string.put("test") + + +def test_scalar_boolean(client: ScalarClient): + assert client.boolean.get() == True + client.boolean.put(True) + + +def test_scalar_unknown(client: ScalarClient): + assert client.unknown.get() == "test" + client.unknown.put("test") + + +def test_decimal128_type(client: ScalarClient): + assert client.decimal128_type.response_body() == decimal.Decimal("0.33333") + client.decimal128_type.request_body(decimal.Decimal("0.33333")) + client.decimal128_type.request_parameter(value=decimal.Decimal("0.33333")) + + +def test_decimal_type(client: ScalarClient): + assert client.decimal_type.response_body() == decimal.Decimal("0.33333") + client.decimal_type.request_body(decimal.Decimal("0.33333")) + client.decimal_type.request_parameter(value=decimal.Decimal("0.33333")) + + +def test_decimal128_verify(client: ScalarClient): + prepare = client.decimal128_verify.prepare_verify() + client.decimal128_verify.verify(reduce(lambda x, y: x + y, prepare)) + + +def test_decimal_verify(client: ScalarClient): + prepare = client.decimal_verify.prepare_verify() + client.decimal_verify.verify(reduce(lambda x, y: x + y, prepare)) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_typetest_union.py b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_union.py new file mode 100644 index 0000000000..3ca676d10a --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_typetest_union.py @@ -0,0 +1,80 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from typetest.union import UnionClient +from typetest.union import models + + +@pytest.fixture +def client(): + with UnionClient() as client: + yield client + + +def test_enums_only(client: UnionClient): + value = models.EnumsOnlyCases(lr="right", ud="up") + assert client.enums_only.get() == {"prop": value} + client.enums_only.send(prop=value) + + +def test_floats_only(client: UnionClient): + value = 2.2 + assert client.floats_only.get() == {"prop": value} + client.floats_only.send(prop=value) + + +def test_ints_only(client: UnionClient): + value = 2 + assert client.ints_only.get() == {"prop": value} + client.ints_only.send(prop=value) + + +def test_mixed_literals(client: UnionClient): + value = models.MixedLiteralsCases(string_literal="a", int_literal=2, float_literal=3.3, boolean_literal=True) + assert client.mixed_literals.get() == {"prop": value} + client.mixed_literals.send(prop=value) + + +def test_mixed_types(client: UnionClient): + value = models.MixedTypesCases( + model=models.Cat(name="test"), + literal="a", + int_property=2, + boolean=True, + array=[models.Cat(name="test"), "a", 2, True], + ) + assert client.mixed_types.get() == {"prop": value} + client.mixed_types.send(prop=value) + + +def test_models_only(client: UnionClient): + value = models.Cat(name="test") + assert client.models_only.get() == {"prop": value} + client.models_only.send(prop=value) + + +def test_string_and_array(client: UnionClient): + value = models.StringAndArrayCases(string="test", array=["test1", "test2"]) + assert client.string_and_array.get() == {"prop": value} + client.string_and_array.send(prop=value) + + +def test_string_extensible(client: UnionClient): + value = "custom" + assert client.string_extensible.get() == {"prop": value} + client.string_extensible.send(prop=value) + + +def test_string_extensible_named(client: UnionClient): + value = "custom" + assert client.string_extensible_named.get() == {"prop": value} + client.string_extensible_named.send(prop=value) + + +def test_strings_only(client: UnionClient): + value = "b" + assert client.strings_only.get() == {"prop": value} + client.strings_only.send(prop=value) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_versioning_added.py b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_added.py new file mode 100644 index 0000000000..c51b159cba --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_added.py @@ -0,0 +1,33 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from versioning.added import AddedClient +from versioning.added.models import ModelV1, ModelV2, EnumV1, EnumV2 + + +@pytest.fixture +def client(): + with AddedClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +def test_v1(client: AddedClient): + assert client.v1( + ModelV1(prop="foo", enum_prop=EnumV1.ENUM_MEMBER_V2, union_prop=10), + header_v2="bar", + ) == ModelV1(prop="foo", enum_prop=EnumV1.ENUM_MEMBER_V2, union_prop=10) + + +def test_v2(client: AddedClient): + assert client.v2(ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar")) == ModelV2( + prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar" + ) + + +def test_interface_v2(client: AddedClient): + assert client.interface_v2.v2_in_interface( + ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar") + ) == ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER, union_prop="bar") diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_versioning_made_optional.py b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_made_optional.py new file mode 100644 index 0000000000..52099a04b7 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_made_optional.py @@ -0,0 +1,20 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from versioning.madeoptional import MadeOptionalClient +from versioning.madeoptional.models import TestModel + + +@pytest.fixture +def client(): + with MadeOptionalClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +def test(client: MadeOptionalClient): + assert client.test( + TestModel(prop="foo"), + ) == TestModel(prop="foo") diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_versioning_removed.py b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_removed.py new file mode 100644 index 0000000000..c29ed0ac25 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_removed.py @@ -0,0 +1,20 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import pytest +from versioning.removed import RemovedClient +from versioning.removed.models import ModelV2, EnumV2 + + +@pytest.fixture +def client(): + with RemovedClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +def test_v2(client: RemovedClient): + assert client.v2(ModelV2(prop="foo", enum_prop=EnumV2.ENUM_MEMBER_V2, union_prop="bar")) == ModelV2( + prop="foo", enum_prop=EnumV2.ENUM_MEMBER_V2, union_prop="bar" + ) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_versioning_renamed_from.py b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_renamed_from.py new file mode 100644 index 0000000000..d81f258713 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_renamed_from.py @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from versioning.renamedfrom import RenamedFromClient +from versioning.renamedfrom.models import NewModel, NewEnum + + +@pytest.fixture +def client(): + with RenamedFromClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +def test_new_op(client: RenamedFromClient): + assert client.new_op( + NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10), + new_query="bar", + ) == NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10) + + +def test_new_interface_test(client: RenamedFromClient): + assert client.new_interface.new_op_in_new_interface( + NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10) + ) == NewModel(new_prop="foo", enum_prop=NewEnum.NEW_ENUM_MEMBER, union_prop=10) diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_versioning_return_type_changed_from.py b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_return_type_changed_from.py new file mode 100644 index 0000000000..92decc7204 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_return_type_changed_from.py @@ -0,0 +1,17 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from versioning.returntypechangedfrom import ReturnTypeChangedFromClient + + +@pytest.fixture +def client(): + with ReturnTypeChangedFromClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +def test(client: ReturnTypeChangedFromClient): + assert client.test("test") == "test" diff --git a/packages/http-client-python/test/generic_mock_api_tests/test_versioning_type_changed_from.py b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_type_changed_from.py new file mode 100644 index 0000000000..e10742a815 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/test_versioning_type_changed_from.py @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import pytest +from versioning.typechangedfrom import TypeChangedFromClient +from versioning.typechangedfrom.models import TestModel + + +@pytest.fixture +def client(): + with TypeChangedFromClient(endpoint="http://localhost:3000", version="v2") as client: + yield client + + +def test(client: TypeChangedFromClient): + assert client.test( + TestModel(prop="foo", changed_prop="bar"), + param="baz", + ) == TestModel(prop="foo", changed_prop="bar") diff --git a/packages/http-client-python/test/generic_mock_api_tests/unittests/test_model_base_serialization.py b/packages/http-client-python/test/generic_mock_api_tests/unittests/test_model_base_serialization.py new file mode 100644 index 0000000000..b13e0b52dd --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/unittests/test_model_base_serialization.py @@ -0,0 +1,4067 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +import copy +import decimal +import json +import datetime +from pathlib import Path +from typing import ( + Any, + Iterable, + List, + Literal, + Dict, + Mapping, + Sequence, + Set, + Tuple, + Optional, + overload, + Union, +) +import pytest +import isodate +import sys +from enum import Enum + +from specialwords._model_base import ( + SdkJSONEncoder, + Model, + rest_field, + _is_model, + rest_discriminator, + _deserialize, +) + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object + + +class BasicResource(Model): + platform_update_domain_count: int = rest_field( + name="platformUpdateDomainCount" + ) # How many times the platform update domain has been counted + platform_fault_domain_count: int = rest_field( + name="platformFaultDomainCount" + ) # How many times the platform fault domain has been counted + virtual_machines: List[Any] = rest_field(name="virtualMachines") # List of virtual machines + + @overload + def __init__( + self, + *, + platform_update_domain_count: int, + platform_fault_domain_count: int, + virtual_machines: List[Any], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class Pet(Model): + name: str = rest_field() # my name + species: str = rest_field() # my species + + @overload + def __init__(self, *, name: str, species: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_model_and_dict_equal(): + dict_response = { + "platformUpdateDomainCount": 5, + "platformFaultDomainCount": 3, + "virtualMachines": [], + } + model = BasicResource( + platform_update_domain_count=5, + platform_fault_domain_count=3, + virtual_machines=[], + ) + + assert model == dict_response + assert ( + model.platform_update_domain_count + == model["platformUpdateDomainCount"] + == dict_response["platformUpdateDomainCount"] + == 5 + ) + assert ( + model.platform_fault_domain_count + == model["platformFaultDomainCount"] + == dict_response["platformFaultDomainCount"] + == 3 + ) + assert model.virtual_machines == model["virtualMachines"] == dict_response["virtualMachines"] + + +def test_json_roundtrip(): + dict_response = { + "platformUpdateDomainCount": 5, + "platformFaultDomainCount": 3, + "virtualMachines": [], + } + model = BasicResource( + platform_update_domain_count=5, + platform_fault_domain_count=3, + virtual_machines=[], + ) + with pytest.raises(TypeError): + json.dumps(model) + assert ( + json.dumps(dict(model)) + == '{"platformUpdateDomainCount": 5, "platformFaultDomainCount": 3, "virtualMachines": []}' + ) + assert json.loads(json.dumps(dict(model))) == model == dict_response + + +def test_has_no_property(): + dict_response = { + "platformUpdateDomainCount": 5, + "platformFaultDomainCount": 3, + "virtualMachines": [], + "noProp": "bonjour!", + } + model = BasicResource(dict_response) + assert ( + model.platform_update_domain_count + == model["platformUpdateDomainCount"] + == dict_response["platformUpdateDomainCount"] + == 5 + ) + assert not hasattr(model, "no_prop") + with pytest.raises(AttributeError) as ex: + model.no_prop + + assert str(ex.value) == "'BasicResource' object has no attribute 'no_prop'" + assert model["noProp"] == dict_response["noProp"] == "bonjour!" + + # let's add it to model now + + class BasicResourceWithProperty(BasicResource): + no_prop: str = rest_field(name="noProp") + + model = BasicResourceWithProperty( + platform_update_domain_count=5, + platform_fault_domain_count=3, + virtual_machines=[], + no_prop="bonjour!", + ) + assert model.no_prop == model["noProp"] == dict_response["noProp"] == "bonjour!" + + +def test_original_and_attr_name_same(): + class MyModel(Model): + hello: str = rest_field() + + @overload + def __init__(self, *, hello: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + dict_response = {"hello": "nihao"} + model = MyModel(hello="nihao") + assert model.hello == model["hello"] == dict_response["hello"] + + +class OptionalModel(Model): + optional_str: Optional[str] = rest_field() + optional_time: Optional[datetime.time] = rest_field() + optional_dict: Optional[Dict[str, Optional[Pet]]] = rest_field(name="optionalDict") + optional_model: Optional[Pet] = rest_field() + optional_myself: Optional["OptionalModel"] = rest_field() + + @overload + def __init__( + self, + *, + optional_str: Optional[str] = None, + optional_time: Optional[datetime.time] = None, + optional_dict: Optional[Dict[str, Optional[Pet]]] = None, + optional_myself: Optional["OptionalModel"] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+
+def test_optional_property():
+    dict_response = {
+        "optional_str": "hello!",
+        "optional_time": None,
+        "optionalDict": {
+            "Eugene": {
+                "name": "Eugene",
+                "species": "Dog",
+            },
+            "Lady": None,
+        },
+        "optional_model": None,
+        "optional_myself": {
+            "optional_str": None,
+            "optional_time": "11:34:56",
+            "optionalDict": None,
+            "optional_model": {"name": "Lady", "species": "Newt"},
+            "optional_myself": None,
+        },
+    }
+
+    model = OptionalModel(dict_response)
+    assert model.optional_str == model["optional_str"] == "hello!"
+    assert model.optional_time == model["optional_time"] == None
+    assert (
+        model.optional_dict
+        == model["optionalDict"]
+        == {
+            "Eugene": {
+                "name": "Eugene",
+                "species": "Dog",
+            },
+            "Lady": None,
+        }
+    )
+    assert model.optional_dict
+    assert model.optional_dict["Eugene"].name == model.optional_dict["Eugene"]["name"] == "Eugene"
+    assert model.optional_dict["Lady"] is None
+
+    assert (
+        model.optional_myself
+        == model["optional_myself"]
+        == {
+            "optional_str": None,
+            "optional_time": "11:34:56",
+            "optionalDict": None,
+            "optional_model": {"name": "Lady", "species": "Newt"},
+            "optional_myself": None,
+        }
+    )
+    assert model.optional_myself
+    assert model.optional_myself.optional_str is None
+    assert model.optional_myself.optional_time == datetime.time(11, 34, 56)
+    assert model.optional_myself.optional_dict is None
+    assert model.optional_myself.optional_model
+    assert model.optional_myself.optional_model.name == "Lady"
+    assert model.optional_myself.optional_model.species == "Newt"
+    assert model.optional_myself.optional_myself is None
+
+
+def test_model_pass_in_none():
+    model = OptionalModel(optional_str=None)
+    assert model.optional_str == None
+    with pytest.raises(KeyError):
+        model["optionalStr"]
+
+
+def test_modify_dict():
+    model = BasicResource(
+        platform_update_domain_count=5,
+        platform_fault_domain_count=3,
+        virtual_machines=[],
+    )
+
+    # now let's modify the model as a dict
+    model["platformUpdateDomainCount"] = 100
+    assert model.platform_update_domain_count == model["platformUpdateDomainCount"] == 100
+
+
+def test_modify_property():
+    dict_response = {
+        "platformUpdateDomainCount": 5,
+        "platformFaultDomainCount": 3,
+        "virtualMachines": [],
+    }
+    model = BasicResource(
+        platform_update_domain_count=5,
+        platform_fault_domain_count=3,
+        virtual_machines=[],
+    )
+
+    # now let's modify the model through its properties
+    model.platform_fault_domain_count = 2000
+    model["platformFaultDomainCount"]
+    assert model.platform_fault_domain_count == model["platformFaultDomainCount"] == 2000
+
+
+def test_property_is_a_type():
+    class Fish(Model):
+        name: str = rest_field()
+        species: Literal["Salmon", "Halibut"] = rest_field()
+
+        @overload
+        def __init__(self, *, name: str, species: Literal["Salmon", "Halibut"]): ...
+
+        @overload
+        def __init__(self, mapping: Mapping[str, Any], /): ...
+
+        def __init__(self, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+
+    class Fishery(Model):
+        fish: Fish = rest_field()
+
+        @overload
+        def __init__(self, *, fish: Fish): ...
+
+        @overload
+        def __init__(self, mapping: Mapping[str, Any], /): ...
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + fishery = Fishery({"fish": {"name": "Benjamin", "species": "Salmon"}}) + assert isinstance(fishery.fish, Fish) + assert fishery.fish.name == fishery.fish["name"] == fishery["fish"]["name"] == "Benjamin" + assert fishery.fish.species == fishery.fish["species"] == fishery["fish"]["species"] == "Salmon" + + +def test_datetime_deserialization(): + class DatetimeModel(Model): + datetime_value: datetime.datetime = rest_field(name="datetimeValue") + + @overload + def __init__(self, *, datetime_value: datetime.datetime): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + val_str = "9999-12-31T23:59:59.999Z" + val = isodate.parse_datetime(val_str) + model = DatetimeModel({"datetimeValue": val_str}) + assert model["datetimeValue"] == val_str + assert model.datetime_value == val + + class BaseModel(Model): + my_prop: DatetimeModel = rest_field(name="myProp") + + model = BaseModel({"myProp": {"datetimeValue": val_str}}) + assert isinstance(model.my_prop, DatetimeModel) + model.my_prop["datetimeValue"] + assert model.my_prop["datetimeValue"] == model["myProp"]["datetimeValue"] == val_str + assert model.my_prop.datetime_value == val + + +def test_date_deserialization(): + class DateModel(Model): + date_value: datetime.date = rest_field(name="dateValue") + + @overload + def __init__(self, *, date_value: datetime.date): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + val_str = "2016-02-29" + val = isodate.parse_date(val_str) + model = DateModel({"dateValue": val_str}) + assert model["dateValue"] == val_str + assert model.date_value == val + + class BaseModel(Model): + my_prop: DateModel = rest_field(name="myProp") + + model = BaseModel({"myProp": {"dateValue": val_str}}) + assert isinstance(model.my_prop, DateModel) + assert model.my_prop["dateValue"] == model["myProp"]["dateValue"] == val_str + assert model.my_prop.date_value == val + + +def test_time_deserialization(): + class TimeModel(Model): + time_value: datetime.time = rest_field(name="timeValue") + + @overload + def __init__(self, *, time_value: datetime.time): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + val_str = "11:34:56" + val = datetime.time(11, 34, 56) + model = TimeModel({"timeValue": val_str}) + assert model["timeValue"] == val_str + assert model.time_value == val + + class BaseModel(Model): + my_prop: TimeModel = rest_field(name="myProp") + + model = BaseModel({"myProp": {"timeValue": val_str}}) + assert isinstance(model.my_prop, TimeModel) + assert model.my_prop["timeValue"] == model["myProp"]["timeValue"] == val_str + assert model.my_prop.time_value == val + + +class SimpleRecursiveModel(Model): + name: str = rest_field() + me: "SimpleRecursiveModel" = rest_field() + + @overload + def __init__(self, *, name: str, me: "SimpleRecursiveModel"): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_model_recursion(): + dict_response = {"name": "Snoopy", "me": {"name": "Egg", "me": {"name": "Chicken"}}} + + model = SimpleRecursiveModel(dict_response) + assert model["name"] == model.name == "Snoopy" + assert model["me"] == {"name": "Egg", "me": {"name": "Chicken"}} + assert isinstance(model.me, SimpleRecursiveModel) + assert model.me["name"] == model.me.name == "Egg" + assert model.me["me"] == {"name": "Chicken"} + assert model.me.me.name == "Chicken" + + +def test_dictionary_deserialization(): + class DictionaryModel(Model): + prop: Dict[str, datetime.datetime] = rest_field() + + @overload + def __init__(self, *, prop: datetime.datetime): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + val_str = "9999-12-31T23:59:59.999Z" + val = isodate.parse_datetime(val_str) + dict_response = {"prop": {"datetime": val_str}} + model = DictionaryModel(dict_response) + assert model["prop"] == {"datetime": val_str} + assert model.prop == {"datetime": val} + + +def test_attr_and_rest_case(): + class ModelTest(Model): + our_attr: str = rest_field(name="ourAttr") + + @overload + def __init__(self, *, our_attr: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + test_model = ModelTest({"ourAttr": "camel"}) + assert test_model.our_attr == test_model["ourAttr"] == "camel" + + test_model = ModelTest(ModelTest({"ourAttr": "camel"})) + assert test_model.our_attr == test_model["ourAttr"] == "camel" + + test_model = ModelTest(our_attr="snake") + assert test_model.our_attr == test_model["ourAttr"] == "snake" + + +def test_dictionary_deserialization_model(): + class DictionaryModel(Model): + prop: Dict[str, Pet] = rest_field() + + @overload + def __init__(self, *, prop: Dict[str, Pet]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + dict_response = { + "prop": { + "Eugene": { + "name": "Eugene", + "species": "Dog", + }, + "Lady": { + "name": "Lady", + "species": "Newt", + }, + } + } + + model = DictionaryModel(dict_response) + assert model["prop"] == { + "Eugene": { + "name": "Eugene", + "species": "Dog", + }, + "Lady": { + "name": "Lady", + "species": "Newt", + }, + } + assert model.prop == { + "Eugene": Pet({"name": "Eugene", "species": "Dog"}), + "Lady": Pet({"name": "Lady", "species": "Newt"}), + } + assert model.prop["Eugene"].name == model.prop["Eugene"]["name"] == "Eugene" + assert model.prop["Eugene"].species == model.prop["Eugene"]["species"] == "Dog" + assert model.prop["Lady"].name == model.prop["Lady"]["name"] == "Lady" + assert model.prop["Lady"].species == model.prop["Lady"]["species"] == "Newt" + + +def test_list_deserialization(): + class ListModel(Model): + prop: List[datetime.datetime] = rest_field() + + @overload + def __init__(self, *, prop: List[datetime.datetime]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + val_str = "9999-12-31T23:59:59.999Z" + val = isodate.parse_datetime(val_str) + dict_response = {"prop": [val_str, val_str]} + model = ListModel(dict_response) + assert model["prop"] == [val_str, val_str] + assert model.prop == [val, val] + + +def test_list_deserialization_model(): + class ListModel(Model): + prop: List[Pet] = rest_field() + + @overload + def __init__(self, *, prop: List[Pet]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + dict_response = { + "prop": [ + {"name": "Eugene", "species": "Dog"}, + {"name": "Lady", "species": "Newt"}, + ] + } + model = ListModel(dict_response) + assert model["prop"] == [ + {"name": "Eugene", "species": "Dog"}, + {"name": "Lady", "species": "Newt"}, + ] + assert model.prop == [ + Pet({"name": "Eugene", "species": "Dog"}), + Pet({"name": "Lady", "species": "Newt"}), + ] + assert len(model.prop) == 2 + assert model.prop[0].name == model.prop[0]["name"] == "Eugene" + assert model.prop[0].species == model.prop[0]["species"] == "Dog" + assert model.prop[1].name == model.prop[1]["name"] == "Lady" + assert model.prop[1].species == model.prop[1]["species"] == "Newt" + + +def test_set_deserialization(): + class SetModel(Model): + prop: Set[datetime.datetime] = rest_field() + + @overload + def __init__(self, *, prop: Set[datetime.datetime]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + val_str = "9999-12-31T23:59:59.999Z" + val = isodate.parse_datetime(val_str) + dict_response = {"prop": set([val_str, val_str])} + model = SetModel(dict_response) + assert model["prop"] == set([val_str, val_str]) + assert model.prop == set([val, val]) + + +def test_tuple_deserialization(): + class TupleModel(Model): + prop: Tuple[str, datetime.datetime] = rest_field() + + @overload + def __init__(self, *, prop: Tuple[str, datetime.datetime]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + val_str = "9999-12-31T23:59:59.999Z" + val = isodate.parse_datetime(val_str) + dict_response = {"prop": (val_str, val_str)} + model = TupleModel(dict_response) + assert model["prop"] == (val_str, val_str) + assert model.prop == (val_str, val) + + +def test_list_of_tuple_deserialization_model(): + class Owner(Model): + name: str = rest_field() + pet: Pet = rest_field() + + @overload + def __init__(self, *, name: str, pet: Pet): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class ListOfTupleModel(Model): + prop: List[Tuple[Pet, Owner]] = rest_field() + + @overload + def __init__(self, *, prop: List[Tuple[Pet, Owner]]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + eugene = {"name": "Eugene", "species": "Dog"} + lady = {"name": "Lady", "species": "Newt"} + giacamo = {"name": "Giacamo", "pet": eugene} + elizabeth = {"name": "Elizabeth", "pet": lady} + + dict_response: Dict[str, Any] = {"prop": [(eugene, giacamo), (lady, elizabeth)]} + model = ListOfTupleModel(dict_response) + assert ( + model["prop"] + == model.prop + == [(eugene, giacamo), (lady, elizabeth)] + == [(Pet(eugene), Owner(giacamo)), (Pet(lady), Owner(elizabeth))] + ) + assert len(model.prop[0]) == len(model["prop"][0]) == 2 + assert model.prop[0][0].name == model.prop[0][0]["name"] == "Eugene" + assert model.prop[0][0].species == model.prop[0][0]["species"] == "Dog" + assert model.prop[0][1].name == "Giacamo" + assert model.prop[0][1].pet == model.prop[0][0] + assert model.prop[0][1].pet.name == model.prop[0][1]["pet"]["name"] == "Eugene" + assert model.prop[1][0] == model.prop[1][1].pet + + +class RecursiveModel(Model): + name: str = rest_field() + list_of_me: Optional[List["RecursiveModel"]] = rest_field(name="listOfMe") + dict_of_me: Optional[Dict[str, "RecursiveModel"]] = rest_field(name="dictOfMe") + dict_of_list_of_me: Optional[Dict[str, List["RecursiveModel"]]] = rest_field(name="dictOfListOfMe") + list_of_dict_of_me: Optional[List[Dict[str, "RecursiveModel"]]] = rest_field(name="listOfDictOfMe") + + @overload + def __init__( + self, + *, + name: str, + list_of_me: Optional[List["RecursiveModel"]] = None, + dict_of_me: Optional[Dict[str, "RecursiveModel"]] = None, + dict_of_list_of_me: Optional[Dict[str, List["RecursiveModel"]]] = None, + list_of_dict_of_me: Optional[List[Dict[str, "RecursiveModel"]]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_model_recursion_complex(): + dict_response = { + "name": "it's me!", + "listOfMe": [ + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ], + "dictOfMe": { + "me": { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + }, + "dictOfListOfMe": { + "many mes": [ + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ] + }, + "listOfDictOfMe": [ + { + "me": { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + } + ], + } + + model = RecursiveModel(dict_response) + assert model.name == model["name"] == "it's me!" + assert model["listOfMe"] == [ + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ] + assert model.list_of_me == [ + RecursiveModel( + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ) + ] + assert model.list_of_me + assert model.list_of_me[0].name == "it's me!" 
+ assert model.list_of_me[0].list_of_me is None + assert isinstance(model.list_of_me, List) + assert isinstance(model.list_of_me[0], RecursiveModel) + + assert model["dictOfMe"] == { + "me": { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + } + assert model.dict_of_me == { + "me": RecursiveModel( + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ) + } + + assert isinstance(model.dict_of_me, Dict) + assert isinstance(model.dict_of_me["me"], RecursiveModel) + + assert model["dictOfListOfMe"] == { + "many mes": [ + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ] + } + assert model.dict_of_list_of_me == { + "many mes": [ + RecursiveModel( + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ) + ] + } + assert isinstance(model.dict_of_list_of_me, Dict) + assert isinstance(model.dict_of_list_of_me["many mes"], List) + assert isinstance(model.dict_of_list_of_me["many mes"][0], RecursiveModel) + + assert model["listOfDictOfMe"] == [ + { + "me": { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + } + ] + assert model.list_of_dict_of_me == [ + { + "me": RecursiveModel( + { + "name": "it's me!", + "listOfMe": None, + "dictOfMe": None, + "dictOfListOfMe": None, + "listOfDictOfMe": None, + } + ) + } + ] + assert isinstance(model.list_of_dict_of_me, List) + assert isinstance(model.list_of_dict_of_me[0], Dict) + assert isinstance(model.list_of_dict_of_me[0]["me"], RecursiveModel) + + assert model.as_dict() == model == dict_response + + +def test_literals(): + class LiteralModel(Model): + species: Literal["Mongoose", "Eagle", "Penguin"] = rest_field() + age: Literal[1, 2, 3] = rest_field() + + @overload + def __init__( + self, + *, + species: Literal["Mongoose", "Eagle", "Penguin"], + age: Literal[1, 2, 3], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + dict_response = {"species": "Mongoose", "age": 3} + model = LiteralModel(dict_response) + assert model.species == model["species"] == "Mongoose" + assert model.age == model["age"] == 3 + + dict_response = {"species": "invalid", "age": 5} + model = LiteralModel(dict_response) + assert model["species"] == "invalid" + assert model["age"] == 5 + + assert model.species == "invalid" + + assert model.age == 5 + + +def test_deserialization_callback_override(): + def _callback(obj): + return [str(entry) for entry in obj] + + class MyModel(Model): + prop: Sequence[float] = rest_field() + + @overload + def __init__(self, *, prop: Sequence[float]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + model_without_callback = MyModel(prop=[1.3, 2.4, 3.5]) + assert model_without_callback.prop == [1.3, 2.4, 3.5] + assert model_without_callback["prop"] == [1.3, 2.4, 3.5] + + class MyModel2(Model): + prop: Sequence[int] = rest_field(type=_callback) + + @overload + def __init__(self, *, prop: Any): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + model_with_callback = MyModel2(prop=[1.3, 2.4, 3.5]) + assert model_with_callback.prop == ["1.3", "2.4", "3.5"] + # since the deserialize function is not roundtrip-able, once we deserialize + # the serialized version is the same + assert model_with_callback["prop"] == [1.3, 2.4, 3.5] + + +def test_deserialization_callback_override_parent(): + class ParentNoCallback(Model): + prop: Sequence[float] = rest_field() + + @overload + def __init__(self, *, prop: Sequence[float]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _callback(obj): + return set([str(entry) for entry in obj]) + + class ChildWithCallback(ParentNoCallback): + prop: Sequence[float] = rest_field(type=_callback) + + @overload + def __init__(self, *, prop: Sequence[float]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + parent_model = ParentNoCallback(prop=[1, 1, 2, 3]) + assert parent_model.prop == parent_model["prop"] == [1, 1, 2, 3] + + child_model = ChildWithCallback(prop=[1, 1, 2, 3]) + assert child_model.prop == set(["1", "1", "2", "3"]) + assert child_model["prop"] == [1, 1, 2, 3] + + +def test_inheritance_basic(): + def _callback(obj): + return [str(e) for e in obj] + + class Parent(Model): + parent_prop: List[int] = rest_field(name="parentProp", type=_callback) + prop: str = rest_field() + + @overload + def __init__(self, *, parent_prop: List[int], prop: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Child(Parent): + pass + + c = Child(parent_prop=[1, 2, 3], prop="hello") + assert c == {"parentProp": [1, 2, 3], "prop": "hello"} + assert c.parent_prop == ["1", "2", "3"] + assert c.prop == "hello" + + +class ParentA(Model): + prop: float = rest_field() + + @overload + def __init__(self, *, prop: Any): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class ParentB(ParentA): + prop: str = rest_field() + bcd_prop: Optional[List["ParentB"]] = rest_field(name="bcdProp") + + @overload + def __init__(self, *, prop: Any, bcd_prop: Optional[List["ParentB"]] = None): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class ParentC(ParentB): + prop: float = rest_field() + cd_prop: ParentA = rest_field(name="cdProp") + + @overload + def __init__(self, *, prop: Any, bcd_prop: List[ParentB], cd_prop: ParentA, **kwargs): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class ChildD(ParentC): + d_prop: Tuple[ParentA, ParentB, ParentC, Optional["ChildD"]] = rest_field(name="dProp") + + @overload + def __init__( + self, + *, + prop: Any, + bcd_prop: List[ParentB], + cd_prop: ParentA, + d_prop: Tuple[ParentA, ParentB, ParentC, Optional["ChildD"]], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_model_dict_comparisons(): + class Inner(Model): + prop: str = rest_field() + + @overload + def __init__( + self, + *, + prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Outer(Model): + inner: Inner = rest_field() + + @overload + def __init__( + self, + *, + inner: Inner, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _tests(outer): + assert outer.inner.prop == outer["inner"].prop == outer.inner["prop"] == outer["inner"]["prop"] == "hello" + assert outer.inner == outer["inner"] == {"prop": "hello"} + assert outer == {"inner": {"prop": "hello"}} + + _tests(Outer(inner=Inner(prop="hello"))) + _tests(Outer({"inner": {"prop": "hello"}})) + + +def test_model_dict_comparisons_list(): + class Inner(Model): + prop: str = rest_field() + + @overload + def __init__( + self, + *, + prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Outer(Model): + inner: List[Inner] = rest_field() + + @overload + def __init__( + self, + *, + inner: List[Inner], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _tests(outer): + assert ( + outer.inner[0].prop + == outer["inner"][0].prop + == outer.inner[0]["prop"] + == outer["inner"][0]["prop"] + == "hello" + ) + assert outer.inner == outer["inner"] == [{"prop": "hello"}] + assert outer == {"inner": [{"prop": "hello"}]} + + _tests(Outer(inner=[Inner(prop="hello")])) + _tests(Outer({"inner": [{"prop": "hello"}]})) + + +def test_model_dict_comparisons_dict(): + class Inner(Model): + prop: str = rest_field() + + @overload + def __init__( + self, + *, + prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Outer(Model): + inner: Dict[str, Inner] = rest_field() + + @overload + def __init__( + self, + *, + inner: Dict[str, Inner], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _tests(outer): + assert ( + outer.inner["key"].prop + == outer["inner"]["key"].prop + == outer.inner["key"]["prop"] + == outer["inner"]["key"]["prop"] + == "hello" + ) + assert outer.inner == outer["inner"] == {"key": {"prop": "hello"}} + with pytest.raises(AttributeError): + outer.inner.key + assert outer.inner["key"] == outer["inner"]["key"] == {"prop": "hello"} + assert outer == {"inner": {"key": {"prop": "hello"}}} + + _tests(Outer(inner={"key": Inner(prop="hello")})) + _tests(Outer({"inner": {"key": {"prop": "hello"}}})) + + +def test_inheritance_4_levels(): + a = ParentA(prop=3.4) + assert a.prop == 3.4 + assert a["prop"] == 3.4 + assert a == {"prop": 3.4} + assert isinstance(a, Model) + + b = ParentB(prop=3.4, bcd_prop=[ParentB(prop=4.3)]) + assert b.prop == "3.4" + assert b["prop"] == 3.4 + assert b.bcd_prop == [ParentB(prop=4.3)] + assert b["bcdProp"] != [{"prop": 4.3, "bcdProp": None}] + assert b["bcdProp"] == [{"prop": 4.3}] + assert b.bcd_prop + assert b.bcd_prop[0].prop == "4.3" + assert b.bcd_prop[0].bcd_prop is None + assert b == {"prop": 3.4, "bcdProp": [{"prop": 4.3}]} + assert isinstance(b, ParentB) + assert isinstance(b, ParentA) + + c = ParentC(prop=3.4, bcd_prop=[b], cd_prop=a) + assert c.prop == c["prop"] == 3.4 + assert c.bcd_prop == [b] + assert c.bcd_prop + assert isinstance(c.bcd_prop[0], ParentB) + assert c["bcdProp"] == [b] == [{"prop": 3.4, "bcdProp": [{"prop": 4.3}]}] + assert c.cd_prop == a + assert c["cdProp"] == a == {"prop": 3.4} + assert isinstance(c.cd_prop, ParentA) + + d = ChildD( + prop=3.4, + bcd_prop=[b], + cd_prop=a, + d_prop=( + a, + b, + c, + ChildD(prop=3.4, bcd_prop=[b], cd_prop=a, d_prop=(a, b, c, None)), + ), + ) + assert d == { + "prop": 3.4, + "bcdProp": [b], + "cdProp": a, + "dProp": ( + a, + b, + c, + {"prop": 3.4, "bcdProp": [b], "cdProp": a, "dProp": (a, b, c, None)}, + ), + } + assert d.prop == d["prop"] == 3.4 + assert d.bcd_prop == [b] + assert d.bcd_prop + assert isinstance(d.bcd_prop[0], ParentB) + assert d.cd_prop == a + assert isinstance(d.cd_prop, ParentA) + assert d.d_prop[0] == a # at a + assert isinstance(d.d_prop[0], ParentA) + assert d.d_prop[1] == b + assert isinstance(d.d_prop[1], ParentB) + assert d.d_prop[2] == c + assert isinstance(d.d_prop[2], ParentC) + assert isinstance(d.d_prop[3], ChildD) + + assert isinstance(d.d_prop[3].d_prop[0], ParentA) + assert isinstance(d.d_prop[3].d_prop[1], ParentB) + assert isinstance(d.d_prop[3].d_prop[2], ParentC) + assert d.d_prop[3].d_prop[3] is None + + +def test_multiple_inheritance_basic(): + class ParentOne(Model): + parent_one_prop: str = rest_field(name="parentOneProp") + + @overload + def __init__( + self, + *, + parent_one_prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class ParentTwo(Model): + parent_two_prop: int = rest_field(name="parentTwoProp", type=lambda x: str(x)) + + @overload + def __init__( + self, + *, + parent_two_prop: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Child(ParentOne, ParentTwo): + @overload + def __init__( + self, + *, + parent_one_prop: str, + parent_two_prop: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + c = Child(parent_one_prop="Hello", parent_two_prop=3) + assert c == {"parentOneProp": "Hello", "parentTwoProp": 3} + assert c.parent_one_prop == "Hello" + assert c.parent_two_prop == "3" + assert isinstance(c, Child) + assert isinstance(c, ParentOne) + assert isinstance(c, ParentTwo) + + +def test_multiple_inheritance_mro(): + class A(Model): + prop: str = rest_field() + + @overload + def __init__(self, *, prop: str) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class B(Model): + prop: int = rest_field(type=lambda x: int(x)) + + @overload + def __init__(self, *, prop: str) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class C(A, B): + pass + + assert A(prop="1").prop == "1" + assert B(prop="1").prop == 1 + assert C(prop="1").prop == "1" # A should take precedence over B + + +class Feline(Model): + meows: bool = rest_field() + hisses: bool = rest_field() + siblings: Optional[List["Feline"]] = rest_field() + + @overload + def __init__( + self, + *, + meows: bool, + hisses: bool, + siblings: Optional[List["Feline"]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class Owner(Model): + first_name: str = rest_field(name="firstName", type=lambda x: x.capitalize()) + last_name: str = rest_field(name="lastName", type=lambda x: x.capitalize()) + + @overload + def __init__( + self, + *, + first_name: str, + last_name: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class PetModel(Model): + name: str = rest_field() + owner: Owner = rest_field() + + @overload + def __init__(self, *, name: str, owner: Owner): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class Cat(PetModel, Feline): + likes_milk: bool = rest_field(name="likesMilk", type=lambda x: True) + + @overload + def __init__( + self, + *, + name: str, + owner: Owner, + meows: bool, + hisses: bool, + likes_milk: bool, + siblings: Optional[List[Feline]], + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class CuteThing(Model): + how_cute_am_i: float = rest_field(name="howCuteAmI") + + @overload + def __init__(self, *, how_cute_am_i: float): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class Kitten(Cat, CuteThing): + eats_mice_yet: bool = rest_field(name="eatsMiceYet") + + @overload + def __init__( + self, + *, + name: str, + owner: Owner, + meows: bool, + hisses: bool, + likes_milk: bool, + siblings: Optional[List[Feline]], + how_cute_am_i: float, + eats_mice_yet: bool, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_multiple_inheritance_complex(): + cat = Cat( + name="Stephanie", + owner=Owner(first_name="cecil", last_name="cai"), # gets capitalized in attr + meows=True, + hisses=True, + likes_milk=False, # likes_milk will change to True on the attribute + siblings=[Feline(meows=True, hisses=False)], + ) + assert cat == { + "name": "Stephanie", + "owner": { + "firstName": "cecil", + "lastName": "cai", + }, + "meows": True, + "hisses": True, + "likesMilk": False, + "siblings": [ + { + "meows": True, + "hisses": False, + } + ], + } + assert cat.name == "Stephanie" + assert isinstance(cat.owner, Owner) + assert cat.owner.first_name == "Cecil" + assert cat.owner.last_name == "Cai" + assert cat.meows + assert cat.hisses + assert cat.likes_milk + assert cat.siblings + assert len(cat.siblings) == 1 + assert isinstance(cat.siblings[0], Feline) + + kitten = Kitten( + name="Stephanie", + owner=Owner(first_name="cecil", last_name="cai"), # gets capitalized in attr + meows=True, + hisses=True, + likes_milk=False, # likes_milk will change to True on the attribute + siblings=[Feline(meows=True, hisses=False)], + how_cute_am_i=1.0, + eats_mice_yet=True, + ) + assert kitten != { + "name": "Stephanie", + "owner": { + "firstName": "cecil", + "lastName": "cai", + }, + "meows": True, + "hisses": True, + "likesMilk": False, + "siblings": [{"meows": True, "hisses": False, "siblings": None}], # we don't automatically set None here + "howCuteAmI": 1.0, + "eatsMiceYet": True, + } + assert kitten == { + "name": "Stephanie", + "owner": { + "firstName": "cecil", + "lastName": "cai", + }, + "meows": True, + "hisses": True, + "likesMilk": False, + "siblings": [ + { + "meows": True, + "hisses": False, + } + ], + "howCuteAmI": 1.0, + "eatsMiceYet": True, + } + assert kitten.name == "Stephanie" + assert isinstance(kitten.owner, Owner) + assert kitten.owner.first_name == "Cecil" + assert kitten.owner.last_name == "Cai" + assert kitten.meows + assert kitten.hisses + assert kitten.likes_milk + assert kitten.siblings + assert len(kitten.siblings) == 1 + assert isinstance(kitten.siblings[0], Feline) + assert kitten.eats_mice_yet + assert kitten.how_cute_am_i == 1.0 + assert isinstance(kitten, PetModel) + assert isinstance(kitten, Cat) + assert isinstance(kitten, Feline) + assert isinstance(kitten, CuteThing) + + +class A(Model): + b: "B" = rest_field() + + @overload + def __init__(self, b: "B"): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class B(Model): + c: "C" = rest_field() + + @overload + def __init__(self, *, c: "C"): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class C(Model): + d: str = rest_field() + + @overload + def __init__(self, *, d: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_nested_creation(): + a = A({"b": {"c": {"d": "hello"}}}) + assert isinstance(a["b"], Model) + assert isinstance(a["b"]["c"], Model) + assert a["b"]["c"] == a["b"].c == a.b.c == {"d": "hello"} + + assert ( + a["b"]["c"]["d"] + == a["b"].c.d + == a.b["c"].d + == a["b"]["c"].d + == a["b"].c["d"] + == a.b["c"]["d"] + == a.b.c.d + == "hello" + ) + + +def test_nested_setting(): + a = A({"b": {"c": {"d": "hello"}}}) + + # set with dict + a["b"]["c"]["d"] = "setwithdict" + assert ( + a["b"]["c"]["d"] + == a["b"].c.d + == a.b["c"].d + == a["b"]["c"].d + == a["b"].c["d"] + == a.b["c"]["d"] + == a.b.c.d + == "setwithdict" + ) + + # set with attr + a.b.c.d = "setwithattr" + assert a["b"]["c"]["d"] == "setwithattr" + assert ( + a["b"]["c"]["d"] + == a["b"].c.d + == a.b["c"].d + == a["b"]["c"].d + == a["b"].c["d"] + == a.b["c"]["d"] + == a.b.c.d + == "setwithattr" + ) + + +class BaseModel(Model): + inner_model: "InnerModel" = rest_field(name="innerModel") + + @overload + def __init__(self, *, inner_model: "InnerModel"): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class InnerModel(Model): + datetime_field: datetime.datetime = rest_field(name="datetimeField") + + @overload + def __init__(self, *, datetime_field: datetime.datetime): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_nested_deserialization(): + serialized_datetime = "9999-12-31T23:59:59.999Z" + + model = BaseModel({"innerModel": {"datetimeField": serialized_datetime}}) + assert model.inner_model["datetimeField"] == model["innerModel"]["datetimeField"] == serialized_datetime + assert ( + model.inner_model.datetime_field + == model["innerModel"].datetime_field + == isodate.parse_datetime(serialized_datetime) + ) + + new_serialized_datetime = "2022-12-31T23:59:59.999Z" + model.inner_model.datetime_field = isodate.parse_datetime(new_serialized_datetime) + assert model.inner_model["datetimeField"] == "2022-12-31T23:59:59.999000Z" + + +class X(Model): + y: "Y" = rest_field() + + @overload + def __init__(self, *, y: "Y"): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class Y(Model): + z: "Z" = rest_field() + + @overload + def __init__(self, *, z: "Z"): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class Z(Model): + z_val: datetime.datetime = rest_field(name="zVal") + + @overload + def __init__(self, *, z_val: datetime.datetime): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_nested_update(): + serialized_datetime = "9999-12-31T23:59:59.999Z" + parsed_datetime = isodate.parse_datetime(serialized_datetime) + x = X({"y": {"z": {"zVal": serialized_datetime}}}) + assert x.y.z.z_val == x["y"].z.z_val == x.y["z"].z_val == x["y"]["z"].z_val == parsed_datetime + assert x.y.z["zVal"] == x.y["z"]["zVal"] == x["y"].z["zVal"] == x["y"]["z"]["zVal"] == serialized_datetime + + +def test_deserialization_is(): + # test without datetime deserialization + a = A({"b": {"c": {"d": "hello"}}}) + assert a.b is a.b + assert a.b.c is a.b.c + assert a.b.c.d is a.b.c.d + + serialized_datetime = "9999-12-31T23:59:59.999Z" + x = X({"y": {"z": {"zVal": serialized_datetime}}}) + assert x.y is x.y + assert x.y.z is x.y.z + + assert x.y.z.z_val == isodate.parse_datetime(serialized_datetime) + + +class InnerModelWithReadonly(Model): + normal_property: str = rest_field(name="normalProperty") + readonly_property: str = rest_field(name="readonlyProperty", visibility=["read"]) + + @overload + def __init__(self, *, normal_property: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class ModelWithReadonly(Model): + normal_property: str = rest_field(name="normalProperty") + readonly_property: str = rest_field(name="readonlyProperty", visibility=["read"]) + inner_model: InnerModelWithReadonly = rest_field(name="innerModel") + + @overload + def __init__(self, *, normal_property: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +def test_readonly(): + # we pass the dict to json, so readonly shouldn't show up in the JSON version + value = { + "normalProperty": "normal", + "readonlyProperty": "readonly", + "innerModel": {"normalProperty": "normal", "readonlyProperty": "readonly"}, + } + model = ModelWithReadonly(value) + assert model.as_dict(exclude_readonly=True) == { + "normalProperty": "normal", + "innerModel": {"normalProperty": "normal"}, + } + assert json.loads(json.dumps(model, cls=SdkJSONEncoder)) == value + assert model == value + assert model["readonlyProperty"] == model.readonly_property == "readonly" + assert model["innerModel"]["readonlyProperty"] == model.inner_model.readonly_property == "readonly" + + +def test_readonly_set(): + value = { + "normalProperty": "normal", + "readonlyProperty": "readonly", + "innerModel": {"normalProperty": "normal", "readonlyProperty": "readonly"}, + } + + model = ModelWithReadonly(value) + assert model.normal_property == model["normalProperty"] == "normal" + assert model.readonly_property == model["readonlyProperty"] == "readonly" + assert model.inner_model.normal_property == model.inner_model["normalProperty"] == "normal" + assert model.inner_model.readonly_property == model.inner_model["readonlyProperty"] == "readonly" + + assert model.as_dict(exclude_readonly=True) == { + "normalProperty": "normal", + "innerModel": {"normalProperty": "normal"}, + } + assert json.loads(json.dumps(model, cls=SdkJSONEncoder)) == value + + model["normalProperty"] = "setWithDict" + model["readonlyProperty"] = "setWithDict" + model.inner_model["normalProperty"] = "setWithDict" + model.inner_model["readonlyProperty"] = "setWithDict" + + assert model.normal_property == model["normalProperty"] == "setWithDict" + assert model.readonly_property == model["readonlyProperty"] 
== "setWithDict" + assert model.inner_model.normal_property == model.inner_model["normalProperty"] == "setWithDict" + assert model.inner_model.readonly_property == model.inner_model["readonlyProperty"] == "setWithDict" + assert model.as_dict(exclude_readonly=True) == { + "normalProperty": "setWithDict", + "innerModel": {"normalProperty": "setWithDict"}, + } + assert json.loads(json.dumps(model, cls=SdkJSONEncoder)) == { + "normalProperty": "setWithDict", + "readonlyProperty": "setWithDict", + "innerModel": { + "normalProperty": "setWithDict", + "readonlyProperty": "setWithDict", + }, + } + + +def test_incorrect_initialization(): + class MyModel(Model): + id: int = rest_field() + field: str = rest_field() + + @overload + def __init__( + self, + *, + id: int, + field: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + with pytest.raises(TypeError): + MyModel(1, "field") + + with pytest.raises(TypeError): + MyModel(id=1, field="field", unknown="me") + + +def test_serialization_initialization_and_setting(): + serialized_datetime = "9999-12-31T23:59:59.999000Z" + parsed_datetime = isodate.parse_datetime(serialized_datetime) + + # pass in parsed + z = Z(z_val=parsed_datetime) + assert z.z_val == parsed_datetime + assert z["zVal"] == serialized_datetime + + # pass in dict + z = Z({"zVal": serialized_datetime}) + assert z.z_val == parsed_datetime + assert z["zVal"] == serialized_datetime + + # assert setting + serialized_datetime = "2022-12-31T23:59:59.999000Z" + z.z_val = isodate.parse_datetime(serialized_datetime) + assert z["zVal"] == serialized_datetime + + +def test_copy_of_input(): + class TestModel(Model): + data: List[int] = rest_field() + + @overload + def __init__(self, *, data: List[int]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + raw = [1, 2, 3] + m = TestModel(data=raw) + assert not m.data is raw + assert m.data == raw + raw.append(4) + assert m.data == [1, 2, 3] + + +def test_inner_model_custom_serializer(): + class InnerModel(Model): + prop: str = rest_field(type=lambda x: x[::-1]) + + @overload + def __init__(self, *, prop: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class OuterModel(Model): + inner: InnerModel = rest_field() + + @overload + def __init__(self, *, inner: InnerModel): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + outer = OuterModel({"inner": {"prop": "hello"}}) + assert outer.inner["prop"] == outer["inner"]["prop"] == "hello" + assert outer.inner.prop == outer["inner"].prop == "olleh" # cspell: ignore olleh + + +def test_default_value(): + class MyModel(Model): + prop_default_str: str = rest_field(name="propDefaultStr", default="hello") + prop_optional_str: Optional[str] = rest_field(name="propOptionalStr", default=None) + prop_default_int: int = rest_field(name="propDefaultInt", default=1) + prop_optional_int: Optional[int] = rest_field(name="propOptionalInt", default=None) + + @overload + def __init__( + self, + *, + prop_default_str: str = "hello", + prop_optional_str: Optional[str] = "propOptionalStr", + prop_default_int: int = 1, + prop_optional_int: Optional[int] = None, + ): ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + my_model = MyModel() + assert my_model.prop_default_str == my_model["propDefaultStr"] == "hello" + assert my_model.prop_optional_str is my_model["propOptionalStr"] is None + assert my_model.prop_default_int == my_model["propDefaultInt"] == 1 + assert my_model.prop_optional_int is my_model["propOptionalInt"] is None + assert my_model == { + "propDefaultStr": "hello", + "propOptionalStr": None, + "propDefaultInt": 1, + "propOptionalInt": None, + } + + my_model = MyModel(prop_default_str="goodbye") + assert my_model.prop_default_str == my_model["propDefaultStr"] == "goodbye" + assert my_model.prop_optional_str is my_model["propOptionalStr"] is None + assert my_model.prop_default_int == my_model["propDefaultInt"] == 1 + assert my_model.prop_optional_int is my_model["propOptionalInt"] is None + assert my_model == { + "propDefaultStr": "goodbye", + "propOptionalStr": None, + "propDefaultInt": 1, + "propOptionalInt": None, + } + + my_model = MyModel(prop_optional_int=4) + assert my_model.prop_default_str == my_model["propDefaultStr"] == "hello" + assert my_model.prop_optional_str is my_model["propOptionalStr"] is None + assert my_model.prop_default_int == my_model["propDefaultInt"] == 1 + assert my_model.prop_optional_int == my_model["propOptionalInt"] == 4 + assert my_model == { + "propDefaultStr": "hello", + "propOptionalStr": None, + "propDefaultInt": 1, + "propOptionalInt": 4, + } + + my_model = MyModel({"propDefaultInt": 5}) + assert my_model.prop_default_str == my_model["propDefaultStr"] == "hello" + assert my_model.prop_optional_str is my_model["propOptionalStr"] is None + assert my_model.prop_default_int == my_model["propDefaultInt"] == 5 + assert my_model.prop_optional_int is my_model["propOptionalInt"] is None + assert my_model == { + "propDefaultStr": "hello", + "propOptionalStr": None, + "propDefaultInt": 5, + "propOptionalInt": None, + } + + +def test_pass_models_in_dict(): + class Inner(Model): + str_property: str = rest_field(name="strProperty") + + @overload + def __init__( + self, + *, + str_property: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Outer(Model): + inner_property: Inner = rest_field(name="innerProperty") + + @overload + def __init__( + self, + *, + inner_property: Inner, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _tests(model: Outer): + assert ( + {"innerProperty": {"strProperty": "hello"}} + == {"innerProperty": Inner(str_property="hello")} + == {"innerProperty": Inner({"strProperty": "hello"})} + == Outer(inner_property=Inner(str_property="hello")) + == Outer(inner_property=Inner({"strProperty": "hello"})) + == Outer({"innerProperty": {"strProperty": "hello"}}) + == Outer({"innerProperty": Inner(str_property="hello")}) + == Outer({"innerProperty": Inner({"strProperty": "hello"})}) + == model + ) + + _tests(Outer(inner_property=Inner(str_property="hello"))) + _tests(Outer(inner_property=Inner({"strProperty": "hello"}))) + _tests(Outer({"innerProperty": {"strProperty": "hello"}})) + _tests(Outer({"innerProperty": Inner(str_property="hello")})) + _tests(Outer({"innerProperty": Inner({"strProperty": "hello"})})) + + +def test_mutability_list(): + class Inner(Model): + str_property: str = rest_field(name="strProperty") + + @overload + def __init__( + self, + *, + str_property: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Middle(Model): + inner_property: List[Inner] = rest_field(name="innerProperty") + prop: str = rest_field() + + @overload + def __init__( + self, + *, + inner_property: List[Inner], + prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Outer(Model): + middle_property: Middle = rest_field(name="middleProperty") + + @overload + def __init__( + self, + *, + middle_property: Model, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + original_dict = { + "middleProperty": { + "innerProperty": [{"strProperty": "hello"}], + "prop": "original", + } + } + model = Outer(original_dict) + assert model is not original_dict + + # set with dict syntax + assert model.middle_property is model["middleProperty"] + middle_property = model.middle_property + middle_property["prop"] = "new" + assert model["middleProperty"] is model.middle_property is middle_property + assert model["middleProperty"]["prop"] == model.middle_property.prop == "new" + + # set with attr syntax + middle_property.prop = "newest" + assert model["middleProperty"] is model.middle_property is middle_property + assert model["middleProperty"]["prop"] == model.middle_property.prop == "newest" + + # modify innerproperty list + assert model["middleProperty"]["innerProperty"][0] is model.middle_property.inner_property[0] + assert ( + model["middleProperty"]["innerProperty"][0] + is model.middle_property["innerProperty"][0] + is model["middleProperty"].inner_property[0] + is model.middle_property.inner_property[0] + ) + inner_property = model["middleProperty"]["innerProperty"][0] + + # set with dict syntax + inner_property["strProperty"] = "nihao" + assert ( + model["middleProperty"]["innerProperty"][0] + is model.middle_property["innerProperty"][0] + is model["middleProperty"].inner_property[0] + is model.middle_property.inner_property[0] + ) + assert ( + model["middleProperty"]["innerProperty"][0]["strProperty"] + == model.middle_property["innerProperty"][0]["strProperty"] + == model["middleProperty"].inner_property[0]["strProperty"] + == model.middle_property.inner_property[0]["strProperty"] + == model["middleProperty"]["innerProperty"][0].str_property + == model.middle_property["innerProperty"][0].str_property + == model["middleProperty"].inner_property[0].str_property + == model.middle_property.inner_property[0].str_property + == "nihao" + ) + + +def test_mutability_dict(): + class Inner(Model): + str_property: str = rest_field(name="strProperty") + + @overload + def __init__( + self, + *, + str_property: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Middle(Model): + inner_property: Dict[str, Inner] = rest_field(name="innerProperty") + prop: str = rest_field() + + @overload + def __init__( + self, + *, + inner_property: Dict[str, Inner], + prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Outer(Model): + middle_property: Middle = rest_field(name="middleProperty") + + @overload + def __init__( + self, + *, + middle_property: Model, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + original_dict = { + "middleProperty": { + "innerProperty": {"inner": {"strProperty": "hello"}}, + "prop": "original", + } + } + model = Outer(original_dict) + assert model is not original_dict + + # set with dict syntax + assert model.middle_property is model["middleProperty"] + middle_property = model.middle_property + middle_property["prop"] = "new" + assert model["middleProperty"] is model.middle_property is middle_property + assert ( + model["middleProperty"]["prop"] + == model["middleProperty"].prop + == model.middle_property.prop + == model.middle_property["prop"] + == "new" + ) + + # set with attr syntax + middle_property.prop = "newest" + assert model["middleProperty"] is model.middle_property is middle_property + assert model["middleProperty"]["prop"] == model.middle_property.prop == "newest" + + # modify innerproperty list + assert model["middleProperty"]["innerProperty"]["inner"] is model.middle_property.inner_property["inner"] + assert ( + model["middleProperty"]["innerProperty"]["inner"] + is model.middle_property["innerProperty"]["inner"] + is model["middleProperty"].inner_property["inner"] + is model.middle_property.inner_property["inner"] + ) + inner_property = model["middleProperty"]["innerProperty"]["inner"] + + # set with dict syntax + inner_property["strProperty"] = "nihao" + assert ( + model["middleProperty"]["innerProperty"]["inner"] + is model.middle_property["innerProperty"]["inner"] + is model["middleProperty"].inner_property["inner"] + is model.middle_property.inner_property["inner"] + ) + assert ( + model["middleProperty"]["innerProperty"]["inner"]["strProperty"] + == model.middle_property["innerProperty"]["inner"]["strProperty"] + == model["middleProperty"].inner_property["inner"]["strProperty"] + == model.middle_property.inner_property["inner"]["strProperty"] + == model["middleProperty"]["innerProperty"]["inner"].str_property + == model.middle_property["innerProperty"]["inner"].str_property + == model["middleProperty"].inner_property["inner"].str_property + == model.middle_property.inner_property["inner"].str_property + == "nihao" + ) + + +def test_del_model(): + class TestModel(Model): + x: Optional[int] = rest_field() + + my_dict = {} + my_dict["x"] = None + + assert my_dict["x"] is None + + my_model = TestModel({}) + my_model["x"] = None + + assert my_model["x"] is my_model.x is None + + my_model = TestModel({"x": 7}) + my_model.x = None + + assert "x" not in my_model + assert my_model.x is None + + with pytest.raises(KeyError): + del my_model["x"] + my_model.x = 8 + + del my_model["x"] + assert "x" not in my_model + assert my_model.x is my_model.get("x") is None + + with pytest.raises(AttributeError): + del my_model.x + my_model.x = None + assert "x" not in my_model + assert my_model.x is my_model.get("x") is None + + +def test_pop_model(): + class Inner(Model): + str_property: str = rest_field(name="strProperty") + + @overload + def __init__( + self, + *, + str_property: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Middle(Model): + inner_property: Dict[str, Inner] = rest_field(name="innerProperty") + prop: str = rest_field() + + @overload + def __init__( + self, + *, + inner_property: Dict[str, Inner], + prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class Outer(Model): + middle_property: Middle = rest_field(name="middleProperty") + + @overload + def __init__( + self, + *, + middle_property: Model, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + original_dict = { + "middleProperty": { + "innerProperty": {"inner": {"strProperty": "hello"}}, + "prop": "original", + } + } + model_dict = Outer(original_dict) # model we will access with dict syntax + model_attr = Outer(original_dict) # model we will access with attr syntax + + assert model_dict is not original_dict is not model_attr + assert ( + original_dict["middleProperty"]["innerProperty"]["inner"].pop("strProperty") + == model_dict["middleProperty"]["innerProperty"]["inner"].pop("strProperty") + == model_attr.middle_property.inner_property["inner"].pop("strProperty") + == "hello" + ) + + with pytest.raises(KeyError): + original_dict["middleProperty"]["innerProperty"]["inner"].pop("strProperty") + with pytest.raises(KeyError): + model_dict["middleProperty"]["innerProperty"]["inner"].pop("strProperty") + with pytest.raises(KeyError): + model_attr.middle_property.inner_property["inner"].pop("strProperty") + + +def test_contains(): + class ParentA(Model): + a_prop: str = rest_field(name="aProp") + + @overload + def __init__( + self, + *, + a_prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class ParentB(Model): + b_prop: str = rest_field(name="bProp") + + @overload + def __init__( + self, + *, + b_prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + class ChildC(ParentA, ParentB): + c_prop: str = rest_field(name="cProp") + + @overload + def __init__( + self, + *, + a_prop: str, + b_prop: str, + c_prop: str, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + parent_a_dict = {"aProp": "a"} + assert "aProp" in parent_a_dict + + parent_a = ParentA(parent_a_dict) + assert "aProp" in parent_a + assert not "a_prop" in parent_a + + parent_a.a_prop = None # clear it out + assert "aProp" not in parent_a + + parent_b_dict = {"bProp": "b"} + assert "bProp" in parent_b_dict + + parent_b = ParentB(parent_b_dict) + assert "bProp" in parent_b + assert "b_prop" not in parent_b + + parent_b.b_prop = None # clear it out + assert "bProp" not in parent_b + + props = ["aProp", "bProp", "cProp"] + child_c_dict = {"aProp": "a", "bProp": "b", "cProp": "c"} + assert all(p for p in props if p in child_c_dict) + + child_c = ChildC(child_c_dict) + assert all(p for p in props if p in child_c) + assert not any(p for p in ["a_prop", "b_prop", "c_prop"] if p in child_c) + + child_c.a_prop = None + child_c.b_prop = None + child_c.c_prop = None + + assert not any(p for p in props if p in child_c) + + +def test_iter(): + dict_response = { + "platformUpdateDomainCount": 5, + "platformFaultDomainCount": 3, + "virtualMachines": [], + } + assert isinstance(iter(dict_response), Iterable) + model = BasicResource(dict_response) + assert isinstance(iter(model), Iterable) + + assert ( + list(iter(dict_response)) + == list(iter(model)) + == ["platformUpdateDomainCount", "platformFaultDomainCount", "virtualMachines"] + ) + + +def test_len(): + dict_response = { + "platformUpdateDomainCount": 5, + "platformFaultDomainCount": 3, + "virtualMachines": [], + } + model = BasicResource(dict_response) + assert len(dict_response) == len(model) == 3 + + dict_response.pop("platformUpdateDomainCount") + model.pop("platformUpdateDomainCount") + assert len(dict_response) == len(model) == 2 + + +def test_keys(): + class Inner(Model): + str_prop: str = rest_field(name="strProp") + + class Outer(Model): + inner_prop: Inner = rest_field(name="innerProp") + + outer_dict = {"innerProp": {"strProp": "hello"}} + outer = Outer(outer_dict) + assert outer.keys() == outer_dict.keys() + outer_dict["newProp"] = "hello" + outer["newProp"] = "hello" + + assert outer.keys() == outer_dict.keys() + + outer_dict.pop("newProp") + outer.pop("newProp") + assert outer_dict.keys() == outer.keys() + + +def test_values(): + class Inner(Model): + str_prop: str = rest_field(name="strProp") + + class Outer(Model): + inner_prop: Inner = rest_field(name="innerProp") + + outer_dict = {"innerProp": {"strProp": "hello"}} + outer = Outer(outer_dict) + + assert list(outer.values()) == list(outer_dict.values()) + assert len(outer.values()) == len(outer_dict.values()) == 1 + assert list(outer.values())[0]["strProp"] == list(outer_dict.values())[0]["strProp"] == "hello" + + outer_dict["innerProp"]["strProp"] = "goodbye" + outer.inner_prop.str_prop = "goodbye" + + assert list(outer.inner_prop.values()) == list(outer_dict["innerProp"].values()) + + +def test_items(): + class Inner(Model): + str_prop: str = rest_field(name="strProp") + + class Outer(Model): + inner_prop: Inner = rest_field(name="innerProp") + + outer_dict = {"innerProp": {"strProp": "hello"}} + outer = Outer(outer_dict) + + assert list(outer.items()) == list(outer_dict.items()) + + outer_dict["innerProp"]["strProp"] = "goodbye" + outer.inner_prop.str_prop = "goodbye" + + assert list(outer.inner_prop.items()) == list(outer_dict["innerProp"].items()) + + outer_dict["newProp"] = "bonjour" + outer["newProp"] = "bonjour" + + assert list(outer.items()) == list(outer_dict.items()) + + +def test_get(): + 
class MyModel(Model): + prop: str = rest_field() + rest_prop: str = rest_field(name="restProp") + + my_dict = {"prop": "hello", "restProp": "bonjour"} + my_model = MyModel(my_dict) + + assert my_dict.get("prop") == my_model.get("prop") == "hello" + my_dict["prop"] = "nihao" + my_model.prop = "nihao" + + assert my_dict.get("prop") == my_model.get("prop") == "nihao" + + my_dict["restProp"] = "buongiorno" + my_model.rest_prop = "buongiorno" + + assert my_dict.get("restProp") == my_model.get("restProp") == "buongiorno" + assert my_dict.get("rest_prop") is None # attr case should not work here + + my_dict["newProp"] = "i'm new" + my_model["newProp"] = "i'm new" + + assert my_dict.get("newProp") == my_model.get("newProp") == "i'm new" + assert my_dict.get("nonexistent") is my_model.get("nonexistent") is None + + assert my_dict.get("nonexistent", 0) == my_model.get("nonexistent", 0) == 0 + + +def test_pop(): + class MyModel(Model): + prop: str = rest_field() + rest_prop: str = rest_field(name="restProp") + + my_dict = {"prop": "hello", "restProp": "bonjour"} + my_model = MyModel(my_dict) + + assert my_dict.pop("prop") == my_model.pop("prop") == "hello" + with pytest.raises(KeyError): + my_dict.pop("prop") + with pytest.raises(KeyError): + my_model.pop("prop") + + my_dict["prop"] = "nihao" + my_model.prop = "nihao" + + assert my_dict.pop("prop") == my_model.pop("prop") == "nihao" + + with pytest.raises(KeyError): + my_dict.pop("prop") + with pytest.raises(KeyError): + my_model.pop("prop") + + my_dict["restProp"] = "buongiorno" + my_model.rest_prop = "buongiorno" + + assert my_dict.pop("restProp") == my_model.pop("restProp") == "buongiorno" + with pytest.raises(KeyError): + my_dict.pop("rest_prop") # attr case should not work here + + my_dict["newProp"] = "i'm new" + my_model["newProp"] = "i'm new" + + assert my_dict.pop("newProp") == my_model.pop("newProp") == "i'm new" + assert my_dict.pop("nonexistent", 0) == my_model.pop("nonexistent", 0) == 0 + + +def test_popitem(): + class ModelA(Model): + a_str_prop: str = rest_field(name="aStrProp") + + class ModelB(Model): + b_str_prop: str = rest_field(name="bStrProp") + + class ModelC(Model): + c_str_prop: str = rest_field(name="cStrProp") + + class MainModel(Model): + a_prop: ModelA = rest_field(name="aProp") + b_prop: ModelB = rest_field(name="bProp") + c_prop: ModelC = rest_field(name="cProp") + + my_dict = { + "aProp": {"aStrProp": "a"}, + "bProp": {"bStrProp": "b"}, + "cProp": {"cStrProp": "c"}, + } + + def _tests(my_dict: Dict[str, Any], my_model: MainModel): + my_dict = copy.deepcopy(my_dict) # so we don't get rid of the dict each time we run tests + + # pop c prop + dict_popitem = my_dict.popitem() + model_popitem = my_model.popitem() + assert dict_popitem[0] == model_popitem[0] == "cProp" + assert dict_popitem[1]["cStrProp"] == model_popitem[1]["cStrProp"] == model_popitem[1].c_str_prop == "c" + + # pop b prop + dict_popitem = my_dict.popitem() + model_popitem = my_model.popitem() + assert dict_popitem[0] == model_popitem[0] == "bProp" + assert dict_popitem[1]["bStrProp"] == model_popitem[1]["bStrProp"] == model_popitem[1].b_str_prop == "b" + + # pop a prop + dict_popitem = my_dict.popitem() + model_popitem = my_model.popitem() + assert dict_popitem[0] == model_popitem[0] == "aProp" + assert dict_popitem[1]["aStrProp"] == model_popitem[1]["aStrProp"] == model_popitem[1].a_str_prop == "a" + + with pytest.raises(KeyError): + my_dict.popitem() + + with pytest.raises(KeyError): + my_model.popitem() + + _tests(my_dict, MainModel(my_dict)) + _tests( 
+ my_dict, + MainModel( + a_prop=ModelA(a_str_prop="a"), + b_prop=ModelB(b_str_prop="b"), + c_prop=ModelC(c_str_prop="c"), + ), + ) + + +def test_clear(): + class ModelA(Model): + a_str_prop: str = rest_field(name="aStrProp") + + class ModelB(Model): + b_str_prop: str = rest_field(name="bStrProp") + + class ModelC(Model): + c_str_prop: str = rest_field(name="cStrProp") + + class MainModel(Model): + a_prop: ModelA = rest_field(name="aProp") + b_prop: ModelB = rest_field(name="bProp") + c_prop: ModelC = rest_field(name="cProp") + + my_dict = { + "aProp": {"aStrProp": "a"}, + "bProp": {"bStrProp": "b"}, + "cProp": {"cStrProp": "c"}, + } + + def _tests(my_dict: Dict[str, Any], my_model: MainModel): + my_dict = copy.deepcopy(my_dict) # so we don't get rid of the dict each time we run tests + + assert my_dict["aProp"] == my_model.a_prop == my_model["aProp"] == {"aStrProp": "a"} + my_dict.clear() + my_model.clear() + assert my_dict == my_model == {} + + assert my_model.a_prop is None + assert my_model.b_prop is None + assert my_model.c_prop is None + + my_dict.clear() + my_model.clear() + assert my_dict == my_model == {} + + _tests(my_dict, MainModel(my_dict)) + _tests( + my_dict, + MainModel( + a_prop=ModelA(a_str_prop="a"), + b_prop=ModelB(b_str_prop="b"), + c_prop=ModelC(c_str_prop="c"), + ), + ) + + +def test_update(): + class ModelA(Model): + a_str_prop: str = rest_field(name="aStrProp") + + class ModelB(Model): + b_str_prop: str = rest_field(name="bStrProp") + + class ModelC(Model): + c_str_prop: str = rest_field(name="cStrProp") + + class MainModel(Model): + a_prop: ModelA = rest_field(name="aProp") + b_prop: ModelB = rest_field(name="bProp") + c_prop: ModelC = rest_field(name="cProp") + + my_dict = { + "aProp": {"aStrProp": "a"}, + "bProp": {"bStrProp": "b"}, + "cProp": {"cStrProp": "c"}, + } + + def _tests(my_dict: Dict[str, Any], my_model: MainModel): + my_dict = copy.deepcopy(my_dict) # so we don't get rid of the dict each time we run tests + + assert my_dict["aProp"] == my_model.a_prop == my_model["aProp"] == {"aStrProp": "a"} + my_dict.update({"aProp": {"aStrProp": "newA"}}) + my_model.a_prop.update({"aStrProp": "newA"}) + assert my_dict["aProp"] == my_model.a_prop == my_model["aProp"] == {"aStrProp": "newA"} + + my_dict["bProp"].update({"newBProp": "hello"}) + my_model.b_prop.update({"newBProp": "hello"}) + + assert my_dict["bProp"] == my_model.b_prop == my_model["bProp"] == {"bStrProp": "b", "newBProp": "hello"} + + my_dict.update({"dProp": "hello"}) + my_model.update({"dProp": "hello"}) + + assert my_dict["dProp"] == my_model["dProp"] == "hello" + + _tests(my_dict, MainModel(my_dict)) + _tests( + my_dict, + MainModel( + a_prop=ModelA(a_str_prop="a"), + b_prop=ModelB(b_str_prop="b"), + c_prop=ModelC(c_str_prop="c"), + ), + ) + + +def test_setdefault(): + class Inner(Model): + str_prop: str = rest_field(name="strProp", default="modelDefault") + + class Outer(Model): + inner_prop: Inner = rest_field(name="innerProp") + + og_dict = {"innerProp": {}} + og_dict["innerProp"].setdefault("strProp", "actualDefault") + og_model = Outer(og_dict) + og_model.inner_prop.setdefault("strProp", "actualDefault") + + assert og_dict["innerProp"] == og_model["innerProp"] == og_model.inner_prop == {"strProp": "actualDefault"} + + assert ( + og_dict["innerProp"].setdefault("strProp") + == og_model["innerProp"].setdefault("strProp") + == og_model.inner_prop.setdefault("strProp") + == "actualDefault" + ) + + assert og_dict.setdefault("newProp") is og_model.setdefault("newProp") is None + assert 
og_dict["newProp"] is og_model["newProp"] is None + + +def test_repr(): + class ModelA(Model): + a_str_prop: str = rest_field(name="aStrProp") + + class ModelB(Model): + b_str_prop: str = rest_field(name="bStrProp") + + class ModelC(Model): + c_str_prop: str = rest_field(name="cStrProp") + + class MainModel(Model): + a_prop: ModelA = rest_field(name="aProp") + b_prop: ModelB = rest_field(name="bProp") + c_prop: ModelC = rest_field(name="cProp") + + my_dict = { + "aProp": {"aStrProp": "a"}, + "bProp": {"bStrProp": "b"}, + "cProp": {"cStrProp": "c"}, + } + + assert repr(my_dict) == repr(MainModel(my_dict)) + + +##### REWRITE BODY COMPLEX INTO THIS FILE ##### + + +def test_complex_basic(): + class Basic(Model): + id: Optional[int] = rest_field(default=None) + name: Optional[str] = rest_field(default=None) + color: Optional[Literal["cyan", "Magenta", "YELLOW", "blacK"]] = rest_field(default=None) + + @overload + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + color: Optional[Literal["cyan", "Magenta", "YELLOW", "blacK"]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + basic = Basic(id=2, name="abc", color="Magenta") + assert basic == {"id": 2, "name": "abc", "color": "Magenta"} + + basic.id = 3 + basic.name = "new_name" + basic.color = "blacK" + + assert basic == {"id": 3, "name": "new_name", "color": "blacK"} + + basic["id"] = 4 + basic["name"] = "newest_name" + basic["color"] = "YELLOW" + + assert basic == {"id": 4, "name": "newest_name", "color": "YELLOW"} + + +def test_complex_boolean_wrapper(): + class BooleanWrapper(Model): + field_true: Optional[bool] = rest_field(default=None) + field_false: Optional[bool] = rest_field(default=None) + + @overload + def __init__( + self, + *, + field_true: Optional[bool] = None, + field_false: Optional[bool] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + bool_model = BooleanWrapper(field_true=True, field_false=False) + assert bool_model == {"field_true": True, "field_false": False} + bool_model.field_true = False + bool_model.field_false = True + assert bool_model == {"field_true": False, "field_false": True} + + bool_model["field_true"] = True + bool_model["field_false"] = False + assert bool_model == {"field_true": True, "field_false": False} + + +def test_complex_byte_wrapper(): + class ByteWrapper(Model): + default: Optional[bytes] = rest_field(default=None) + base64: Optional[bytes] = rest_field(default=None, format="base64") + base64url: Optional[bytes] = rest_field(default=None, format="base64url") + list_base64: Optional[List[bytes]] = rest_field(default=None, format="base64") + map_base64url: Optional[Dict[str, bytes]] = rest_field(default=None, format="base64url") + + @overload + def __init__( + self, + *, + default: Optional[bytes] = None, + base64: Optional[bytes] = None, + base64url: Optional[bytes] = None, + list_base64: Optional[List[bytes]] = None, + map_base64url: Optional[Dict[str, bytes]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + byte_string = bytes("test", "utf-8") + mod = ByteWrapper( + default=byte_string, + base64=byte_string, + base64url=byte_string, + list_base64=[byte_string, byte_string], + map_base64url={"key1": byte_string, "key2": byte_string}, + ) + decoded = "dGVzdA==" + decoded_urlsafe = "dGVzdA" + + def _tests(mod: ByteWrapper): + assert mod == { + "default": decoded, + "base64": decoded, + "base64url": decoded_urlsafe, + "list_base64": [decoded, decoded], + "map_base64url": {"key1": decoded_urlsafe, "key2": decoded_urlsafe}, + } + assert mod.default == byte_string + assert mod.base64 == byte_string + assert mod.base64url == byte_string + assert mod.list_base64 == [byte_string, byte_string] + assert mod.map_base64url == {"key1": byte_string, "key2": byte_string} + assert mod["default"] == decoded + assert mod["base64"] == decoded + assert mod["base64url"] == decoded_urlsafe + assert mod["list_base64"] == [decoded, decoded] + assert mod["map_base64url"] == { + "key1": decoded_urlsafe, + "key2": decoded_urlsafe, + } + + _tests(mod) + mod.default = byte_string + mod.base64 = byte_string + mod.base64url = byte_string + mod.list_base64 = [byte_string, byte_string] + mod.map_base64url = {"key1": byte_string, "key2": byte_string} + _tests(mod) + mod["default"] = decoded + mod["base64"] = decoded + mod["base64url"] = decoded_urlsafe + mod["list_base64"] = [decoded, decoded] + mod["map_base64url"] = {"key1": decoded_urlsafe, "key2": decoded_urlsafe} + _tests(mod) + + +def test_complex_byte_array_wrapper(): + class ByteArrayWrapper(Model): + default: Optional[bytearray] = rest_field(default=None) + base64: Optional[bytearray] = rest_field(default=None, format="base64") + base64url: Optional[bytearray] = rest_field(default=None, format="base64url") + list_base64: Optional[List[bytearray]] = rest_field(default=None, format="base64") + map_base64url: Optional[Dict[str, bytearray]] = rest_field(default=None, format="base64url") + + @overload + def __init__( + self, + *, + default: Optional[bytearray] = None, + base64: Optional[bytearray] = None, + base64url: Optional[bytearray] = None, + list_base64: Optional[List[bytearray]] = None, + map_base64url: Optional[Dict[str, bytearray]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + byte_array = bytearray("test".encode("utf-8")) + decoded = "dGVzdA==" + decoded_urlsafe = "dGVzdA" + + def _tests(model: ByteArrayWrapper): + assert model == { + "default": decoded, + "base64": decoded, + "base64url": decoded_urlsafe, + "list_base64": [decoded, decoded], + "map_base64url": {"key1": decoded_urlsafe, "key2": decoded_urlsafe}, + } + assert model.default == byte_array + assert model.base64 == byte_array + assert model.base64url == byte_array + assert model.list_base64 == [byte_array, byte_array] + assert model.map_base64url == {"key1": byte_array, "key2": byte_array} + assert model["default"] == decoded + assert model["base64"] == decoded + assert model["base64url"] == decoded_urlsafe + assert model["list_base64"] == [decoded, decoded] + assert model["map_base64url"] == { + "key1": decoded_urlsafe, + "key2": decoded_urlsafe, + } + + _tests( + ByteArrayWrapper( + default=byte_array, + base64=byte_array, + base64url=byte_array, + list_base64=[byte_array, byte_array], + map_base64url={"key1": byte_array, "key2": byte_array}, + ) + ) + _tests( + ByteArrayWrapper( + { + "default": decoded, + "base64": decoded, + "base64url": decoded_urlsafe, + "list_base64": [decoded, decoded], + "map_base64url": {"key1": decoded_urlsafe, "key2": decoded_urlsafe}, + } + ) + ) + + +def test_complex_datetime_wrapper(): + class DatetimeWrapper(Model): + default: datetime.datetime = rest_field(default=None) + rfc3339: datetime.datetime = rest_field(default=None, format="rfc3339") + rfc7231: datetime.datetime = rest_field(default=None, format="rfc7231") + unix: datetime.datetime = rest_field(default=None, format="unix-timestamp") + list_rfc3339: List[datetime.datetime] = rest_field(default=None, format="rfc3339") + dict_rfc7231: Dict[str, datetime.datetime] = rest_field(default=None, format="rfc7231") + + @overload + def __init__( + self, + *, + default: Optional[datetime.datetime] = None, + rfc3339: Optional[datetime.datetime] = None, + rfc7231: Optional[datetime.datetime] = None, + unix: Optional[datetime.datetime] = None, + list_rfc3339: Optional[List[datetime.datetime]] = None, + dict_rfc7231: Optional[Dict[str, datetime.datetime]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
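+        # Dict access keeps the original wire form (RFC 3339 / RFC 7231 string, unix int),
+        # while attribute access deserializes every format to the same datetime value.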
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + rfc3339 = "2023-06-27T06:11:09Z" + rfc7231 = "Tue, 27 Jun 2023 06:11:09 GMT" + unix = 1687846269 + dt = datetime.datetime(2023, 6, 27, 6, 11, 9, tzinfo=datetime.timezone.utc) + + def _tests(model: DatetimeWrapper): + assert model["default"] == rfc3339 + assert model["rfc3339"] == rfc3339 + assert model["rfc7231"] == rfc7231 + assert model["unix"] == unix + assert model["list_rfc3339"] == [rfc3339, rfc3339] + assert model["dict_rfc7231"] == {"key1": rfc7231, "key2": rfc7231} + assert model.default == model.rfc3339 == model.rfc7231 == model.unix == dt + assert model.list_rfc3339 == [dt, dt] + assert model.dict_rfc7231 == {"key1": dt, "key2": dt} + + _tests( + DatetimeWrapper( + default=dt, + rfc3339=dt, + rfc7231=dt, + unix=dt, + list_rfc3339=[dt, dt], + dict_rfc7231={"key1": dt, "key2": dt}, + ) + ) + _tests( + DatetimeWrapper( + { + "default": rfc3339, + "rfc3339": rfc3339, + "rfc7231": rfc7231, + "unix": unix, + "list_rfc3339": [rfc3339, rfc3339], + "dict_rfc7231": {"key1": rfc7231, "key2": rfc7231}, + } + ) + ) + + +def test_complex_date_wrapper(): + class DateWrapper(Model): + field: datetime.date = rest_field(default=None) + leap: datetime.date = rest_field(default=None) + + @overload + def __init__( + self, + *, + field: Optional[datetime.date] = None, + leap: Optional[datetime.date] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + field = "0001-01-01" + leap = "2016-02-29" + + def _tests(model: DateWrapper): + assert model.field == isodate.parse_date(field) + assert model["field"] == field + + assert model.leap == isodate.parse_date(leap) + assert model["leap"] == leap + + model.field = isodate.parse_date(leap) + assert model.field == isodate.parse_date(leap) + assert model["field"] == leap + + model["field"] = field + assert model.field == isodate.parse_date(field) + assert model["field"] == field + + _tests(DateWrapper({"field": field, "leap": leap})) + _tests(DateWrapper(field=isodate.parse_date(field), leap=isodate.parse_date(leap))) + + +class DictionaryWrapper(Model): + default_program: Dict[str, str] = rest_field(name="defaultProgram", default=None) + + @overload + def __init__( + self, + *, + default_program: Optional[Dict[str, str]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
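+    # rest_field(name="defaultProgram") maps the snake_case attribute onto the camelCase
+    # wire key, so the parametrized tests below read the same value through either view.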
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +default_program = { + "txt": "notepad", + "bmp": "mspaint", + "xls": "excel", + "exe": "", + "": None, +} + + +@pytest.mark.parametrize( + "model", + [ + DictionaryWrapper({"defaultProgram": default_program}), + DictionaryWrapper(default_program=default_program), + ], +) +def test_complex_dictionary_wrapper(model: DictionaryWrapper): + assert model == {"defaultProgram": default_program} + assert model.default_program == model["defaultProgram"] == default_program + + +@pytest.mark.parametrize( + "model", + [DictionaryWrapper({"defaultProgram": {}}), DictionaryWrapper(default_program={})], +) +def test_complex_dictionary_wrapper_empty(model: DictionaryWrapper): + assert model == {"defaultProgram": {}} + assert model.default_program == model["defaultProgram"] == {} + + +@pytest.mark.parametrize( + "model", + [ + DictionaryWrapper({"defaultProgram": None}), + DictionaryWrapper(default_program=None), + ], +) +def test_complex_dictionary_wrapper_none(model: DictionaryWrapper): + assert model == {"defaultProgram": None} + assert model.default_program is None + + +class ArrayWrapper(Model): + array: Optional[List[str]] = rest_field(default=None) + + @overload + def __init__( + self, + *, + array: Optional[List[str]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +array_value = [ + "1, 2, 3, 4", + "", + None, + "&S#$(*Y", + "The quick brown fox jumps over the lazy dog", +] + + +@pytest.mark.parametrize("model", [ArrayWrapper(array=array_value), ArrayWrapper({"array": array_value})]) +def test_complex_array_wrapper(model: ArrayWrapper): + assert model == {"array": array_value} + assert model.array == model["array"] == array_value + + model.array = None + with pytest.raises(KeyError): + model["array"] + assert model.array is None + + model["array"] = [1, 2, 3, 4, 5] + assert model.array == ["1", "2", "3", "4", "5"] + assert model["array"] == [1, 2, 3, 4, 5] + + +@pytest.mark.parametrize("model", [ArrayWrapper(array=[]), ArrayWrapper({"array": []})]) +def test_complex_array_wrapper_empty(model: ArrayWrapper): + assert model == {"array": []} + assert model.array == model["array"] == [] + + model.array = ["bonjour"] + assert model.array == model["array"] == ["bonjour"] + + +@pytest.mark.parametrize("model", [ArrayWrapper(array=None), ArrayWrapper({"array": None})]) +def test_complex_array_wrapper_none(model: ArrayWrapper): + assert model == {"array": None} + assert model.array is model["array"] is None + + model.array = ["bonjour"] + assert model.array == model["array"] == ["bonjour"] + + +class PetComplex(Model): + id: Optional[int] = rest_field(default=None) + name: Optional[str] = rest_field(default=None) + + @overload + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class DogComplex(PetComplex): + food: Optional[str] = rest_field(default=None) + + @overload + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + food: Optional[str] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class CatComplex(PetComplex): + color: Optional[str] = rest_field(default=None) + hates: Optional[List[DogComplex]] = rest_field(default=None) + + @overload + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + food: Optional[str] = None, + color: Optional[str] = None, + hates: Optional[List[DogComplex]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +@pytest.mark.parametrize( + "model", + [ + CatComplex( + id=2, + name="Siamese", + hates=[ + DogComplex(id=1, name="Potato", food="tomato"), + DogComplex(id=-1, name="Tomato", food="french fries"), + ], + ), + CatComplex( + id=2, + name="Siamese", + hates=[ + DogComplex(id=1, name="Potato", food="tomato"), + {"id": -1, "name": "Tomato", "food": "french fries"}, + ], + ), + CatComplex( + id=2, + name="Siamese", + hates=[ + {"id": 1, "name": "Potato", "food": "tomato"}, + {"id": -1, "name": "Tomato", "food": "french fries"}, + ], + ), + ], +) +def test_complex_inheritance(model): + assert model.id == model["id"] == 2 + assert model.name == model["name"] == "Siamese" + assert model.hates + assert model.hates[1] == model["hates"][1] == {"id": -1, "name": "Tomato", "food": "french fries"} + model["breed"] = "persian" + model["color"] = "green" + with pytest.raises(AttributeError): + model.breed + assert model == { + "id": 2, + "name": "Siamese", + "color": "green", + "breed": "persian", + "hates": [ + DogComplex(id=1, name="Potato", food="tomato"), + DogComplex(id=-1, name="Tomato", food="french fries"), + ], + } + + +def test_required_prop_not_passed(): + class ModelWithRequiredProperty(Model): + required_property: int = rest_field(name="requiredProperty") + + @overload + def __init__( + self, + *, + required_property: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... 
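+        # Required fields get no implicit default: with nothing passed in, attribute access
+        # returns None and dict access raises KeyError, as the assertions below verify.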
+ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + model = ModelWithRequiredProperty() + assert model.required_property is None + with pytest.raises(KeyError): + model["requiredProperty"] + + model = ModelWithRequiredProperty({}) + assert model.required_property is None + with pytest.raises(KeyError): + model["requiredProperty"] + + +def test_null_serialization(core_library): + dict_response = { + "name": "it's me!", + "listOfMe": [ + { + "name": "it's me!", + } + ], + "dictOfMe": { + "me": { + "name": "it's me!", + } + }, + "dictOfListOfMe": { + "many mes": [ + { + "name": "it's me!", + } + ] + }, + "listOfDictOfMe": None, + } + model = RecursiveModel(dict_response) + assert json.loads(json.dumps(model, cls=SdkJSONEncoder)) == dict_response + + assert model.as_dict() == dict_response + + model.list_of_me = core_library.serialization.NULL + model.dict_of_me = None + model.list_of_dict_of_me = [ + { + "me": { + "name": "it's me!", + } + } + ] + model.dict_of_list_of_me["many mes"][0].list_of_me = core_library.serialization.NULL + model.dict_of_list_of_me["many mes"][0].dict_of_me = None + model.list_of_dict_of_me[0]["me"].list_of_me = core_library.serialization.NULL + model.list_of_dict_of_me[0]["me"].dict_of_me = None + + assert json.loads(json.dumps(model, cls=SdkJSONEncoder)) == { + "name": "it's me!", + "listOfMe": None, + "dictOfListOfMe": { + "many mes": [ + { + "name": "it's me!", + "listOfMe": None, + } + ] + }, + "listOfDictOfMe": [ + { + "me": { + "name": "it's me!", + "listOfMe": None, + } + } + ], + } + + assert model.as_dict() == { + "name": "it's me!", + "listOfMe": None, + "dictOfListOfMe": { + "many mes": [ + { + "name": "it's me!", + "listOfMe": None, + } + ] + }, + "listOfDictOfMe": [ + { + "me": { + "name": "it's me!", + "listOfMe": None, + } + } + ], + } + + +class UnionBaseModel(Model): + name: str = rest_field() + + @overload + def __init__(self, *, name: str): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class UnionModel1(UnionBaseModel): + prop1: int = rest_field() + + @overload + def __init__(self, *, name: str, prop1: int): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class UnionModel2(UnionBaseModel): + prop2: int = rest_field() + + @overload + def __init__(self, *, name: str, prop2: int): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +MyNamedUnion = Union["UnionModel1", "UnionModel2"] + + +class ModelWithNamedUnionProperty(Model): + named_union: "MyNamedUnion" = rest_field(name="namedUnion") + + @overload + def __init__(self, *, named_union: "MyNamedUnion"): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ModelWithSimpleUnionProperty(Model): + simple_union: Union[int, List[int]] = rest_field(name="simpleUnion") + + @overload + def __init__(self, *, simple_union: Union[int, List[int]]): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... 
+ + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +def test_union(): + simple = ModelWithSimpleUnionProperty(simple_union=1) + assert simple.simple_union == simple["simpleUnion"] == 1 + simple = ModelWithSimpleUnionProperty(simple_union=[1, 2]) + assert simple.simple_union == simple["simpleUnion"] == [1, 2] + named = ModelWithNamedUnionProperty() + assert not _is_model(named.named_union) + named.named_union = UnionModel1(name="model1", prop1=1) + assert _is_model(named.named_union) + assert named.named_union == named["namedUnion"] == {"name": "model1", "prop1": 1} + named = ModelWithNamedUnionProperty(named_union=UnionModel2(name="model2", prop2=2)) + assert named.named_union == named["namedUnion"] == {"name": "model2", "prop2": 2} + named = ModelWithNamedUnionProperty({"namedUnion": {"name": "model2", "prop2": 2}}) + assert named.named_union == named["namedUnion"] == {"name": "model2", "prop2": 2} + + +def test_as_dict(): + class CatComplex(PetComplex): + color: Optional[str] = rest_field(default=None) + hates: Optional[List[DogComplex]] = rest_field(default=None, visibility=["read"]) + + @overload + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + food: Optional[str] = None, + color: Optional[str] = None, + hates: Optional[List[DogComplex]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + model = CatComplex( + id=2, + name="Siamese", + hates=[ + DogComplex(id=1, name="Potato", food="tomato"), + DogComplex(id=-1, name="Tomato", food="french fries"), + ], + ) + assert model.as_dict(exclude_readonly=True) == { + "id": 2, + "name": "Siamese", + "color": None, + } + + +class Fish(Model): + __mapping__: Dict[str, Model] = {} + age: int = rest_field() + kind: Literal[None] = rest_discriminator(name="kind") + + @overload + def __init__( + self, + *, + age: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind: Literal[None] = None + + +class Shark(Fish, discriminator="shark"): + __mapping__: Dict[str, Model] = {} + kind: Literal["shark"] = rest_discriminator(name="kind") + sharktype: Literal[None] = rest_discriminator(name="sharktype") + + @overload + def __init__( + self, + *, + age: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind: Literal["shark"] = "shark" + self.sharktype: Literal[None] = None + + +class GoblinShark(Shark, discriminator="goblin"): + sharktype: Literal["goblin"] = rest_discriminator(name="sharktype") + + @overload + def __init__( + self, + *, + age: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.sharktype: Literal["goblin"] = "goblin" + + +class Salmon(Fish, discriminator="salmon"): + kind: Literal["salmon"] = rest_discriminator(name="kind") + friends: Optional[List["Fish"]] = rest_field() + hate: Optional[Dict[str, "Fish"]] = rest_field() + partner: Optional["Fish"] = rest_field() + + @overload + def __init__( + self, + *, + age: int, + friends: Optional[List["Fish"]] = None, + hate: Optional[Dict[str, "Fish"]] = None, + partner: Optional["Fish"] = None, + ): ... 
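+    # Each subclass passes discriminator="..." in its class statement and pins the matching
+    # rest_discriminator field to a Literal in __init__; test_discriminator below feeds plain
+    # nested dicts and expects them to come back as the right subclass (e.g. partner resolves
+    # to SawShark, hate["key2"] to GoblinShark).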
+ + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.kind: Literal["salmon"] = "salmon" + + +class SawShark(Shark, discriminator="saw"): + sharktype: Literal["saw"] = rest_discriminator(name="sharktype") + + @overload + def __init__( + self, + *, + age: int, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): ... + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.sharktype: Literal["saw"] = "saw" + + +def test_discriminator(): + input = { + "age": 1, + "kind": "salmon", + "partner": { + "age": 2, + "kind": "shark", + "sharktype": "saw", + }, + "friends": [ + { + "age": 2, + "kind": "salmon", + "partner": { + "age": 3, + "kind": "salmon", + }, + "hate": { + "key1": { + "age": 4, + "kind": "salmon", + }, + "key2": { + "age": 2, + "kind": "shark", + "sharktype": "goblin", + }, + }, + }, + { + "age": 3, + "kind": "shark", + "sharktype": "goblin", + }, + ], + "hate": { + "key3": { + "age": 3, + "kind": "shark", + "sharktype": "saw", + }, + "key4": { + "age": 2, + "kind": "salmon", + "friends": [ + { + "age": 1, + "kind": "salmon", + }, + { + "age": 4, + "kind": "shark", + "sharktype": "goblin", + }, + ], + }, + }, + } + + model = Salmon(input) + assert model == input + assert model.partner.age == 2 + assert model.partner == SawShark(age=2) + assert model.friends[0].hate["key2"] == GoblinShark(age=2) + + +def test_body_bytes_format(): + assert json.dumps(bytes("test", "utf-8"), cls=SdkJSONEncoder) == '"dGVzdA=="' + assert json.dumps(bytearray("test", "utf-8"), cls=SdkJSONEncoder) == '"dGVzdA=="' + assert json.dumps(bytes("test", "utf-8"), cls=SdkJSONEncoder, format="base64") == '"dGVzdA=="' + assert json.dumps(bytes("test", "utf-8"), cls=SdkJSONEncoder, format="base64url") == '"dGVzdA"' + assert json.dumps(bytearray("test", "utf-8"), cls=SdkJSONEncoder, format="base64") == '"dGVzdA=="' + assert json.dumps(bytearray("test", "utf-8"), cls=SdkJSONEncoder, format="base64url") == '"dGVzdA"' + + assert ( + json.dumps([bytes("test", "utf-8"), bytes("test", "utf-8")], cls=SdkJSONEncoder) == '["dGVzdA==", "dGVzdA=="]' + ) + assert ( + json.dumps([bytearray("test", "utf-8"), bytearray("test", "utf-8")], cls=SdkJSONEncoder) + == '["dGVzdA==", "dGVzdA=="]' + ) + assert ( + json.dumps( + [bytes("test", "utf-8"), bytes("test", "utf-8")], + cls=SdkJSONEncoder, + format="base64", + ) + == '["dGVzdA==", "dGVzdA=="]' + ) + assert ( + json.dumps( + [bytes("test", "utf-8"), bytes("test", "utf-8")], + cls=SdkJSONEncoder, + format="base64url", + ) + == '["dGVzdA", "dGVzdA"]' + ) + assert ( + json.dumps( + [bytearray("test", "utf-8"), bytearray("test", "utf-8")], + cls=SdkJSONEncoder, + format="base64", + ) + == '["dGVzdA==", "dGVzdA=="]' + ) + assert ( + json.dumps( + [bytearray("test", "utf-8"), bytearray("test", "utf-8")], + cls=SdkJSONEncoder, + format="base64url", + ) + == '["dGVzdA", "dGVzdA"]' + ) + + assert ( + json.dumps( + {"a": bytes("test", "utf-8"), "b": bytes("test", "utf-8")}, + cls=SdkJSONEncoder, + ) + == '{"a": "dGVzdA==", "b": "dGVzdA=="}' + ) + assert ( + json.dumps( + {"a": bytearray("test", "utf-8"), "b": bytearray("test", "utf-8")}, + cls=SdkJSONEncoder, + ) + == '{"a": "dGVzdA==", "b": "dGVzdA=="}' + ) + assert ( + json.dumps( + {"a": bytes("test", "utf-8"), "b": bytes("test", "utf-8")}, + cls=SdkJSONEncoder, + format="base64", + ) + == '{"a": "dGVzdA==", "b": "dGVzdA=="}' + ) + assert ( + json.dumps( + {"a": 
bytes("test", "utf-8"), "b": bytes("test", "utf-8")}, + cls=SdkJSONEncoder, + format="base64url", + ) + == '{"a": "dGVzdA", "b": "dGVzdA"}' + ) + assert ( + json.dumps( + {"a": bytearray("test", "utf-8"), "b": bytearray("test", "utf-8")}, + cls=SdkJSONEncoder, + format="base64", + ) + == '{"a": "dGVzdA==", "b": "dGVzdA=="}' + ) + assert ( + json.dumps( + {"a": bytearray("test", "utf-8"), "b": bytearray("test", "utf-8")}, + cls=SdkJSONEncoder, + format="base64url", + ) + == '{"a": "dGVzdA", "b": "dGVzdA"}' + ) + + +def test_decimal_deserialization(): + class DecimalModel(Model): + decimal_value: decimal.Decimal = rest_field(name="decimalValue") + + @overload + def __init__(self, *, decimal_value: decimal.Decimal): ... + + @overload + def __init__(self, mapping: Mapping[str, Any], /): ... + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + model = DecimalModel({"decimalValue": 0.33333}) + assert model["decimalValue"] == 0.33333 + assert model.decimal_value == decimal.Decimal("0.33333") + + class BaseModel(Model): + my_prop: DecimalModel = rest_field(name="myProp") + + model = BaseModel({"myProp": {"decimalValue": 0.33333}}) + assert isinstance(model.my_prop, DecimalModel) + assert model.my_prop["decimalValue"] == model["myProp"]["decimalValue"] == 0.33333 + assert model.my_prop.decimal_value == decimal.Decimal("0.33333") + + +def test_decimal_serialization(): + assert json.dumps(decimal.Decimal("0.33333"), cls=SdkJSONEncoder) == "0.33333" + assert ( + json.dumps([decimal.Decimal("0.33333"), decimal.Decimal("0.33333")], cls=SdkJSONEncoder) == "[0.33333, 0.33333]" + ) + assert ( + json.dumps( + {"a": decimal.Decimal("0.33333"), "b": decimal.Decimal("0.33333")}, + cls=SdkJSONEncoder, + ) + == '{"a": 0.33333, "b": 0.33333}' + ) + + +def test_int_as_str_deserialization(): + class IntAsStrModel(Model): + int_as_str_value: int = rest_field(name="intAsStrValue", format="str") + + model = IntAsStrModel({"intAsStrValue": "123"}) + assert model["intAsStrValue"] == "123" + assert model.int_as_str_value == 123 + + class BaseModel(Model): + my_prop: IntAsStrModel = rest_field(name="myProp") + + model = BaseModel({"myProp": {"intAsStrValue": "123"}}) + assert isinstance(model.my_prop, IntAsStrModel) + assert model.my_prop["intAsStrValue"] == model["myProp"]["intAsStrValue"] == "123" + assert model.my_prop.int_as_str_value == 123 + + +def test_deserialize(): + expected = {"name": "name", "role": "role"} + result = _deserialize(JSON, expected) + assert result == expected + + +def test_enum_deserialization(): + class MyEnum(Enum): + A = "a" + B = "b" + + class ModelWithEnumProperty(Model): + enum_property: Union[str, MyEnum] = rest_field(name="enumProperty") + enum_property_optional: Optional[Union[str, MyEnum]] = rest_field(name="enumPropertyOptional") + enum_property_optional_none: Optional[Union[str, MyEnum]] = rest_field(name="enumPropertyOptionalNone") + + raw_input = { + "enumProperty": "a", + "enumPropertyOptional": "b", + "enumPropertyOptionalNone": None, + } + + def check_func(target: ModelWithEnumProperty): + assert target.enum_property == MyEnum.A + assert target["enumProperty"] == "a" + assert isinstance(target.enum_property, Enum) + assert isinstance(target["enumProperty"], str) + + assert target.enum_property_optional == MyEnum.B + assert target["enumPropertyOptional"] == "b" + assert isinstance(target.enum_property_optional, Enum) + assert isinstance(target["enumPropertyOptional"], str) + + assert target.enum_property_optional_none is None + assert 
target["enumPropertyOptionalNone"] is None + + model = ModelWithEnumProperty(raw_input) + check_func(model) + + result = _deserialize(List[ModelWithEnumProperty], [raw_input]) + for item in result: + check_func(item) + + +def test_not_mutating_original_dict(): + class MyInnerModel(Model): + property: str = rest_field() + + class MyModel(Model): + property: MyInnerModel = rest_field() + + origin = {"property": {"property": "hello"}} + + dpg_model = MyModel(origin) + assert dpg_model["property"]["property"] == "hello" + + origin["property"]["property"] = "world" + assert dpg_model["property"]["property"] == "hello" + + +def test_model_init_io(): + class BytesModel(Model): + property: bytes = rest_field() + + JPG = Path(__file__).parent.parent / "data/image.jpg" + with open(JPG, "rb") as f: + b = BytesModel({"property": f}) + assert b.property == f + assert b["property"] == f + with open(JPG, "rb") as f: + b = BytesModel(property=f) + assert b.property == f + assert b["property"] == f + + +def test_additional_properties_serialization(): + value = { + "name": "test", + "modelProp": {"name": "test"}, + "stringProp": "string", + "intProp": 1, + "floatProp": 1.0, + "boolProp": True, + "listProp": [1, 2, 3], + "dictProp": {"key": "value"}, + "noneProp": None, + "datetimeProp": "2023-06-27T06:11:09Z", + "durationProp": "P1D", + } + + class NormalModel(Model): + prop: str = rest_field(name="name") + + class AdditionalPropertiesModel(Model): + name: str = rest_field(name="name") + + model = AdditionalPropertiesModel(name="test") + prop = NormalModel(prop="test") + model["modelProp"] = prop + model["stringProp"] = "string" + model["intProp"] = 1 + model["floatProp"] = 1.0 + model["boolProp"] = True + model["listProp"] = [1, 2, 3] + model["dictProp"] = {"key": "value"} + model["noneProp"] = None + model["datetimeProp"] = datetime.datetime(2023, 6, 27, 6, 11, 9, tzinfo=datetime.timezone.utc) + model["durationProp"] = datetime.timedelta(days=1) + + assert json.loads(json.dumps(model, cls=SdkJSONEncoder)) == value diff --git a/packages/http-client-python/test/generic_mock_api_tests/unittests/test_model_base_xml_serialization.py b/packages/http-client-python/test/generic_mock_api_tests/unittests/test_model_base_xml_serialization.py new file mode 100644 index 0000000000..2337da9d89 --- /dev/null +++ b/packages/http-client-python/test/generic_mock_api_tests/unittests/test_model_base_xml_serialization.py @@ -0,0 +1,1001 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +import xml.etree.ElementTree as ET + +from typing import ( + List, + Literal, + Dict, +) + +from specialwords._model_base import ( + _get_element, + Model, + rest_field, + rest_discriminator, + _deserialize_xml, +) + + +def assert_xml_equals(x1, x2): + ET.dump(x1) + ET.dump(x2) + + assert x1.tag == x2.tag + assert (x1.text or "").strip() == (x2.text or "").strip() + # assert x1.tail == x2.tail # Swagger does not change tail + assert x1.attrib == x2.attrib + assert len(x1) == len(x2) + for c1, c2 in zip(x1, x2): + assert_xml_equals(c1, c2) + + +class TestXmlDeserialization: + def test_basic(self): + """Test an ultra basic XML.""" + basic_xml = """ + + 12 + + 12.34 + + true + + test + + """ + + class XmlModel(Model): + int_field: int = rest_field(name="int", xml={"name": "Int"}) + empty_int: int = rest_field(name="empty_int", xml={"name": "EmptyInt"}) + float_field: float = rest_field(name="float", xml={"name": "Float"}) + empty_float: float = rest_field(name="empty_float", xml={"name": "EmptyFloat"}) + bool_field: bool = rest_field(name="bool", xml={"name": "Bool"}) + empty_bool: bool = rest_field(name="empty_bool", xml={"name": "EmptyBool"}) + string: str = rest_field(name="string", xml={"name": "String"}) + empty_string: str = rest_field(name="empty_string", xml={"name": "EmptyString"}) + not_set: str = rest_field(name="not_set", xml={"name": "NotSet"}) + country: str = rest_field(name="country", xml={"name": "country", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + + assert result.int_field == 12 + assert result.empty_int is None + assert result.float_field == 12.34 + assert result.empty_float is None + assert result.bool_field is True + assert result.empty_bool is None + assert result.string == "test" + assert result.country == "france" + assert result.empty_string == "" + assert result.not_set is None + + def test_basic_unicode(self): + """Test a XML with unicode.""" + basic_xml = """ + """ + + class XmlModel(Model): + language: str = rest_field(name="language", xml={"name": "language", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + + assert result.language == "français" + + def test_basic_text(self): + """Test a XML with unicode.""" + basic_xml = """ + I am text""" + + class XmlModel(Model): + language: str = rest_field(name="language", xml={"name": "language", "attribute": True}) + content: str = rest_field(name="content", xml={"name": "content", "text": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + + assert result.language == "english" + assert result.content == "I am text" + + def test_dict_type(self): + """Test dict type.""" + basic_xml = """ + + + value1 + value2 + + """ + + class XmlModel(Model): + metadata: Dict[str, str] = rest_field(name="Metadata", xml={"name": "Metadata"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + + assert len(result.metadata) == 2 + assert result.metadata["Key1"] == "value1" + assert result.metadata["Key2"] == "value2" + + def test_basic_empty_list(self): + """Test an basic XML with an empty node.""" + basic_xml = """ + + + """ 
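+        # A wrapped list element that is present but empty deserializes to [], while the
+        # unwrapped variant in the next test yields None when the element is absent.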
+ + class XmlModel(Model): + age: List[str] = rest_field(name="age", xml={"name": "Age"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + assert result.age == [] + + def test_basic_empty_list_unwrapped(self): + """Test an basic XML with an empty node.""" + basic_xml = """ + """ + + class XmlModel(Model): + age: List[str] = rest_field(name="age", xml={"name": "Age", "unwrapped": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + assert result.age is None + + def test_list_wrapped_items_name_basic_types(self): + """Test XML list and wrap, items is basic type and there is itemsName.""" + + basic_xml = """ + + + granny + fuji + + """ + + class AppleBarrel(Model): + good_apples: List[str] = rest_field(name="GoodApples", xml={"name": "GoodApples", "itemsName": "Apple"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + result = _deserialize_xml(AppleBarrel, basic_xml) + assert result.good_apples == ["granny", "fuji"] + + def test_list_not_wrapped_items_name_basic_types(self): + """Test XML list and no wrap, items is basic type and there is itemsName.""" + + basic_xml = """ + + granny + fuji + """ + + class AppleBarrel(Model): + good_apples: List[str] = rest_field( + name="GoodApples", + xml={"name": "GoodApples", "unwrapped": True, "itemsName": "Apple"}, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + result = _deserialize_xml(AppleBarrel, basic_xml) + assert result.good_apples == ["granny", "fuji"] + + def test_list_wrapped_items_name_complex_types(self): + """Test XML list and wrap, items is ref and there is itemsName.""" + + basic_xml = """ + + + + + + """ + + class Apple(Model): + name: str = rest_field(name="name", xml={"name": "name", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Apple"} + + class AppleBarrel(Model): + good_apples: List[Apple] = rest_field(name="GoodApples", xml={"name": "GoodApples", "itemsName": "Apple"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + result = _deserialize_xml(AppleBarrel, basic_xml) + assert [apple.name for apple in result.good_apples] == ["granny", "fuji"] + + def test_list_not_wrapped_items_name_complex_types(self): + """Test XML list and wrap, items is ref and there is itemsName.""" + + basic_xml = """ + + + + """ + + class Apple(Model): + name: str = rest_field(name="name", xml={"name": "name", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Apple"} + + class AppleBarrel(Model): + good_apples: List[Apple] = rest_field( + name="GoodApples", + xml={"name": "GoodApples", "unwrapped": True, "itemsName": "Apple"}, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + result = _deserialize_xml(AppleBarrel, basic_xml) + assert [apple.name for apple in result.good_apples] == ["granny", "fuji"] + + def test_list_not_wrapped_items_name_complex_types(self): + """Test XML list and wrap, items is ref and there is itemsName.""" + + basic_xml = """ + + + + """ + + class Apple(Model): + name: str = rest_field(name="name", xml={"name": 
"name", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Apple"} + + class AppleBarrel(Model): + good_apples: List[Apple] = rest_field( + name="GoodApples", + xml={"name": "GoodApples", "unwrapped": True, "itemsName": "Apple"}, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + result = _deserialize_xml(AppleBarrel, basic_xml) + assert [apple.name for apple in result.good_apples] == ["granny", "fuji"] + + def test_basic_additional_properties(self): + """Test additional properties.""" + basic_xml = """ + + text + + a + b + c + + + a + b + + """ + + class XmlModel(Model): + name: str = rest_field(name="name", xml={"name": "Name"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + + assert result.name is None + assert result["add1"] == "text" + assert result["add2"] == ["a", "b", "c"] + assert result["add3"] == {"a": "a", "b": "b"} + + def test_basic_namespace(self): + """Test an ultra basic XML.""" + basic_xml = """ + + 37 + """ + + class XmlModel(Model): + age: int = rest_field( + name="age", + xml={ + "name": "Age", + "prefix": "fictional", + "ns": "http://characters.example.com", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + result = _deserialize_xml(XmlModel, basic_xml) + assert result.age == 37 + + def test_complex_namespace(self): + """Test recursive namespace.""" + basic_xml = """ + + + lmazuel + + + + testpolicy + + + + 12 + + """ + + class QueueDescriptionResponseAuthor(Model): + name: str = rest_field(name="name", xml={"ns": "http://www.w3.org/2005/Atom"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"ns": "http://www.w3.org/2005/Atom"} + + class AuthorizationRule(Model): + type: str = rest_field( + name="type", + xml={ + "attribute": True, + "prefix": "i", + "ns": "http://www.w3.org/2001/XMLSchema-instance", + }, + ) + key_name: str = rest_field( + name="KeyName", + xml={"ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"}, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"} + + class MessageCountDetails(Model): + active_message_count: int = rest_field( + name="ActiveMessageCount", + xml={ + "prefix": "d2p1", + "ns": "http://schemas.microsoft.com/netservices/2011/06/servicebus", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = { + "name": "CountDetails", + "ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect", + } + + class XmlRoot(Model): + author: QueueDescriptionResponseAuthor = rest_field(name="author") + authorization_rules: List[AuthorizationRule] = rest_field( + name="AuthorizationRules", + xml={ + "ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect", + }, + ) + message_count_details: MessageCountDetails = rest_field( + name="MessageCountDetails", + xml={ + "ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "entry", "ns": "http://www.w3.org/2005/Atom"} + + result = _deserialize_xml(XmlRoot, basic_xml) + + assert result.author.name == "lmazuel" + assert 
result.authorization_rules[0].key_name == "testpolicy" + assert result.authorization_rules[0].type == "SharedAccessAuthorizationRule" + assert result.message_count_details.active_message_count == 12 + + def test_polymorphic_deserialization(self): + basic_xml = """ + + + 12 + + """ + + class RuleFilter(Model): + __mapping__: Dict[str, Model] = {} + type: Literal[None] = rest_discriminator( + name="type", + xml={ + "attribute": True, + "prefix": "xsi", + "ns": "http://www.w3.org/2001/XMLSchema-instance", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.type: Literal[None] = None + + _xml = {"name": "Filter"} + + class CorrelationFilter(RuleFilter): + type: Literal["CorrelationFilter"] = rest_discriminator( + name="type", + xml={ + "attribute": True, + "prefix": "xsi", + "ns": "http://www.w3.org/2001/XMLSchema-instance", + }, + ) + correlation_id: int = rest_field(name="CorrelationId") + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.type: Literal["CorrelationFilter"] = "CorrelationFilter" + + class SqlFilter(RuleFilter): + type: Literal["SqlFilter"] = rest_discriminator( + name="type", + xml={ + "attribute": True, + "prefix": "xsi", + "ns": "http://www.w3.org/2001/XMLSchema-instance", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.type: Literal["SqlFilter"] = "SqlFilter" + + class XmlRoot(Model): + filter: RuleFilter = rest_field(name="Filter") + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "entry"} + + result = _deserialize_xml(XmlRoot, basic_xml) + + assert isinstance(result.filter, CorrelationFilter) + assert result.filter.correlation_id == 12 + + +class TestXmlSerialization: + def test_basic(self): + """Test an ultra basic XML.""" + basic_xml = ET.fromstring( + """ + + 37 + """ + ) + + class XmlModel(Model): + age: int = rest_field(xml={"name": "Age"}) + country: str = rest_field(xml={"name": "country", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + xml_model = XmlModel(age=37, country="france") + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_basic_unicode(self): + """Test a XML with unicode.""" + basic_xml = ET.fromstring( + """ + """.encode( + "utf-8" + ) + ) + + class XmlModel(Model): + language: str = rest_field(xml={"name": "language", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + xml_model = XmlModel(language="français") + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_nested_unicode(self): + class XmlModel(Model): + message_text: str = rest_field(name="MessageText", xml={"name": "MessageText"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Message"} + + xml_model_no_unicode = XmlModel(message_text="message1") + xml_element = _get_element(xml_model_no_unicode) + xml_content = ET.tostring(xml_element, encoding="utf8") + assert ( + xml_content + == b"\nmessage1" + ) + + xml_model_with_unicode = XmlModel(message_text="message1㚈") + xml_element = _get_element(xml_model_with_unicode) + xml_content = ET.tostring(xml_element, encoding="utf8") + assert ( + xml_content + == b"\nmessage1\xe3\x9a\x88" + ) + + def test_type_basic(self): + """Test basic types.""" + basic_xml = ET.fromstring( + """ + + 37 + true + """ + ) + + class XmlModel(Model): + age: int = 
rest_field(name="age", xml={"name": "Age"}) + enabled: bool = rest_field(name="enabled", xml={"name": "Enabled"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + xml_model = XmlModel(age=37, enabled=True) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_basic_text(self): + """Test a XML with unicode.""" + basic_xml = ET.fromstring( + """ + I am text""" + ) + + class XmlModel(Model): + language: str = rest_field(name="language", xml={"name": "language", "attribute": True}) + content: str = rest_field(name="content", xml={"text": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + xml_model = XmlModel(language="english", content="I am text") + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_dict_type(self): + """Test dict type.""" + basic_xml = ET.fromstring( + """ + + + value1 + value2 + + """ + ) + + class XmlModel(Model): + metadata: Dict[str, str] = rest_field(name="Metadata", xml={"name": "Metadata"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + xml_model = XmlModel( + metadata={ + "Key1": "value1", + "Key2": "value2", + } + ) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_additional_properties(self): + """Test additional properties.""" + basic_xml = ET.fromstring( + """ + + test + text + + a + b + c + + + a + b + + """ + ) + + class XmlModel(Model): + name: str = rest_field(name="name", xml={"name": "Name"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + xml_model = XmlModel( + { + "name": "test", + "add1": "text", + "add2": ["a", "b", "c"], + "add3": {"a": "a", "b": "b"}, + } + ) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_list_wrapped_basic_types(self): + """Test XML list and wrap, items is basic type and there is no itemsName.""" + + basic_xml = ET.fromstring( + """ + + + granny + fuji + + """ + ) + + class AppleBarrel(Model): + good_apples: List[str] = rest_field(name="GoodApples", xml={"name": "GoodApples"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + xml_model = AppleBarrel(good_apples=["granny", "fuji"]) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_list_not_wrapped_basic_types(self): + """Test XML list and no wrap, items is basic type and there is no itemsName.""" + + basic_xml = ET.fromstring( + """ + + granny + fuji + """ + ) + + class AppleBarrel(Model): + good_apples: List[str] = rest_field(name="GoodApples", xml={"name": "GoodApples", "unwrapped": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + xml_model = AppleBarrel(good_apples=["granny", "fuji"]) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_list_wrapped_basic_types_items_name(self): + """Test XML list and wrap, items is basic type and itemsName.""" + + basic_xml = ET.fromstring( + """ + + + granny + fuji + + """ + ) + + class AppleBarrel(Model): + good_apples: List[str] = rest_field(name="GoodApples", xml={"name": "GoodApples", "itemsName": "string"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + xml_model = AppleBarrel(good_apples=["granny", "fuji"]) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def 
test_list_not_wrapped_basic_types_items_name(self): + """Test XML list and no wrap, items is basic type and itemsName.""" + + basic_xml = ET.fromstring( + """ + + granny + fuji + """ + ) + + class AppleBarrel(Model): + good_apples: List[str] = rest_field( + name="GoodApples", + xml={"name": "GoodApples", "unwrapped": True, "itemsName": "string"}, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + xml_model = AppleBarrel(good_apples=["granny", "fuji"]) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_list_wrapped_complex_types(self): + """Test XML list and wrap, items is ref.""" + + basic_xml = ET.fromstring( + """ + + + + + + """ + ) + + class Apple(Model): + name: str = rest_field(name="name", xml={"name": "name", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Apple"} + + class AppleBarrel(Model): + good_apples: List[Apple] = rest_field(name="GoodApples", xml={"name": "GoodApples"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "AppleBarrel"} + + test = AppleBarrel({"GoodApples": [{"name": "granny"}, {"name": "fuji"}]}) + xml_model = AppleBarrel(good_apples=[Apple(name="granny"), Apple(name="fuji")]) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_list_not_wrapped_complex_types(self): + """Test XML list and wrap, items is ref.""" + + basic_xml = ET.fromstring( + """ + + + + """ + ) + + class Apple(Model): + name: str = rest_field(name="name", xml={"name": "name", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Apple"} + + class AppleBarrel(Model): + good_apples: List[Apple] = rest_field(name="GoodApples", xml={"name": "GoodApples", "unwrapped": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + xml_model = AppleBarrel(good_apples=[Apple(name="granny"), Apple(name="fuji")]) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_two_complex_same_type(self): + """Two different attribute are same type""" + + basic_xml = ET.fromstring( + """ + + + + """ + ) + + class Apple(Model): + name: str = rest_field(name="name", xml={"name": "name", "attribute": True}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Apple"} + + class AppleBarrel(Model): + eu_apple: Apple = rest_field(name="EuropeanApple", xml={"name": "EuropeanApple"}) + us_apple: Apple = rest_field(name="USAApple", xml={"name": "USAApple"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + xml_model = AppleBarrel( + eu_apple=Apple(name="granny"), + us_apple=Apple(name="fuji"), + ) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_basic_namespace(self): + """Test an ultra basic XML.""" + basic_xml = ET.fromstring( + """ + + 37 + """ + ) + + class XmlModel(Model): + age: int = rest_field( + name="age", + xml={ + "name": "Age", + "prefix": "fictional", + "ns": "http://characters.example.com", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "Data"} + + xml_model = XmlModel( + age=37, + ) + assert_xml_equals(_get_element(xml_model), basic_xml) + + def test_complex_namespace(self): + """Test recursive namespace.""" + basic_xml = ET.fromstring( + """ + + + lmazuel + + + + testpolicy + + + + 12 + + """ + ) + + class 
QueueDescriptionResponseAuthor(Model): + name: str = rest_field(name="name", xml={"ns": "http://www.w3.org/2005/Atom"}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"ns": "http://www.w3.org/2005/Atom"} + + class AuthorizationRule(Model): + type: str = rest_field( + name="type", + xml={ + "attribute": True, + "prefix": "i", + "ns": "http://www.w3.org/2001/XMLSchema-instance", + }, + ) + key_name: str = rest_field( + name="KeyName", + xml={"ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"}, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"} + + class MessageCountDetails(Model): + active_message_count: int = rest_field( + name="ActiveMessageCount", + xml={ + "prefix": "d2p1", + "ns": "http://schemas.microsoft.com/netservices/2011/06/servicebus", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = { + "name": "CountDetails", + "ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect", + } + + class XmlRoot(Model): + author: QueueDescriptionResponseAuthor = rest_field(name="author") + authorization_rules: List[AuthorizationRule] = rest_field( + name="AuthorizationRules", + xml={ + "ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect", + }, + ) + message_count_details: MessageCountDetails = rest_field( + name="MessageCountDetails", + xml={ + "ns": "http://schemas.microsoft.com/netservices/2010/10/servicebus/connect", + }, + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + _xml = {"name": "entry", "ns": "http://www.w3.org/2005/Atom"} + + xml_model = XmlRoot( + author=QueueDescriptionResponseAuthor(name="lmazuel"), + authorization_rules=[AuthorizationRule(type="SharedAccessAuthorizationRule", key_name="testpolicy")], + message_count_details=MessageCountDetails(active_message_count=12), + ) + assert_xml_equals(_get_element(xml_model), basic_xml) diff --git a/packages/http-client-python/test/unbranded/mock_api_tests/asynctests/test_unbranded_async.py b/packages/http-client-python/test/unbranded/mock_api_tests/asynctests/test_unbranded_async.py new file mode 100644 index 0000000000..1ba8cc5750 --- /dev/null +++ b/packages/http-client-python/test/unbranded/mock_api_tests/asynctests/test_unbranded_async.py @@ -0,0 +1,24 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ +import traceback +import pytest +from typetest.scalar.aio import ScalarClient +from corehttp.exceptions import HttpResponseError + + +@pytest.fixture +async def client(): + async with ScalarClient() as client: + yield client + + +@pytest.mark.asyncio +async def test_track_back(client: ScalarClient): + try: + await client.string.put("to raise exception") + except HttpResponseError: + track_back = traceback.format_exc().lower() + assert "azure" not in track_back + assert "microsoft" not in track_back diff --git a/packages/http-client-python/test/unbranded/mock_api_tests/cadl-ranch-config.yaml b/packages/http-client-python/test/unbranded/mock_api_tests/cadl-ranch-config.yaml new file mode 100644 index 0000000000..488d624247 --- /dev/null +++ b/packages/http-client-python/test/unbranded/mock_api_tests/cadl-ranch-config.yaml @@ -0,0 +1,27 @@ +unsupportedScenarios: Azure_ClientGenerator_Core_Access_InternalOperation + Azure_ClientGenerator_Core_Access_PublicOperation + Azure_ClientGenerator_Core_Access_RelativeModelInOperation + Azure_ClientGenerator_Core_Access_SharedModelInOperation + Azure_ClientGenerator_Core_Usage_ModelInOperation + Azure_Core_Basic_createOrReplace + Azure_Core_Basic_createOrUpdate + Azure_Core_Basic_delete + Azure_Core_Basic_export + Azure_Core_Basic_get + Azure_Core_Basic_list + Azure_Core_Basic_listWithCustomPageModel + Azure_Core_Basic_listWithPage + Azure_Core_Basic_listWithParameters + Azure_Core_Basic_TwoModelsAsPageItem + Azure_Core_Lro_Rpc_longRunningRpc + Azure_Core_Lro_Standard_createOrReplace + Azure_Core_Lro_Standard_delete + Azure_Core_Lro_Standard_export + Azure_Core_Scalar_AzureLocationScalar_get + Azure_Core_Scalar_AzureLocationScalar_header + Azure_Core_Scalar_AzureLocationScalar_post + Azure_Core_Scalar_AzureLocationScalar_put + Azure_Core_Scalar_AzureLocationScalar_query + Azure_Core_Traits_repeatableAction + Azure_Core_Traits_smokeTest + Azure_SpecialHeaders_XmsClientRequestId diff --git a/packages/http-client-python/test/unbranded/mock_api_tests/conftest.py b/packages/http-client-python/test/unbranded/mock_api_tests/conftest.py new file mode 100644 index 0000000000..fafe7b22ce --- /dev/null +++ b/packages/http-client-python/test/unbranded/mock_api_tests/conftest.py @@ -0,0 +1,35 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +import os +import subprocess +import signal +import pytest +import re +from pathlib import Path + + +def start_server_process(): + path = Path(os.path.dirname(__file__)) / Path("../../../node_modules/@azure-tools/cadl-ranch-specs") + os.chdir(path.resolve()) + cmd = "cadl-ranch serve ./http" + if os.name == "nt": + return subprocess.Popen(cmd, shell=True) + return subprocess.Popen(cmd, shell=True, preexec_fn=os.setsid) + + +def terminate_server_process(process): + if os.name == "nt": + process.kill() + else: + os.killpg(os.getpgid(process.pid), signal.SIGTERM) # Send the signal to all the process groups + + +@pytest.fixture(scope="session", autouse=True) +def testserver(): + """Start cadl ranch mock api tests""" + server = start_server_process() + yield + terminate_server_process(server) diff --git a/packages/http-client-python/test/unbranded/mock_api_tests/test_unbranded.py b/packages/http-client-python/test/unbranded/mock_api_tests/test_unbranded.py new file mode 100644 index 0000000000..10ded3a218 --- /dev/null +++ b/packages/http-client-python/test/unbranded/mock_api_tests/test_unbranded.py @@ -0,0 +1,57 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +import os +import re +from subprocess import getoutput +from pathlib import Path +import traceback +from importlib import import_module +import pytest +from typetest.scalar import ScalarClient +from corehttp.exceptions import HttpResponseError + + +@pytest.fixture +def client(): + with ScalarClient() as client: + yield client + + +def test_module(): + with pytest.raises(ModuleNotFoundError): + import_module("azure") + + +def test_track_back(client: ScalarClient): + try: + client.string.put("to raise exception") + except HttpResponseError: + track_back = traceback.format_exc().lower() + assert "azure" not in track_back + assert "microsoft" not in track_back + + +def check_sensitive_word(folder: Path, word: str) -> str: + special_folders = ["__pycache__", "pytest_cache"] + if os.name == "nt": + skip_folders = "|".join(special_folders) + output = getoutput( + f"powershell \"ls -r -Path {folder} | where fullname -notmatch '{skip_folders}' | Select-String -Pattern '{word}'\"" + ).replace("\\", "/") + else: + skip_folders = "{" + ",".join(special_folders) + "}" + output = getoutput(f"grep -ri --exclude-dir={skip_folders} {word} {folder}") + + result = set() + for item in re.findall(f"{folder.as_posix()}[^:]+", output.replace("\n", "")): + result.add(Path(item).relative_to(folder).parts[0]) + return sorted(list(result)) + + +def test_sensitive_word(): + check_folder = (Path(os.path.dirname(__file__)) / "../generated").resolve() + assert [] == check_sensitive_word(check_folder, "azure") + # after update cadl-ranch, it shall also equal to [] + assert ["authentication-oauth2", "authentication-union"] == check_sensitive_word(check_folder, "microsoft") diff --git a/packages/http-client-python/test/unbranded/requirements.txt b/packages/http-client-python/test/unbranded/requirements.txt new file mode 100644 index 0000000000..d134b02185 --- /dev/null +++ b/packages/http-client-python/test/unbranded/requirements.txt @@ -0,0 +1,71 @@ +aiohttp;python_full_version>="3.5.2" +requests==2.32.2 +pytest +pytest-asyncio==0.14.0;python_full_version>="3.5.2" +corehttp==1.0.0b3 +pyright==1.1.379 +pylint==3.2.7 +tox==4.18.1 +mypy==1.10.1 + +# common test case +-e 
./generated/authentication-api-key +-e ./generated/authentication-http-custom +-e ./generated/authentication-oauth2 +-e ./generated/authentication-union +-e ./generated/client-naming +-e ./generated/encode-duration +-e ./generated/encode-numeric +-e ./generated/parameters-basic +-e ./generated/parameters-collection-format +-e ./generated/parameters-spread +-e ./generated/resiliency-srv-driven1 +-e ./generated/resiliency-srv-driven2 +-e ./generated/serialization-encoded-name-json +-e ./generated/server-endpoint-not-defined +-e ./generated/server-path-multiple +-e ./generated/server-path-single +-e ./generated/server-versions-versioned +-e ./generated/server-versions-not-versioned +-e ./generated/special-words +-e ./generated/typetest-array +-e ./generated/typetest-dictionary +-e ./generated/typetest-enum-extensible +-e ./generated/typetest-enum-fixed +-e ./generated/typetest-model-enumdiscriminator +-e ./generated/typetest-model-nesteddiscriminator +-e ./generated/typetest-model-notdiscriminated +-e ./generated/typetest-model-singlediscriminator +-e ./generated/typetest-model-recursive +-e ./generated/typetest-model-usage +-e ./generated/typetest-model-visibility +-e ./generated/typetest-property-nullable +-e ./generated/typetest-property-optional +-e ./generated/typetest-property-valuetypes +-e ./generated/typetest-property-additionalproperties +-e ./generated/typetest-scalar +-e ./generated/typetest-union +-e ./generated/typetest-model-empty +-e ./generated/headasbooleantrue +-e ./generated/headasbooleanfalse +-e ./generated/parameters-body-optionality +-e ./generated/special-headers-repeatability +-e ./generated/special-headers-conditional-request +-e ./generated/encode-datetime +-e ./generated/encode-bytes +-e ./generated/client-structure-default +-e ./generated/client-structure-multiclient +-e ./generated/client-structure-renamedoperation +-e ./generated/client-structure-twooperationgroup +-e ./generated/payload-content-negotiation +-e ./generated/payload-json-merge-patch +-e ./generated/payload-pageable +-e ./generated/payload-multipart +-e ./generated/payload-media-type +-e ./generated/payload-xml +-e ./generated/versioning-added +-e ./generated/versioning-madeoptional +-e ./generated/versioning-removed +-e ./generated/versioning-renamedfrom +-e ./generated/versioning-returntypechangedfrom +-e ./generated/versioning-typechangedfrom diff --git a/packages/http-client-python/test/unbranded/tox.ini b/packages/http-client-python/test/unbranded/tox.ini new file mode 100644 index 0000000000..7106ff67f6 --- /dev/null +++ b/packages/http-client-python/test/unbranded/tox.ini @@ -0,0 +1,35 @@ +[tox] +envlist=py38, py310, py311, py312 +skipsdist=True + +[testenv] +passenv=* +deps= + -r requirements.txt +commands= + pytest + +[testenv:ci] +commands = + pytest mock_api_tests ../generic_mock_api_tests + +[testenv:lint] +deps= + -r requirements.txt +commands = + pip install azure-pylint-guidelines-checker==0.4.1 --index-url="https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple/" + python ../../eng/scripts/ci/run_pylint.py -t unbranded -s "generated" {posargs} + +[testenv:mypy] +commands = + python ../../eng/scripts/ci/run_mypy.py -t unbranded -s "generated" {posargs} + +[testenv:pyright] +commands = + python ../../eng/scripts/ci/run_pyright.py -t unbranded -s "generated" {posargs} + +[testenv:apiview] +envlist=py311 +commands = + pip install ../../../../../azure-sdk-tools/packages/python-packages/apiview-stub-generator 
--extra-index-url="https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple/" + python ../../eng/scripts/ci/run_apiview.py -t unbranded -s "generated" {posargs} diff --git a/packages/http-client-python/test/utils.test.ts b/packages/http-client-python/test/utils.test.ts new file mode 100644 index 0000000000..46868fbef3 --- /dev/null +++ b/packages/http-client-python/test/utils.test.ts @@ -0,0 +1,22 @@ +import { strictEqual } from "assert"; +import { describe, it } from "vitest"; +import { camelToSnakeCase } from "../emitter/src/utils.js"; + +describe("typespec-python: utils", () => { + it("camelToSnakeCase", async () => { + const cases = { + StandardSSD: "standard_ssd", + StandardSSDLRS: "standard_ssdlrs", + QRCode: "qr_code", + MicroQRCode: "micro_qr_code", + detection_01: "detection01", + "v1.1-preview.1": "v1_1_preview1", + pointInTimeUTC: "point_in_time_utc", + diskSizeGB: "disk_size_gb", + lastModifiedTS: "last_modified_ts", + }; + for (const [input, expected] of Object.entries(cases)) { + strictEqual(camelToSnakeCase(input), expected); + } + }); +}); diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 2820e5edbb..d3f583bbeb 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -3,3 +3,4 @@ packages: - "e2e" - "!packages/http-client-csharp/**" - "!packages/http-client-java/**" + - "!packages/http-client-python/**"