diff --git a/generator/konfig-dash/.changeset/selfish-ads-rest.md b/generator/konfig-dash/.changeset/selfish-ads-rest.md
new file mode 100644
index 000000000..ca11acda4
--- /dev/null
+++ b/generator/konfig-dash/.changeset/selfish-ads-rest.md
@@ -0,0 +1,7 @@
+---
+'konfig-openapi-spec': minor
+'konfig-cli': minor
+'konfig-lib': minor
+---
+
+add readmeHeaderSnippet configuration
diff --git a/generator/konfig-dash/api/src/functions/formatPython/formatPython.ts b/generator/konfig-dash/api/src/functions/formatPython/formatPython.ts
index 9b3299a6b..de3d25035 100644
--- a/generator/konfig-dash/api/src/functions/formatPython/formatPython.ts
+++ b/generator/konfig-dash/api/src/functions/formatPython/formatPython.ts
@@ -1,6 +1,6 @@
import type { APIGatewayEvent, Context } from 'aws-lambda'
import { urlForBlackdApi } from 'src/lib/urlForBlackdApi'
-import axios from 'axios'
+import axios, { AxiosError } from 'axios'
import {
CORS_HEADERS_METHOD_HEADERS,
CORS_HEADERS_ORIGIN,
@@ -30,22 +30,35 @@ export const handler = async (event: APIGatewayEvent, context: Context) => {
}
}
if (event.body === null) throw Error('Missing Request body')
- const { data: formattedSource } = await axios.post(
- urlForBlackdApi(),
- event.body
- )
+ try {
+ const { data: formattedSource } = await axios.post(
+ urlForBlackdApi(),
+ event.body
+ )
- return {
- statusCode: 200,
- headers: {
- ...CORS_HEADERS_ORIGIN,
- 'Content-Type': 'text/plain',
- },
- // For some reason blackd returns an empty string if the
- // code snippet is already formatted so we have to handle
- // that edge case with an empty string check
- // From: https://black.readthedocs.io/en/stable/usage_and_configuration/black_as_a_server.html
- // "HTTP 204: If the input is already well-formatted. The response body is empty."
- body: formattedSource == '' ? event.body : formattedSource,
+ return {
+ statusCode: 200,
+ headers: {
+ ...CORS_HEADERS_ORIGIN,
+ 'Content-Type': 'text/plain',
+ },
+ // For some reason blackd returns an empty string if the
+ // code snippet is already formatted so we have to handle
+ // that edge case with an empty string check
+ // From: https://black.readthedocs.io/en/stable/usage_and_configuration/black_as_a_server.html
+ // "HTTP 204: If the input is already well-formatted. The response body is empty."
+ body: formattedSource == '' ? event.body : formattedSource,
+ }
+  } catch (e) {
+    // Only swallow blackd/axios failures; rethrow anything unexpected
+    if (!(e instanceof AxiosError)) throw e
+    return {
+      statusCode: 500,
+      headers: {
+        ...CORS_HEADERS_ORIGIN,
+        'Content-Type': 'text/plain',
+      },
+      body: event.body,
+    }
}
}
diff --git a/generator/konfig-dash/api/src/lib/prepare-java-request-properties.ts b/generator/konfig-dash/api/src/lib/prepare-java-request-properties.ts
index 54b2957b8..7c1eaf005 100644
--- a/generator/konfig-dash/api/src/lib/prepare-java-request-properties.ts
+++ b/generator/konfig-dash/api/src/lib/prepare-java-request-properties.ts
@@ -14,9 +14,16 @@ import {
* Now all thats necessary to give the Java Generator more context is:
*
* 1. Modify the AdditionalProperties schema in the Java API's OpenAPI specification at api.yaml
- * 2. Run generate-models.sh
- * 3. Make updates to KonfigYaml.ts / KonfigYamlCommon.ts
- * 4. Extract data from body and return as a key-value pair in the properties object
+ * (file located at: [KONFIG REPO]/misc/openapi-generator-configs/openapi-generator-api/api.yaml)
+ * 2. Update JavaGenerateApiRequestBody.ts file with same changes as api.yaml
+ * 3. Run generate-models.sh
+ * (file located at: [KONFIG REPO]/misc/openapi-generator-configs/openapi-generator-api/generate-models.sh)
+ * 4. Make updates to KonfigYaml.ts / KonfigYamlCommon.ts
+ * 5. Extract data from body and return as a key-value pair in the properties object (in this function implementation)
+ *
+ * Note: If you are adding a configuration that points to a file like "readmeHeaderSnippet", you need to add code to
+ * "/generator/konfig-dash/packages/konfig-cli/src/commands/generate.ts" to read the file contents and send the contents
+ * to the generator api instead.
*/
export function prepareJavaRequestProperties({
body,
@@ -42,6 +49,10 @@ export function prepareJavaRequestProperties({
properties['gitRepoName'] = git.repoName
}
+ if ('readmeHeaderSnippet' in generatorConfig) {
+ properties['readmeHeaderSnippet'] = generatorConfig.readmeHeaderSnippet
+ }
+
if ('outputDirectory' in generatorConfig) {
properties['outputDirectory'] = generatorConfig.outputDirectory
}
diff --git a/generator/konfig-dash/packages/konfig-cli/src/commands/generate.ts b/generator/konfig-dash/packages/konfig-cli/src/commands/generate.ts
index 4210a6588..1d267c1e1 100644
--- a/generator/konfig-dash/packages/konfig-cli/src/commands/generate.ts
+++ b/generator/konfig-dash/packages/konfig-cli/src/commands/generate.ts
@@ -20,7 +20,7 @@ import {
GeneratorGitConfig,
} from 'konfig-lib'
import globby from 'globby'
-import { Konfig } from 'konfig-typescript-sdk'
+import { Konfig, KonfigError } from 'konfig-typescript-sdk'
import * as fs from 'fs-extra'
import axios, { AxiosError } from 'axios'
import * as os from 'os'
@@ -52,6 +52,7 @@ import { isSubmodule } from '../util/is-submodule'
import { getHostForGenerateApi } from '../util/get-host-for-generate-api'
import { getSdkDefaultBranch } from '../util/get-sdk-default-branch'
import { insertTableOfContents } from '../util/insert-table-of-contents'
+import boxen from 'boxen'
function getOutputDir(
outputFlag: string | undefined,
@@ -1166,22 +1167,7 @@ export default class Deploy extends Command {
])
for (const markdownPath of markdownFiles) {
const markdown = fs.readFileSync(markdownPath, 'utf-8')
- const pythonSnippetRegex =
- // rewrite the following regex to not include "```" in the match
- /\`\`\`python\r?\n([\s\S]*?)\r?\n\`\`\`/g
-
- // find all code snippets in the markdown string that matches typescriptSnippetRegex
- // and format them and replace the code snippets with the formatted code snippets
- const formattedMarkdown = await replaceAsync(
- markdown,
- pythonSnippetRegex,
- async (_, codeSnippet) => {
- const { data: formattedCodeSnippet } =
- await konfig.sdk.formatPython(codeSnippet)
- return '```python\n' + formattedCodeSnippet + '```'
- }
- )
- fs.writeFileSync(markdownPath, formattedMarkdown)
+ await formatPythonSnippet({ markdown, markdownPath, konfig })
}
CliUx.ux.action.stop()
@@ -1415,11 +1401,17 @@ function handleReadmeSnippet<
C extends object & {
readmeSnippet?: string
asyncReadmeSnippet?: string
+ readmeHeaderSnippet?: string
readmeDescriptionSnippet?: string
}
>({ config }: { config: C }): C {
if (config.readmeSnippet !== undefined)
config.readmeSnippet = fs.readFileSync(config.readmeSnippet, 'utf-8')
+ if (config.readmeHeaderSnippet !== undefined)
+ config.readmeHeaderSnippet = fs.readFileSync(
+ config.readmeHeaderSnippet,
+ 'utf-8'
+ )
if (config.asyncReadmeSnippet !== undefined)
config.asyncReadmeSnippet = fs.readFileSync(
config.asyncReadmeSnippet,
@@ -1564,6 +1556,60 @@ function constructGoGenerationRequest({
return requestGo
}
+async function formatPythonSnippet({
+ markdown,
+ markdownPath,
+ konfig,
+}: {
+ konfig: Konfig
+ markdownPath: string
+ markdown: string
+}) {
+ const pythonSnippetRegex = /\`\`\`python\r?\n([\s\S]*?)\r?\n\`\`\`/g
+
+  // find all code snippets in the markdown string that match pythonSnippetRegex
+  // and format them, replacing the original snippets with the formatted ones
+ try {
+ const formattedMarkdown = await replaceAsync(
+ markdown,
+ pythonSnippetRegex,
+ async (match, codeSnippet, offset) => {
+ // Check if the block is preceded by a line ending with '>'
+ const blockStartIndex = offset - 1
+ const startOfLineIndex =
+ markdown.lastIndexOf('\n', blockStartIndex - 1) + 1
+ const lineBeforeBlock = markdown.substring(
+ startOfLineIndex,
+ blockStartIndex
+ )
+
+ if (lineBeforeBlock.endsWith('>')) {
+ // If it is, we leave the match unaltered
+ return match
+ } else {
+ // If it's not, proceed with formatting
+ const { data: formattedCodeSnippet } = await konfig.sdk.formatPython(
+ codeSnippet
+ )
+ return '```python\n' + formattedCodeSnippet + '```'
+ }
+ }
+ )
+ fs.writeFileSync(markdownPath, formattedMarkdown)
+ } catch (e) {
+ if (e instanceof KonfigError)
+ if (typeof e.responseBody === 'string') {
+ console.log(
+ boxen(e.responseBody, {
+ title: "Warning: Couldn't format Python code snippet",
+ titleAlignment: 'center',
+ borderColor: 'yellow',
+ })
+ )
+ }
+ }
+}
+
function constructPhpGenerationRequest({
configDir,
phpGeneratorConfig,
diff --git a/generator/konfig-dash/packages/konfig-lib/src/JavaGenerateApiRequestBody.ts b/generator/konfig-dash/packages/konfig-lib/src/JavaGenerateApiRequestBody.ts
index 3d86a3774..4707a25a4 100644
--- a/generator/konfig-dash/packages/konfig-lib/src/JavaGenerateApiRequestBody.ts
+++ b/generator/konfig-dash/packages/konfig-lib/src/JavaGenerateApiRequestBody.ts
@@ -3,6 +3,7 @@ import { z } from './zod'
import { TemplateFiles } from './TemplateFiles'
import {
clientStateWithExamples,
+ readmeHeaderSnippet,
topLevelOperationsOrderedSchema,
} from './KonfigYaml'
import { tagPrioritySchema } from './KonfigYamlCommon'
@@ -11,6 +12,7 @@ const additionalProperties = z
.object({
useDescriptionInOperationTableDocumentation: z.boolean().optional(),
apiPackage: z.string().optional(),
+ readmeHeaderSnippet: readmeHeaderSnippet,
artifactId: z.string().optional(),
artifactUrl: z.string().optional(),
authorEmail: z.string().describe('engineering@acme.com').optional(),
diff --git a/generator/konfig-dash/packages/konfig-lib/src/KonfigYaml.ts b/generator/konfig-dash/packages/konfig-lib/src/KonfigYaml.ts
index 040499a7d..ec2689729 100644
--- a/generator/konfig-dash/packages/konfig-lib/src/KonfigYaml.ts
+++ b/generator/konfig-dash/packages/konfig-lib/src/KonfigYaml.ts
@@ -198,8 +198,16 @@ export const pythonResponseTypeVersion = z
"Choose which version of Konfig's implementation of responses for the Python SDK to use."
)
+export const readmeHeaderSnippet = z
+ .string()
+ .optional()
+ .describe(
+ 'A snippet of markdown that will be inserted at the top of the README.md file. This is useful for adding a custom header to the README.md file that is not generated by Konfig.'
+ )
+
export const pythonConfig = z.object({
useDescriptionInOperationTableDocumentation,
+ readmeHeaderSnippet,
language: z.literal('python').default('python'),
packageName: z.string().describe('acme_client'),
projectName: z.string().describe('acme-python-sdk'),
diff --git a/generator/konfig-dash/packages/konfig-openapi-spec/openapi.yaml b/generator/konfig-dash/packages/konfig-openapi-spec/openapi.yaml
index db34b4e46..b0e62b933 100644
--- a/generator/konfig-dash/packages/konfig-openapi-spec/openapi.yaml
+++ b/generator/konfig-dash/packages/konfig-openapi-spec/openapi.yaml
@@ -539,6 +539,11 @@ components:
type: boolean
description: Whether or not to use the operation's description in the operation
table documentation. By default the summary is used.
+ readmeHeaderSnippet:
+ type: string
+ description: A snippet of markdown that will be inserted at the top of the
+ README.md file. This is useful for adding a custom header to
+ the README.md file that is not generated by Konfig.
language:
type: string
enum:
@@ -2215,6 +2220,11 @@ components:
type: boolean
description: Whether or not to use the operation's description in the operation
table documentation. By default the summary is used.
+ readmeHeaderSnippet:
+ type: string
+ description: A snippet of markdown that will be inserted at the top of the
+ README.md file. This is useful for adding a custom header
+ to the README.md file that is not generated by Konfig.
language:
type: string
enum:
diff --git a/generator/konfig-generator-api/.idea/runConfigurations/OpenApiGeneratorApplication.xml b/generator/konfig-generator-api/.idea/runConfigurations/OpenApiGeneratorApplication.xml
index b00c35912..67b35f2a9 100644
--- a/generator/konfig-generator-api/.idea/runConfigurations/OpenApiGeneratorApplication.xml
+++ b/generator/konfig-generator-api/.idea/runConfigurations/OpenApiGeneratorApplication.xml
@@ -3,7 +3,7 @@
-
+
diff --git a/generator/konfig-generator-api/src/main/java/com/konfigthis/api/GenerateApi.java b/generator/konfig-generator-api/src/main/java/com/konfigthis/api/GenerateApi.java
index 1da568031..68327a93f 100644
--- a/generator/konfig-generator-api/src/main/java/com/konfigthis/api/GenerateApi.java
+++ b/generator/konfig-generator-api/src/main/java/com/konfigthis/api/GenerateApi.java
@@ -324,6 +324,7 @@ default Map transformAdditionalPropertiesToMap(AdditionalPropert
putIfPresent(map, "swiftPackagePath", additionalProperties.getSwiftPackagePath());
putIfPresent(map, "apiDocumentationAuthenticationPartial", additionalProperties.getApiDocumentationAuthenticationPartial());
putIfPresent(map, "readmeSnippet", additionalProperties.getReadmeSnippet());
+ putIfPresent(map, "readmeHeaderSnippet", additionalProperties.getReadmeHeaderSnippet());
putIfPresent(map, "asyncReadmeSnippet", additionalProperties.getAsyncReadmeSnippet());
putIfPresent(map, "readmeSupportingDescriptionSnippet", additionalProperties.getReadmeSupportingDescriptionSnippet());
putIfPresent(map, "readmeDescriptionSnippet", additionalProperties.getReadmeDescriptionSnippet());
diff --git a/generator/konfig-generator-api/src/main/java/com/konfigthis/model/AdditionalProperties.java b/generator/konfig-generator-api/src/main/java/com/konfigthis/model/AdditionalProperties.java
index 0b01351a7..266f6e8bf 100644
--- a/generator/konfig-generator-api/src/main/java/com/konfigthis/model/AdditionalProperties.java
+++ b/generator/konfig-generator-api/src/main/java/com/konfigthis/model/AdditionalProperties.java
@@ -223,6 +223,9 @@ public class AdditionalProperties {
@JsonProperty("readmeSnippet")
private String readmeSnippet;
+ @JsonProperty("readmeHeaderSnippet")
+ private String readmeHeaderSnippet;
+
@JsonProperty("asyncReadmeSnippet")
private String asyncReadmeSnippet;
@@ -1468,6 +1471,25 @@ public void setReadmeSnippet(String readmeSnippet) {
this.readmeSnippet = readmeSnippet;
}
+ public AdditionalProperties readmeHeaderSnippet(String readmeHeaderSnippet) {
+ this.readmeHeaderSnippet = readmeHeaderSnippet;
+ return this;
+ }
+
+ /**
+ * Get readmeHeaderSnippet
+ * @return readmeHeaderSnippet
+ */
+
+ @Schema(name = "readmeHeaderSnippet", required = false)
+ public String getReadmeHeaderSnippet() {
+ return readmeHeaderSnippet;
+ }
+
+ public void setReadmeHeaderSnippet(String readmeHeaderSnippet) {
+ this.readmeHeaderSnippet = readmeHeaderSnippet;
+ }
+
public AdditionalProperties asyncReadmeSnippet(String asyncReadmeSnippet) {
this.asyncReadmeSnippet = asyncReadmeSnippet;
return this;
@@ -1697,6 +1719,7 @@ public boolean equals(Object o) {
Objects.equals(this.userAgent, additionalProperties.userAgent) &&
Objects.equals(this.npmName, additionalProperties.npmName) &&
Objects.equals(this.readmeSnippet, additionalProperties.readmeSnippet) &&
+ Objects.equals(this.readmeHeaderSnippet, additionalProperties.readmeHeaderSnippet) &&
Objects.equals(this.asyncReadmeSnippet, additionalProperties.asyncReadmeSnippet) &&
Objects.equals(this.readmeSupportingDescriptionSnippet, additionalProperties.readmeSupportingDescriptionSnippet) &&
Objects.equals(this.readmeDescriptionSnippet, additionalProperties.readmeDescriptionSnippet) &&
@@ -1708,7 +1731,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(objectPropertyNamingConvention, dependencies, readmeHeader, isGitSubmodule, gitDefaultBranch, gitRepoName, clientName, pubName, pubLibrary, pubDescription, pubAuthor, pythonResponseTypeVersion, pubAuthorEmail, pubHomepage, pubPublishTo, pubRepository, pubVersion, readmeOperation, moduleName, gitLabProjectId, outputDirectory, topLevelOperations, omitInfoDescription, omitModelDocumentation, omitApiDocumentation, useSecurityKeyParamNameAsPropertyName, tagPriority, useDescriptionInOperationTableDocumentation, setSkipSerializationToTrueByDefault, includeFetchAdapter, packagistUsername, toStringReturnsJson, includeEventSourceParser, keepAllParametersOptional, apiDocumentationAuthenticationPartial, composerPackageName, defaultTimeout, supportPhp7, useSingleRequestParameter, artifactUrl, artifactId, groupId, invokerPackage, modelPackage, apiPackage, projectName, podVersion, removeKonfigBranding, podName, classPrefix, authorName, authorEmail, podAuthors, swiftPackagePath, disallowAdditionalPropertiesIfNotPresent, packageVersion, packageUrl, npmVersion, gemName, gemVersion, userAgent, npmName, readmeSnippet, asyncReadmeSnippet, readmeSupportingDescriptionSnippet, readmeDescriptionSnippet, apiKeyAlias, clientState, clientStateWithExamples, clientStateIsOptional);
+ return Objects.hash(objectPropertyNamingConvention, dependencies, readmeHeader, isGitSubmodule, gitDefaultBranch, gitRepoName, clientName, pubName, pubLibrary, pubDescription, pubAuthor, pythonResponseTypeVersion, pubAuthorEmail, pubHomepage, pubPublishTo, pubRepository, pubVersion, readmeOperation, moduleName, gitLabProjectId, outputDirectory, topLevelOperations, omitInfoDescription, omitModelDocumentation, omitApiDocumentation, useSecurityKeyParamNameAsPropertyName, tagPriority, useDescriptionInOperationTableDocumentation, setSkipSerializationToTrueByDefault, includeFetchAdapter, packagistUsername, toStringReturnsJson, includeEventSourceParser, keepAllParametersOptional, apiDocumentationAuthenticationPartial, composerPackageName, defaultTimeout, supportPhp7, useSingleRequestParameter, artifactUrl, artifactId, groupId, invokerPackage, modelPackage, apiPackage, projectName, podVersion, removeKonfigBranding, podName, classPrefix, authorName, authorEmail, podAuthors, swiftPackagePath, disallowAdditionalPropertiesIfNotPresent, packageVersion, packageUrl, npmVersion, gemName, gemVersion, userAgent, npmName, readmeSnippet, readmeHeaderSnippet, asyncReadmeSnippet, readmeSupportingDescriptionSnippet, readmeDescriptionSnippet, apiKeyAlias, clientState, clientStateWithExamples, clientStateIsOptional);
}
@Override
@@ -1778,6 +1801,7 @@ public String toString() {
sb.append(" userAgent: ").append(toIndentedString(userAgent)).append("\n");
sb.append(" npmName: ").append(toIndentedString(npmName)).append("\n");
sb.append(" readmeSnippet: ").append(toIndentedString(readmeSnippet)).append("\n");
+ sb.append(" readmeHeaderSnippet: ").append(toIndentedString(readmeHeaderSnippet)).append("\n");
sb.append(" asyncReadmeSnippet: ").append(toIndentedString(asyncReadmeSnippet)).append("\n");
sb.append(" readmeSupportingDescriptionSnippet: ").append(toIndentedString(readmeSupportingDescriptionSnippet)).append("\n");
sb.append(" readmeDescriptionSnippet: ").append(toIndentedString(readmeDescriptionSnippet)).append("\n");
diff --git a/generator/konfig-generator-api/src/main/resources/python/README.handlebars b/generator/konfig-generator-api/src/main/resources/python/README.handlebars
index c22b81127..02f79c5c7 100644
--- a/generator/konfig-generator-api/src/main/resources/python/README.handlebars
+++ b/generator/konfig-generator-api/src/main/resources/python/README.handlebars
@@ -19,6 +19,10 @@
{{> readme_badges}}
{{/if}}
+{{#if readmeHeaderSnippet}}
+
+{{{readmeHeaderSnippet}}}
+{{/if}}
## Table of Contents
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/LICENSE b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/LICENSE
new file mode 100644
index 000000000..c374a793e
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/LICENSE
@@ -0,0 +1,7 @@
+Copyright (c) 2023- Konfig, Inc. (https://konfigthis.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/README.md b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/README.md
new file mode 100644
index 000000000..74a44a227
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/README.md
@@ -0,0 +1,5 @@
+# konfig
+
+|Language|Version|Package Manager|Documentation|Source|
+|-|-|-|-|-|
+|Python|1.0.0-beta.1|[PyPI](https://pypi.org/project/python-readme-header-snippet/1.0.0-beta.1)|[Documentation](https://github.com/konfig-dev/konfig/tree/main/python/README.md)|[Source](https://github.com/konfig-dev/konfig/tree/main/python)|
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/api.yaml b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/api.yaml
new file mode 100644
index 000000000..89ee42497
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/api.yaml
@@ -0,0 +1,44 @@
+openapi: 3.0.3
+info:
+ title: python-readme-header-snippet API
+ description: A simple API based for testing python-readme-header-snippet.
+ version: 1.0.0
+ termsOfService: http://example.com/terms/
+ contact:
+ name: API Support
+ url: http://example.com/support
+ email: support@example.com
+ license:
+ name: Apache 2.0
+ url: http://www.apache.org/licenses/LICENSE-2.0.html
+servers:
+ - description: Live API server
+ url: https://python-readme-header-snippet.konfigthis.com
+tags:
+ - name: Test
+paths:
+ /simple-endpoint:
+ get:
+ tags:
+ - Test
+ summary: Fetches a JSON value based on input parameter
+ operationId: Test_fetch
+ description: Provide an input parameter to receive a JSON value with properties.
+ responses:
+ '200':
+ description: Successful response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/TestFetchResponse'
+components:
+ securitySchemes:
+ ApiKeyAuth:
+ type: apiKey
+ in: header
+ name: X-API-KEY
+ schemas:
+ TestFetchResponse:
+ type: object
+security:
+ - ApiKeyAuth: []
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/header.md b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/header.md
new file mode 100644
index 000000000..7afe859d6
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/header.md
@@ -0,0 +1,29 @@
+> [!WARNING]
+> This SDK has breaking changes from `< 1.0.0` versions.
+> All methods now return Pydantic models.
+>
+> ### Before (`< 1.0.0`)
+>
+> Previously, you had to use the `[]` syntax to access response values. This
+> required a little more code for every property access.
+>
+> ```python
+> chat_response = humanloop.chat(
+> # parameters
+> )
+> print(chat_response.body["project_id"])
+> ```
+>
+> ### After (`>= 1.0.0`)
+>
+> With Pydantic-based response values, you can use the `.` syntax to access. This
+> is slightly less verbose and looks more Pythonic.
+>
+> ```python
+> chat_response = humanloop.chat(
+> # parameters
+> )
+> print(chat_response.project_id)
+> ```
+>
+> See the [Raw HTTP Response](#raw-http-response) for accessing raw HTTP response values like headers and status codes.
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/konfig.yaml b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/konfig.yaml
new file mode 100644
index 000000000..42eed2cdc
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/konfig.yaml
@@ -0,0 +1,15 @@
+# yaml-language-server: $schema=https://unpkg.com/konfig-lib@latest/konfig-yaml.schema.json
+
+outputDirectory: /tmp/python-readme-header-snippet-sdks-out
+specPath: api.yaml
+generators:
+ python:
+ version: 1.0.0-beta.1
+ outputDirectory: python
+ clientName: PythonReadmeHeaderSnippetClient
+ packageName: python_readme_header_snippet
+ projectName: python-readme-header-snippet
+ readmeHeaderSnippet: header.md
+ git:
+ userId: konfig-dev
+ repoId: konfig/tree/main/python
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/.gitignore b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/.gitignore
new file mode 100644
index 000000000..f40692cc5
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/.gitignore
@@ -0,0 +1,70 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+dev-requirements.txt.log
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+.hypothesis/
+venv/
+.venv/
+.python-version
+.pytest_cache
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+#Ipython Notebook
+.ipynb_checkpoints
+
+# PyCharm files
+.idea
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/.vscode/settings.json b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/.vscode/settings.json
new file mode 100644
index 000000000..199fd258c
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/.vscode/settings.json
@@ -0,0 +1,9 @@
+{
+ "python.testing.pytestArgs": [],
+ "python.testing.unittestEnabled": false,
+ "python.testing.pytestEnabled": true,
+ "[python]": {
+ "editor.defaultFormatter": "ms-python.black-formatter"
+ },
+ "python.formatting.provider": "none"
+}
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/LICENSE b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/LICENSE
new file mode 100644
index 000000000..c374a793e
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/LICENSE
@@ -0,0 +1,7 @@
+Copyright (c) 2023- Konfig, Inc. (https://konfigthis.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/README.md b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/README.md
new file mode 100644
index 000000000..1b2f0296f
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/README.md
@@ -0,0 +1,165 @@
+# python-readme-header-snippet
+
+A simple API based for testing python-readme-header-snippet.
+
+
+[![PyPI](https://img.shields.io/badge/PyPI-v1.0.0beta.1-blue)](https://pypi.org/project/python-readme-header-snippet/1.0.0-beta.1)
+[![README.md](https://img.shields.io/badge/README-Click%20Here-green)](https://github.com/konfig-dev/konfig/tree/main/python#readme)
+[![More Info](https://img.shields.io/badge/More%20Info-Click%20Here-orange)](http://example.com/support)
+
+> [!WARNING]
+> This SDK has breaking changes from `< 1.0.0` versions.
+> All methods now return Pydantic models.
+>
+> ### Before (`< 1.0.0`)
+>
+> Previously, you had to use the `[]` syntax to access response values. This
+> required a little more code for every property access.
+>
+> ```python
+> chat_response = humanloop.chat(
+> # parameters
+> )
+> print(chat_response.body["project_id"])
+> ```
+>
+> ### After (`>= 1.0.0`)
+>
+> With Pydantic-based response values, you can use the `.` syntax to access. This
+> is slightly less verbose and looks more Pythonic.
+>
+> ```python
+> chat_response = humanloop.chat(
+> # parameters
+> )
+> print(chat_response.project_id)
+> ```
+>
+> See the [Raw HTTP Response](#raw-http-response) for accessing raw HTTP response values like headers and status codes.
+
+## Table of Contents
+
+
+
+- [Requirements](#requirements)
+- [Installing](#installing)
+- [Getting Started](#getting-started)
+- [Async](#async)
+- [Raw HTTP Response](#raw-http-response)
+- [Reference](#reference)
+ * [`pythonreadmeheadersnippetclient.test.fetch`](#pythonreadmeheadersnippetclienttestfetch)
+
+
+
+## Requirements
+
+Python >=3.7
+
+## Installing
+
+```sh
+pip install python-readme-header-snippet==1.0.0-beta.1
+```
+
+## Getting Started
+
+```python
+from pprint import pprint
+from python_readme_header_snippet import PythonReadmeHeaderSnippetClient, ApiException
+
+pythonreadmeheadersnippetclient = PythonReadmeHeaderSnippetClient(
+ api_key="YOUR_API_KEY",
+)
+
+try:
+ # Fetches a JSON value based on input parameter
+ fetch_response = pythonreadmeheadersnippetclient.test.fetch()
+except ApiException as e:
+ print("Exception when calling TestApi.fetch: %s\n" % e)
+ pprint(e.body)
+ pprint(e.headers)
+ pprint(e.status)
+ pprint(e.reason)
+ pprint(e.round_trip_time)
+```
+
+## Async
+
+`async` support is available by prepending `a` to any method.
+
+```python
+import asyncio
+from pprint import pprint
+from python_readme_header_snippet import PythonReadmeHeaderSnippetClient, ApiException
+
+pythonreadmeheadersnippetclient = PythonReadmeHeaderSnippetClient(
+ api_key="YOUR_API_KEY",
+)
+
+
+async def main():
+ try:
+ # Fetches a JSON value based on input parameter
+ fetch_response = await pythonreadmeheadersnippetclient.test.afetch()
+ except ApiException as e:
+ print("Exception when calling TestApi.fetch: %s\n" % e)
+ pprint(e.body)
+ pprint(e.headers)
+ pprint(e.status)
+ pprint(e.reason)
+ pprint(e.round_trip_time)
+
+
+asyncio.run(main())
+```
+
+## Raw HTTP Response
+
+To access raw HTTP response values, use the `.raw` namespace.
+
+```python
+from pprint import pprint
+from python_readme_header_snippet import PythonReadmeHeaderSnippetClient, ApiException
+
+pythonreadmeheadersnippetclient = PythonReadmeHeaderSnippetClient(
+ api_key="YOUR_API_KEY",
+)
+
+try:
+ # Fetches a JSON value based on input parameter
+ fetch_response = pythonreadmeheadersnippetclient.test.raw.fetch()
+ pprint(fetch_response.headers)
+ pprint(fetch_response.status)
+ pprint(fetch_response.round_trip_time)
+except ApiException as e:
+ print("Exception when calling TestApi.fetch: %s\n" % e)
+ pprint(e.body)
+ pprint(e.headers)
+ pprint(e.status)
+ pprint(e.reason)
+ pprint(e.round_trip_time)
+```
+
+
+## Reference
+### `pythonreadmeheadersnippetclient.test.fetch`
+
+Provide an input parameter to receive a JSON value with properties.
+
+#### 🛠️ Usage
+
+```python
+fetch_response = pythonreadmeheadersnippetclient.test.fetch()
+```
+
+#### 🌐 Endpoint
+
+`/simple-endpoint` `get`
+
+[🔙 **Back to Table of Contents**](#table-of-contents)
+
+---
+
+
+## Author
+This Python package is automatically generated by [Konfig](https://konfigthis.com)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/poetry.lock b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/poetry.lock
new file mode 100644
index 000000000..3d97c95ce
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/poetry.lock
@@ -0,0 +1,1723 @@
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+
+[[package]]
+name = "aiohttp"
+version = "3.8.6"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"},
+ {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"},
+ {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"},
+ {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"},
+ {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"},
+ {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"},
+ {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"},
+ {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"},
+ {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"},
+ {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"},
+ {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"},
+ {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"},
+ {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"},
+ {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"},
+ {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"},
+ {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"},
+ {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"},
+ {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"},
+ {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"},
+ {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"},
+ {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"},
+ {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"},
+ {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"},
+ {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"},
+ {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"},
+ {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"},
+ {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"},
+ {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"},
+ {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"},
+ {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"},
+ {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"},
+ {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"},
+ {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"},
+ {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"},
+ {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"},
+ {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"},
+ {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"},
+ {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"},
+ {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"},
+ {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"},
+ {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"},
+ {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"},
+ {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"},
+ {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"},
+ {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"},
+ {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"},
+ {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"},
+ {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"},
+ {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"},
+ {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"},
+ {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"},
+ {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"},
+ {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"},
+ {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"},
+ {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"},
+ {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"},
+ {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"},
+ {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"},
+ {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"},
+ {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"},
+ {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"},
+ {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"},
+ {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+async-timeout = ">=4.0.0a3,<5.0"
+asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""}
+attrs = ">=17.3.0"
+charset-normalizer = ">=2.0,<4.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "cchardet"]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
+[[package]]
+name = "annotated-types"
+version = "0.5.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"},
+ {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
+
+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "asynctest"
+version = "0.13.0"
+description = "Enhance the standard unittest package with features for testing asyncio libraries"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
+ {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
+]
+
+[[package]]
+name = "attrs"
+version = "23.1.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
+ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "black"
+version = "23.3.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
+ {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
+ {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
+ {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
+ {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
+ {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
+ {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
+ {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
+ {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
+ {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
+ {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
+ {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
+ {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
+ {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "bleach"
+version = "6.0.0"
+description = "An easy safelist-based HTML-sanitizing tool."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"},
+ {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"},
+]
+
+[package.dependencies]
+six = ">=1.9.0"
+webencodings = "*"
+
+[package.extras]
+css = ["tinycss2 (>=1.1.0,<1.2)"]
+
+[[package]]
+name = "build"
+version = "0.10.0"
+description = "A simple, correct Python build frontend"
+optional = false
+python-versions = ">= 3.7"
+files = [
+ {file = "build-0.10.0-py3-none-any.whl", hash = "sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171"},
+ {file = "build-0.10.0.tar.gz", hash = "sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "os_name == \"nt\""}
+importlib-metadata = {version = ">=0.22", markers = "python_version < \"3.8\""}
+packaging = ">=19.0"
+pyproject_hooks = "*"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+docs = ["furo (>=2021.08.31)", "sphinx (>=4.0,<5.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"]
+test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"]
+typing = ["importlib-metadata (>=5.1)", "mypy (==0.991)", "tomli", "typing-extensions (>=3.7.4.3)"]
+virtualenv = ["virtualenv (>=20.0.35)"]
+
+[[package]]
+name = "certifi"
+version = "2023.7.22"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
+ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.15.1"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = "*"
+files = [
+ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
+ {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
+ {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
+ {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
+ {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
+ {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
+ {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
+ {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
+ {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
+ {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
+ {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
+ {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
+ {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
+ {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
+ {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
+ {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
+ {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
+ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
+ {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.2.7"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"},
+ {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"},
+ {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"},
+ {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
+ {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
+ {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
+ {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
+ {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
+ {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
+ {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"},
+ {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"},
+ {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"},
+ {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"},
+ {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"},
+ {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"},
+ {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"},
+ {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"},
+ {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"},
+ {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"},
+ {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"},
+ {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"},
+ {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"},
+ {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
+ {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
+]
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "cryptography"
+version = "41.0.5"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"},
+ {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"},
+ {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"},
+ {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"},
+ {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"},
+ {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"},
+ {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"},
+ {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"},
+ {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"},
+ {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"},
+ {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"},
+ {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"},
+ {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"},
+ {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"},
+ {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"},
+ {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"},
+ {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"},
+ {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"},
+ {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"},
+ {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"},
+ {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"},
+ {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"},
+ {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"},
+]
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
+nox = ["nox"]
+pep8test = ["black", "check-sdist", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "docutils"
+version = "0.20.1"
+description = "Docutils -- Python Documentation Utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"},
+ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.1.3"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
+ {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "frozendict"
+version = "2.3.8"
+description = "A simple immutable dictionary"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "frozendict-2.3.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d188d062084fba0e4bf32719ff7380b26c050b932ff164043ce82ab90587c52b"},
+ {file = "frozendict-2.3.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2a4e818ac457f6354401dcb631527af25e5a20fcfc81e6b5054b45fc245caca"},
+ {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a506d807858fa961aaa5b48dab6154fdc6bd045bbe9310788bbff141bb42d13"},
+ {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:750632cc890d8ee9484fe6d31b261159144b6efacc08e1317fe46accd1410373"},
+ {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ee5fe2658a8ac9a57f748acaf563f6a47f80b8308cbf0a04fac0ba057d41f75"},
+ {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23c4bb46e6b8246e1e7e49b5593c2bc09221db0d8f31f7c092be8dfb42b9e620"},
+ {file = "frozendict-2.3.8-cp310-cp310-win_amd64.whl", hash = "sha256:c31abc8acea309b132dde441856829f6003a3d242da8b54bce4c0f2a3c8c63f0"},
+ {file = "frozendict-2.3.8-cp310-cp310-win_arm64.whl", hash = "sha256:9ea5520e85447ff8d4681e181941e482662817ccba921b7cb3f87922056d892a"},
+ {file = "frozendict-2.3.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f83fed36497af9562ead5e9fb8443224ba2781786bd3b92b1087cb7d0ff20135"},
+ {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27c5c1d29d0eda7979253ec88abc239da1313b38f39f4b16984db3b3e482300"},
+ {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c785de7f1a13f15963945f400656b18f057c2fc76c089dacf127a2bb188c03"},
+ {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8cf35ddd25513428ec152614def9696afb93ae5ec0eb54fa6aa6206eda77ac4c"},
+ {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ffc684773de7c88724788fa9787d0016fd75830412d58acbd9ed1a04762c675b"},
+ {file = "frozendict-2.3.8-cp36-cp36m-win_amd64.whl", hash = "sha256:4c258aab9c8488338634f2ec670ef049dbf0ab0e7a2fa9bc2c7b5009cb614801"},
+ {file = "frozendict-2.3.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47fc26468407fdeb428cfc89495b7921419e670355c21b383765482fdf6c5c14"},
+ {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ea638228692db2bf94bce40ea4b25f4077588497b516bd16576575560094bd9"},
+ {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a75bf87e76c4386caecdbdd02a99e53ad43a6b5c38fb3d5a634a9fc9ce41462"},
+ {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed5a6c5c7a0f57269577c2a338a6002949aea21a23b7b7d06da7e7dced8b605b"},
+ {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d086440328a465dea9bef2dbad7548d75d1a0a0d21f43a08c03e1ec79ac5240e"},
+ {file = "frozendict-2.3.8-cp37-cp37m-win_amd64.whl", hash = "sha256:0bc4767e2f83db5b701c787e22380296977368b0c57e485ca71b2eedfa11c4a3"},
+ {file = "frozendict-2.3.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:638cf363d3cbca31a341503cf2219eac52a5f5140449676fae3d9644cd3c5487"},
+ {file = "frozendict-2.3.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b2fd8ce36277919b36e3c834d2389f3cd7ac068ae730c312671dd4439a5dd65"},
+ {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3957d52f1906b0c85f641a1911d214255873f6408ab4e5ad657cc27a247fb145"},
+ {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72cfe08ab8ae524e54848fa90b22d02c1b1ecfb3064438696bcaa4b953f18772"},
+ {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4742e76c4111bd09198d3ab66cef94be8506212311338f9182d6ef5f5cb60493"},
+ {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:313ed8d9ba6bac35d7635cd9580ee5721a0fb016f4d2d20f0efa05dbecbdb1be"},
+ {file = "frozendict-2.3.8-cp38-cp38-win_amd64.whl", hash = "sha256:d3c6ce943946c2a61501c8cf116fff0892d11dd579877eb36e2aea2c27fddfef"},
+ {file = "frozendict-2.3.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0f573dc4861dd7ec9e055c8cceaf45355e894e749f621f199aab7b311ac4bdb"},
+ {file = "frozendict-2.3.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b3435e5f1ca5ae68a5e95e64b09d6d5c645cadd6b87569a0b3019dd248c8d00"},
+ {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:145afd033ebfade28416093335261b8ec1af5cccc593482309e7add062ec8668"},
+ {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da98427de26b5a2865727947480cbb53860089c4d195baa29c539da811cea617"},
+ {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e82befa7c385a668d569cebbebbdf49cee6fea4083f08e869a1b08cfb640a9f"},
+ {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80abe81d36e889ceec665e06ec764a7638000fa3e7be09786ac4d3ddc64b76db"},
+ {file = "frozendict-2.3.8-cp39-cp39-win_amd64.whl", hash = "sha256:8ccc94ac781710db44e142e1a11ff9b31d02c032c01c6868d51fcbef73086225"},
+ {file = "frozendict-2.3.8-cp39-cp39-win_arm64.whl", hash = "sha256:e72dbc1bcc2203cef38d205f692396f5505921a5680f66aa9a7e8bb71fd38f28"},
+ {file = "frozendict-2.3.8-py311-none-any.whl", hash = "sha256:ba41a7ed019bd03b62d63ed3f8dea35b8243d1936f7c9ed4b5298ca45a01928e"},
+ {file = "frozendict-2.3.8.tar.gz", hash = "sha256:5526559eca8f1780a4ee5146896f59afc31435313560208dd394a3a5e537d3ff"},
+]
+
+[[package]]
+name = "frozenlist"
+version = "1.3.3"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"},
+ {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"},
+]
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "6.7.0"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"},
+ {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+
+[[package]]
+name = "importlib-resources"
+version = "5.12.0"
+description = "Read resources from Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"},
+ {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"},
+]
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "jaraco-classes"
+version = "3.2.3"
+description = "Utility functions for Python class constructs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jaraco.classes-3.2.3-py3-none-any.whl", hash = "sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158"},
+ {file = "jaraco.classes-3.2.3.tar.gz", hash = "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a"},
+]
+
+[package.dependencies]
+more-itertools = "*"
+
+[package.extras]
+docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
+[[package]]
+name = "jeepney"
+version = "0.8.0"
+description = "Low-level, pure Python DBus protocol wrapper."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"},
+ {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"},
+]
+
+[package.extras]
+test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"]
+trio = ["async_generator", "trio"]
+
+[[package]]
+name = "keyring"
+version = "24.1.1"
+description = "Store and access your passwords safely."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "keyring-24.1.1-py3-none-any.whl", hash = "sha256:bc402c5e501053098bcbd149c4ddbf8e36c6809e572c2d098d4961e88d4c270d"},
+ {file = "keyring-24.1.1.tar.gz", hash = "sha256:3d44a48fa9a254f6c72879d7c88604831ebdaac6ecb0b214308b02953502c510"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""}
+importlib-resources = {version = "*", markers = "python_version < \"3.9\""}
+"jaraco.classes" = "*"
+jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""}
+pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""}
+SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""}
+
+[package.extras]
+completion = ["shtab"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
+[[package]]
+name = "markdown-it-py"
+version = "2.2.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
+ {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "more-itertools"
+version = "9.1.0"
+description = "More routines for operating on iterables, beyond itertools"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "more-itertools-9.1.0.tar.gz", hash = "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d"},
+ {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"},
+]
+
+[[package]]
+name = "multidict"
+version = "6.0.4"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+ {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+ {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+ {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+ {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+ {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+ {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+ {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+ {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+ {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+ {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pathspec"
+version = "0.11.2"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
+ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
+]
+
+[[package]]
+name = "pkginfo"
+version = "1.9.6"
+description = "Query metadata from sdists / bdists / installed packages."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pkginfo-1.9.6-py3-none-any.whl", hash = "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546"},
+ {file = "pkginfo-1.9.6.tar.gz", hash = "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"},
+]
+
+[package.extras]
+testing = ["pytest", "pytest-cov"]
+
+[[package]]
+name = "platformdirs"
+version = "3.11.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
+ {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""}
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+
+[[package]]
+name = "pluggy"
+version = "1.2.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
+ {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
+
+[[package]]
+name = "pydantic"
+version = "2.4.2"
+description = "Data validation using Python type hints"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"},
+ {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"},
+]
+
+[package.dependencies]
+annotated-types = ">=0.4.0"
+pydantic-core = "2.10.1"
+typing-extensions = ">=4.6.1"
+
+[package.extras]
+email = ["email-validator (>=2.0.0)"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.10.1"
+description = ""
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"},
+ {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"},
+ {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"},
+ {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"},
+ {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"},
+ {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"},
+ {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"},
+ {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"},
+ {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"},
+ {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"},
+ {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"},
+ {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"},
+ {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"},
+ {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"},
+ {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"},
+ {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"},
+ {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"},
+ {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"},
+ {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"},
+ {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"},
+ {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"},
+ {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"},
+ {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"},
+ {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"},
+ {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"},
+ {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"},
+ {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"},
+ {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"},
+ {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"},
+ {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"},
+ {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"},
+ {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+
+[[package]]
+name = "pygments"
+version = "2.16.1"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
+ {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
+]
+
+[package.extras]
+plugins = ["importlib-metadata"]
+
+[[package]]
+name = "pyproject-hooks"
+version = "1.0.0"
+description = "Wrappers to call pyproject.toml-based build backend hooks."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyproject_hooks-1.0.0-py3-none-any.whl", hash = "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8"},
+ {file = "pyproject_hooks-1.0.0.tar.gz", hash = "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"},
+]
+
+[package.dependencies]
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+
+[[package]]
+name = "pytest"
+version = "7.4.3"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
+ {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.21.1"
+description = "Pytest support for asyncio"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"},
+ {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"},
+]
+
+[package.dependencies]
+pytest = ">=7.0.0"
+typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
+testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
+
+[[package]]
+name = "pytest-cov"
+version = "2.8.1"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pytest-cov-2.8.1.tar.gz", hash = "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b"},
+ {file = "pytest_cov-2.8.1-py2.py3-none-any.whl", hash = "sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"},
+]
+
+[package.dependencies]
+coverage = ">=4.4"
+pytest = ">=3.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "virtualenv"]
+
+[[package]]
+name = "pytest-randomly"
+version = "1.2.3"
+description = "Pytest plugin to randomly order tests and control random.seed."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytest-randomly-1.2.3.tar.gz", hash = "sha256:92ec6745d3ebdd690ecb598648748c9601f16f5afacf83ccef2b50d23e6edb7f"},
+ {file = "pytest_randomly-1.2.3-py2.py3-none-any.whl", hash = "sha256:6db5e03d72b54052b9b379dc3cfa4749c19bfe4de161cf3eb24762049f4ce9be"},
+]
+
+[package.dependencies]
+pytest = "*"
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pywin32-ctypes"
+version = "0.2.2"
+description = "A (partial) reimplementation of pywin32 using ctypes/cffi"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"},
+ {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"},
+]
+
+[[package]]
+name = "readme-renderer"
+version = "37.3"
+description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "readme_renderer-37.3-py3-none-any.whl", hash = "sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343"},
+ {file = "readme_renderer-37.3.tar.gz", hash = "sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273"},
+]
+
+[package.dependencies]
+bleach = ">=2.1.0"
+docutils = ">=0.13.1"
+Pygments = ">=2.5.1"
+
+[package.extras]
+md = ["cmarkgfm (>=0.8.0)"]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+ {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "requests-toolbelt"
+version = "1.0.0"
+description = "A utility belt for advanced users of python-requests"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
+ {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
+]
+
+[package.dependencies]
+requests = ">=2.0.1,<3.0.0"
+
+[[package]]
+name = "rfc3986"
+version = "2.0.0"
+description = "Validating URI References per RFC 3986"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"},
+ {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"},
+]
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "rich"
+version = "13.6.0"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"},
+ {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
+[[package]]
+name = "secretstorage"
+version = "3.3.3"
+description = "Python bindings to FreeDesktop.org Secret Service API"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"},
+ {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"},
+]
+
+[package.dependencies]
+cryptography = ">=2.0"
+jeepney = ">=0.6"
+
+[[package]]
+name = "setuptools"
+version = "65.7.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"},
+ {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "twine"
+version = "4.0.2"
+description = "Collection of utilities for publishing packages on PyPI"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"},
+ {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"},
+]
+
+[package.dependencies]
+importlib-metadata = ">=3.6"
+keyring = ">=15.1"
+pkginfo = ">=1.8.1"
+readme-renderer = ">=35.0"
+requests = ">=2.20"
+requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0"
+rfc3986 = ">=1.4.0"
+rich = ">=12.0.0"
+urllib3 = ">=1.26.0"
+
+[[package]]
+name = "typed-ast"
+version = "1.5.5"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"},
+ {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"},
+ {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"},
+ {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"},
+ {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"},
+ {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"},
+ {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"},
+ {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"},
+ {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"},
+ {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"},
+ {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"},
+ {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"},
+ {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"},
+ {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"},
+ {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"},
+ {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"},
+ {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"},
+ {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"},
+ {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"},
+ {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"},
+ {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"},
+ {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"},
+ {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"},
+ {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"},
+ {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"},
+ {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"},
+ {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"},
+ {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"},
+ {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.7.1"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"},
+ {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
+]
+
+[[package]]
+name = "urllib3"
+version = "1.26.18"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+ {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
+]
+
+[package.extras]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+description = "Character encoding aliases for legacy web content"
+optional = false
+python-versions = "*"
+files = [
+ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
+ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
+]
+
+[[package]]
+name = "yarl"
+version = "1.9.2"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
+ {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
+ {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
+ {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
+ {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
+ {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
+ {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
+ {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
+ {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
+ {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
+ {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
+ {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
+ {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "zipp"
+version = "3.15.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
+ {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.7"
+content-hash = "6c3c8e668e5534e5423d53ec7e0010528fad1bc013d10f7660f9b15a1e99dbfe"
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/pyproject.toml b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/pyproject.toml
new file mode 100644
index 000000000..51293e753
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/pyproject.toml
@@ -0,0 +1,32 @@
+[tool.poetry]
+name = "python-readme-header-snippet"
+version = "1.0.0-beta.1"
+description = "Client for python-readme-header-snippet API"
+authors = ["API Support "]
+license = "MIT"
+readme = "README.md"
+packages = [{include = "python_readme_header_snippet"}]
+
+[tool.poetry.dependencies]
+python = "^3.7"
+certifi = ">=2023.7.22"
+python-dateutil = "^2.8.2"
+typing_extensions = "^4.3.0"
+urllib3 = "^1.26.18"
+frozendict = "^2.3.4"
+aiohttp = "^3.8.4"
+pydantic = "^2.4.2"
+
+[tool.poetry.group.dev.dependencies]
+setuptools = "^65.5.1"
+pytest = "^7.3.1"
+pytest-cov = "2.8.1"
+pytest-randomly = "1.2.3"
+build = "^0.10.0"
+twine = "^4.0.2"
+pytest-asyncio = "^0.21.0"
+black = "^23.3.0"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/__init__.py
new file mode 100644
index 000000000..af228c853
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/__init__.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+
+# flake8: noqa
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+__version__ = "1.0.0-beta.1"
+
+# import ApiClient
+from python_readme_header_snippet.api_client import ApiClient
+
+# import Configuration
+from python_readme_header_snippet.configuration import Configuration
+
+# import exceptions
+from python_readme_header_snippet.exceptions import OpenApiException
+from python_readme_header_snippet.exceptions import ApiAttributeError
+from python_readme_header_snippet.exceptions import ApiTypeError
+from python_readme_header_snippet.exceptions import ApiValueError
+from python_readme_header_snippet.exceptions import ApiKeyError
+from python_readme_header_snippet.exceptions import ApiException
+
+from python_readme_header_snippet.client import PythonReadmeHeaderSnippetClient
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/api_client.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/api_client.py
new file mode 100644
index 000000000..c3757b88c
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/api_client.py
@@ -0,0 +1,1958 @@
+# coding: utf-8
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from dataclasses import dataclass
+from decimal import Decimal
+import enum
+import email
+import json
+import os
+import io
+import atexit
+from multiprocessing.pool import ThreadPool
+import re
+import tempfile
+import time
+import typing
+import typing_extensions
+import aiohttp
+import urllib3
+from pydantic import BaseModel, RootModel, ValidationError
+from urllib3._collections import HTTPHeaderDict
+from urllib.parse import urlparse, quote
+from urllib3.fields import RequestField as RequestFieldBase
+from urllib3.fields import guess_content_type
+
+import frozendict
+
+from python_readme_header_snippet import rest
+from python_readme_header_snippet.api_response import ApiResponse, AsyncApiResponse
+from python_readme_header_snippet.rest import AsyncResponseWrapper, ResponseWrapper
+from python_readme_header_snippet.configuration import Configuration
+from python_readme_header_snippet.exceptions import ApiTypeError, ApiValueError, MissingRequiredParametersError
+from python_readme_header_snippet.request_after_hook import request_after_hook
+from python_readme_header_snippet.request_before_url_hook import request_before_url_hook
+from python_readme_header_snippet.schemas import (
+ NoneClass,
+ BoolClass,
+ Schema,
+ FileIO,
+ BinarySchema,
+ date,
+ datetime,
+ none_type,
+ Unset,
+ unset,
+)
+
@dataclass
class MappedArgs:
    """A single request's arguments, grouped by where they are sent.

    ``body`` carries the request body (any schema type); the remaining
    fields map parameter names to values for the query string, URL path,
    HTTP headers and cookies respectively.
    """
    body: typing.Any = None
    query: typing.Optional[dict] = None
    path: typing.Optional[dict] = None
    header: typing.Optional[dict] = None
    cookie: typing.Optional[dict] = None
+
class RequestField(RequestFieldBase):
    """urllib3 ``RequestField`` extended with value-based equality so two
    independently built multipart fields compare equal when all their
    attributes match."""

    def __eq__(self, other):
        # Only another RequestField with an identical attribute dict is equal.
        return isinstance(other, RequestField) and self.__dict__ == other.__dict__
+
+
+T = typing.TypeVar('T')
+
+
def closest_type_match(value: typing.Any, types: typing.List[typing.Type]) -> typing.Type:
    """Return the member of ``types`` that most specifically matches ``value``.

    Used to resolve ``typing.Union`` annotations at runtime: each candidate
    is tested against ``value`` and the closest (most specific) match wins.
    Returns ``None`` when no candidate matches.
    """
    best_match = None

    for t in types:
        # Check for generic types
        origin = typing_extensions.get_origin(t)
        args = typing_extensions.get_args(t)

        # Check for Literal types
        if origin == typing_extensions.Literal:
            if value in args:
                best_match = t
            continue

        # Check for Pydantic models and non-generic types
        if isinstance(t, type):  # Ensure t is a class
            if issubclass(t, BaseModel):
                if isinstance(value, dict):
                    try:
                        t(**value)
                        best_match = t
                    except ValidationError:
                        continue
            else:  # This is a non-generic type
                if isinstance(value, t):
                    # Prefer the MORE specific candidate: replace the current
                    # best only when t is a subclass of it (e.g. pick bool
                    # over int for the value True).  The previous
                    # issubclass(best_match, t) test did the opposite and
                    # drifted towards the most general type; it could also
                    # raise TypeError when best_match was a Literal alias,
                    # which the isinstance guard below prevents.
                    if best_match is None or (
                        isinstance(best_match, type) and issubclass(t, best_match)
                    ):
                        best_match = t
            continue

        # Check for generic list type
        if origin == list and isinstance(value, list):
            if args and issubclass(args[0], BaseModel):
                try:
                    [args[0](**item) for item in value]
                    best_match = t
                except ValidationError:
                    continue
            elif best_match is None or (typing_extensions.get_origin(best_match) == list and len(
                    typing_extensions.get_args(best_match)) < len(args)):
                if args and all(isinstance(item, args[0]) for item in value):
                    best_match = t

    return best_match
+
+
def construct_model_instance(model: typing.Type[T], data: typing.Any) -> T:
    """
    Recursively construct an instance of a Pydantic model along with its
    nested models, without re-validating already-validated data.

    :raises ApiTypeError: when ``model`` is not a Union, list, BaseModel
        subclass or plain type.
    """
    # if model is Union, pick the closest matching member type and recurse
    if typing_extensions.get_origin(model) is typing.Union:
        best_type = closest_type_match(data, model.__args__)
        return construct_model_instance(best_type, data)
    # if model is list, iterate over list and recursively call
    elif typing_extensions.get_origin(model) is list:
        item_model = typing_extensions.get_args(model)[0]
        return [construct_model_instance(item_model, item) for item in data]
    # if model is a BaseModel subclass, recurse into its annotated fields.
    # NOTE: this branch must come before the generic isinstance(model, type)
    # branch -- model classes are themselves instances of ``type``, so the
    # previous ordering made this branch unreachable and silently discarded
    # the recursively constructed field values.
    elif isinstance(model, type) and issubclass(model, BaseModel):
        new_data = {}
        for field_name, field_type in model.__annotations__.items():
            if field_name in data:
                new_data[field_name] = construct_model_instance(field_type, data[field_name])
        # Keep extra/unannotated keys from ``data`` but prefer the
        # recursively constructed values (the original built new_data and
        # then never used it).
        return model.model_construct(**{**data, **new_data})
    # if model is a scalar type like str, number, etc., use RootModel to construct
    elif isinstance(model, type):
        root_model = RootModel[model]
        # try to coerce value to model type
        try:
            return root_model(data).root
        except ValidationError:
            # if not possible, construct without validation.  ``root=`` is
            # required: model_construct's first positional parameter is
            # ``_fields_set``, not the value.
            return root_model.model_construct(root=data).root
    raise ApiTypeError(f"Unable to construct model instance of type {model}")
+
+
class Dictionary(BaseModel):
    """
    For free-form objects that can have any keys and values
    (i.e. "type: object" with no properties)
    """
    class Config:
        # Accept arbitrary extra fields instead of rejecting them.
        extra = 'allow'
+
+
def DeprecationWarningOnce(func=None, *, prefix=None):
    """Decorator that logs a deprecation warning on the first call only.

    Usable bare (``@DeprecationWarningOnce``) or parameterized
    (``@DeprecationWarningOnce(prefix="Pet")``); with a prefix the message
    becomes ``"<prefix>.<name> is deprecated"``.  The warning goes through
    ``instance.api_client.configuration.logger``, so the wrapped callable
    must be a method on an API object exposing that attribute chain.
    """
    def decorator(fn):
        already_warned = False

        def wrapper(instance, *args, **kwargs):
            nonlocal already_warned
            if not already_warned:
                message = f"{fn.__name__} is deprecated"
                if prefix:
                    message = f"{prefix}.{message}"
                instance.api_client.configuration.logger.warning(message)
                # Flip the flag only after the warning was emitted.
                already_warned = True
            return fn(instance, *args, **kwargs)

        return wrapper

    # Bare usage hands us the function directly; parameterized usage must
    # return the decorator for a second call.
    return decorator(func) if func is not None else decorator
+
class JSONEncoder(json.JSONEncoder):
    """JSON encoder that normalizes generated-schema wrapper types
    (``NoneClass``/``BoolClass``), ``Decimal`` values and frozen dicts
    into plain JSON-serializable Python values."""

    compact_separators = (',', ':')

    def default(self, obj):
        # Order matters: e.g. Decimal must be handled before the dict/list
        # fallthrough, and the wrapper classes before generic containers.
        if isinstance(obj, str):
            return str(obj)
        if isinstance(obj, float):
            return float(obj)
        if isinstance(obj, int):
            return int(obj)
        if isinstance(obj, Decimal):
            # Whole-number decimals serialize as ints, the rest as floats.
            return int(obj) if obj.as_tuple().exponent >= 0 else float(obj)
        if isinstance(obj, NoneClass):
            return None
        if isinstance(obj, BoolClass):
            return bool(obj)
        if isinstance(obj, (dict, frozendict.frozendict)):
            return {key: self.default(val) for key, val in obj.items()}
        if isinstance(obj, (list, tuple)):
            return [self.default(item) for item in obj]
        raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
+
+
class ParameterInType(enum.Enum):
    """Locations where an OpenAPI parameter may be sent (the spec's ``in`` field)."""
    QUERY = 'query'
    HEADER = 'header'
    PATH = 'path'
    COOKIE = 'cookie'
+
+
class ParameterStyle(enum.Enum):
    """OpenAPI parameter serialization styles (the spec's ``style`` field)."""
    MATRIX = 'matrix'
    LABEL = 'label'
    FORM = 'form'
    SIMPLE = 'simple'
    SPACE_DELIMITED = 'spaceDelimited'
    PIPE_DELIMITED = 'pipeDelimited'
    DEEP_OBJECT = 'deepObject'
+
+
class PrefixSeparatorIterator:
    """Endless iterator for rfc6570 expansions: yields ``prefix`` on the
    first ``next()`` and ``separator`` on every one after.

    ``item_separator`` joins items *within* a single expanded variable;
    for the '.', '|' and '%20' separators it equals ``separator``,
    otherwise a comma is used.
    """

    def __init__(self, prefix: str, separator: str):
        self.prefix = prefix
        self.separator = separator
        self.first = True
        self.item_separator = separator if separator in {'.', '|', '%20'} else ','

    def __iter__(self):
        return self

    def __next__(self):
        if not self.first:
            return self.separator
        self.first = False
        return self.prefix
+
+
class ParameterSerializerBase:
    """Shared rfc6570 (URI Template) expansion helpers used by all
    parameter serializers (form, simple, label, matrix styles)."""

    @classmethod
    def _get_default_explode(cls, style: ParameterStyle) -> bool:
        # Base default; StyleFormSerializer overrides this for form style.
        return False

    @staticmethod
    def __ref6570_item_value(in_data: typing.Any, percent_encode: bool):
        """
        Get representation if str/float/int/None/items in list/ values in dict
        None is returned if an item is undefined, use cases are value=
        - None
        - []
        - {}
        - [None, None None]
        - {'a': None, 'b': None}
        """
        # type() (not isinstance) deliberately excludes bool, which is an
        # int subclass; bools are handled by __konfig_bool_expansion.
        if type(in_data) in {str, float, int}:
            if percent_encode:
                return quote(str(in_data))
            return str(in_data)
        elif isinstance(in_data, none_type):
            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
            return None
        elif isinstance(in_data, list) and not in_data:
            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
            return None
        elif isinstance(in_data, dict) and not in_data:
            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
            return None
        raise ApiValueError('Unable to generate a ref6570 item representation of {}'.format(in_data))

    @staticmethod
    def _to_dict(name: str, value: str):
        # Serialized parameters are returned as a one-entry {name: value} dict.
        return {name: value}

    """
    rfc6570 does not specify how boolean values are serialized so we use lowercase "true" and "false
    """
    @classmethod
    def __konfig_bool_expansion(
        cls,
        in_data: typing.Any,
        prefix_separator_iterator: PrefixSeparatorIterator,
        var_name_piece: str,
        named_parameter_expansion: bool
    ) -> str:
        # Konfig extension: booleans expand to literal "true"/"false".
        item_value = "true" if in_data is True else "false"
        # NOTE: item_value can never be None or '' here given the assignment
        # above; this check is kept for symmetry with the other expansions.
        if item_value is None or (item_value == '' and prefix_separator_iterator.separator == ';'):
            return next(prefix_separator_iterator) + var_name_piece
        value_pair_equals = '=' if named_parameter_expansion else ''
        return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value

    @classmethod
    def __ref6570_str_float_int_expansion(
        cls,
        variable_name: str,
        in_data: typing.Any,
        explode: bool,
        percent_encode: bool,
        prefix_separator_iterator: PrefixSeparatorIterator,
        var_name_piece: str,
        named_parameter_expansion: bool
    ) -> str:
        # Scalar expansion: "<sep><name>=<value>" for named styles,
        # "<sep><value>" otherwise.
        item_value = cls.__ref6570_item_value(in_data, percent_encode)
        if item_value is None or (item_value == '' and prefix_separator_iterator.separator == ';'):
            return next(prefix_separator_iterator) + var_name_piece
        value_pair_equals = '=' if named_parameter_expansion else ''
        return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value

    @classmethod
    def __ref6570_list_expansion(
        cls,
        variable_name: str,
        in_data: typing.Any,
        explode: bool,
        percent_encode: bool,
        prefix_separator_iterator: PrefixSeparatorIterator,
        var_name_piece: str,
        named_parameter_expansion: bool
    ) -> str:
        # Undefined items (None/empty containers) are dropped before joining.
        item_values = [cls.__ref6570_item_value(v, percent_encode) for v in in_data]
        item_values = [v for v in item_values if v is not None]
        if not item_values:
            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
            return ""
        value_pair_equals = '=' if named_parameter_expansion else ''
        if not explode:
            # Unexploded: one "<name>=a,b,c" style pair.
            return (
                next(prefix_separator_iterator) +
                var_name_piece +
                value_pair_equals +
                prefix_separator_iterator.item_separator.join(item_values)
            )
        # exploded: one "<name>=<value>" pair per item.
        return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
            [var_name_piece + value_pair_equals + val for val in item_values]
        )

    @classmethod
    def __ref6570_dict_expansion(
        cls,
        variable_name: str,
        in_data: typing.Any,
        explode: bool,
        percent_encode: bool,
        prefix_separator_iterator: PrefixSeparatorIterator,
        var_name_piece: str,
        named_parameter_expansion: bool
    ) -> str:
        # Entries whose value is undefined (None/empty container) are dropped.
        in_data_transformed = {key: cls.__ref6570_item_value(val, percent_encode) for key, val in in_data.items()}
        in_data_transformed = {key: val for key, val in in_data_transformed.items() if val is not None}
        if not in_data_transformed:
            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
            return ""
        value_pair_equals = '=' if named_parameter_expansion else ''
        if not explode:
            # Unexploded: "<name>=k1,v1,k2,v2".
            return (
                next(prefix_separator_iterator) +
                var_name_piece + value_pair_equals +
                prefix_separator_iterator.item_separator.join(
                    prefix_separator_iterator.item_separator.join(
                        item_pair
                    ) for item_pair in in_data_transformed.items()
                )
            )
        # exploded: "k1=v1<sep>k2=v2".
        return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
            [key + '=' + val for key, val in in_data_transformed.items()]
        )

    @classmethod
    def _ref6570_expansion(
        cls,
        variable_name: str,
        in_data: typing.Any,
        explode: bool,
        percent_encode: bool,
        prefix_separator_iterator: PrefixSeparatorIterator
    ) -> str:
        """
        Separator is for separate variables like dict with explode true, not for array item separation
        """
        # '&' and ';' separators produce name=value pairs; other separators
        # emit bare values.
        named_parameter_expansion = prefix_separator_iterator.separator in {'&', ';'}
        var_name_piece = variable_name if named_parameter_expansion else ''
        # type() excludes bool (an int subclass); bools fall through to the
        # dedicated branch below.
        if type(in_data) in {str, float, int}:
            return cls.__ref6570_str_float_int_expansion(
                variable_name,
                in_data,
                explode,
                percent_encode,
                prefix_separator_iterator,
                var_name_piece,
                named_parameter_expansion
            )
        elif isinstance(in_data, none_type):
            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
            return ""
        elif isinstance(in_data, list):
            return cls.__ref6570_list_expansion(
                variable_name,
                in_data,
                explode,
                percent_encode,
                prefix_separator_iterator,
                var_name_piece,
                named_parameter_expansion
            )
        elif isinstance(in_data, dict):
            return cls.__ref6570_dict_expansion(
                variable_name,
                in_data,
                explode,
                percent_encode,
                prefix_separator_iterator,
                var_name_piece,
                named_parameter_expansion
            )
        elif isinstance(in_data, bool):
            return cls.__konfig_bool_expansion(
                in_data,
                prefix_separator_iterator,
                var_name_piece,
                named_parameter_expansion
            )
        # bytes, etc
        raise ApiValueError('Unable to generate a ref6570 representation of {}'.format(in_data))
+
+
class StyleFormSerializer(ParameterSerializerBase):
    """Serializer for ``style=form`` parameters (query and cookie)."""

    @classmethod
    def _get_default_explode(cls, style: ParameterStyle) -> bool:
        # Form style defaults to exploded expansion; everything else
        # defers to the base class.
        return True if style is ParameterStyle.FORM else super()._get_default_explode(style)

    def _serialize_form(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list],
        name: str,
        explode: bool,
        percent_encode: bool,
        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
    ) -> str:
        """Expand ``in_data`` as an rfc6570 form-style ('&'-separated) string."""
        iterator = prefix_separator_iterator or PrefixSeparatorIterator('', '&')
        return self._ref6570_expansion(
            variable_name=name,
            in_data=in_data,
            explode=explode,
            percent_encode=percent_encode,
            prefix_separator_iterator=iterator
        )
+
+
class StyleSimpleSerializer(ParameterSerializerBase):
    """Serializer for ``style=simple`` parameters (path and header)."""

    def _serialize_simple(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list],
        name: str,
        explode: bool,
        percent_encode: bool
    ) -> str:
        """Expand ``in_data`` as an rfc6570 simple-style (comma-separated) string."""
        return self._ref6570_expansion(
            variable_name=name,
            in_data=in_data,
            explode=explode,
            percent_encode=percent_encode,
            prefix_separator_iterator=PrefixSeparatorIterator('', ',')
        )
+
+
class JSONDetector:
    """Detects JSON media types, including suffixed and parameterized ones.

    Works for:
        application/json
        application/json; charset=UTF-8
        application/json-patch+json
        application/geo+json
    """
    __json_content_type_pattern = re.compile("application/[^+]*[+]?(json);?.*")

    @classmethod
    def _content_type_is_json(cls, content_type: str) -> bool:
        # A non-None match means some flavor of JSON.
        return cls.__json_content_type_pattern.match(content_type) is not None
+
+
@dataclass
class ParameterBase(JSONDetector):
    """Base class for OpenAPI parameter serializers.

    A parameter is described either by a ``schema`` (plus a serialization
    ``style``) or by a single-entry ``content`` media-type map -- never both.
    NOTE: the explicit ``__init__`` below is kept by ``@dataclass`` (the
    decorator does not overwrite explicitly defined methods), so the
    dataclass field declarations serve only as annotations.
    """
    name: str
    in_type: ParameterInType
    required: bool
    style: typing.Optional[ParameterStyle]
    explode: typing.Optional[bool]
    allow_reserved: typing.Optional[bool]
    schema: typing.Optional[typing.Type[Schema]]
    content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]

    # Which parameter locations each style is valid for (OpenAPI style table).
    __style_to_in_type = {
        ParameterStyle.MATRIX: {ParameterInType.PATH},
        ParameterStyle.LABEL: {ParameterInType.PATH},
        ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
        ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
        ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
        ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
        ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
    }
    # Default style per location when none is given.
    __in_type_to_default_style = {
        ParameterInType.QUERY: ParameterStyle.FORM,
        ParameterInType.PATH: ParameterStyle.SIMPLE,
        ParameterInType.HEADER: ParameterStyle.SIMPLE,
        ParameterInType.COOKIE: ParameterStyle.FORM,
    }
    # Headers that must be set via dedicated mechanisms, not as parameters.
    __disallowed_header_names = {'Accept', 'Content-Type', 'Authorization'}
    # Shared encoder used to normalize schema values before serialization.
    _json_encoder = JSONEncoder()

    @classmethod
    def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
        # No style means the location default will be applied later.
        if style is None:
            return
        in_type_set = cls.__style_to_in_type[style]
        if in_type not in in_type_set:
            raise ValueError(
                'Invalid style and in_type combination. For style={} only in_type={} are allowed'.format(
                    style, in_type_set
                )
            )

    def __init__(
        self,
        name: str,
        in_type: ParameterInType,
        required: bool = False,
        style: typing.Optional[ParameterStyle] = None,
        explode: bool = False,
        allow_reserved: typing.Optional[bool] = None,
        schema: typing.Optional[typing.Type[Schema]] = None,
        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
    ):
        """Validate the schema/content/style/in_type combination and store it.

        :raises ValueError: if neither or both of schema/content are given,
            if a disallowed header name is used, if the style is invalid for
            the location, or if content has more than one entry.
        """
        if schema is None and content is None:
            raise ValueError('Value missing; Pass in either schema or content')
        if schema and content:
            raise ValueError('Too many values provided. Both schema and content were provided. Only one may be input')
        if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
            raise ValueError('Invalid name, name may not be one of {}'.format(self.__disallowed_header_names))
        self.__verify_style_to_in_type(style, in_type)
        # Apply the per-location default style for schema-based parameters.
        if content is None and style is None:
            style = self.__in_type_to_default_style[in_type]
        if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
            raise ValueError('Invalid content length, content length must equal 1')
        self.in_type = in_type
        self.name = name
        self.required = required
        self.style = style
        self.explode = explode
        self.allow_reserved = allow_reserved
        self.schema = schema
        self.content = content

    def _serialize_json(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list],
        eliminate_whitespace: bool = False
    ) -> str:
        # Compact separators drop spaces after ',' and ':' (used for query
        # strings where whitespace would need percent-encoding).
        if eliminate_whitespace:
            return json.dumps(in_data, separators=self._json_encoder.compact_separators)
        return json.dumps(in_data)
+
+
class PathParameter(ParameterBase, StyleSimpleSerializer):
    """Serializer for ``in: path`` parameters.

    Supports simple (default), label and matrix styles, plus JSON
    content-based serialization.
    """

    def __init__(
        self,
        name: str,
        required: bool = False,
        style: typing.Optional[ParameterStyle] = None,
        explode: bool = False,
        allow_reserved: typing.Optional[bool] = None,
        schema: typing.Optional[typing.Type[Schema]] = None,
        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
    ):
        super().__init__(
            name,
            in_type=ParameterInType.PATH,
            required=required,
            style=style,
            explode=explode,
            allow_reserved=allow_reserved,
            schema=schema,
            content=content
        )

    def __serialize_label(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list]
    ) -> typing.Dict[str, str]:
        # Label style: '.'-prefixed, '.'-separated rfc6570 expansion.
        prefix_separator_iterator = PrefixSeparatorIterator('.', '.')
        value = self._ref6570_expansion(
            variable_name=self.name,
            in_data=in_data,
            explode=self.explode,
            percent_encode=True,
            prefix_separator_iterator=prefix_separator_iterator
        )
        return self._to_dict(self.name, value)

    def __serialize_matrix(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list]
    ) -> typing.Dict[str, str]:
        # Matrix style: ';'-prefixed, ';'-separated rfc6570 expansion.
        prefix_separator_iterator = PrefixSeparatorIterator(';', ';')
        value = self._ref6570_expansion(
            variable_name=self.name,
            in_data=in_data,
            explode=self.explode,
            percent_encode=True,
            prefix_separator_iterator=prefix_separator_iterator
        )
        return self._to_dict(self.name, value)

    def __serialize_simple(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list],
    ) -> typing.Dict[str, str]:
        # Simple style: comma-separated expansion, percent-encoded.
        value = self._serialize_simple(
            in_data=in_data,
            name=self.name,
            explode=self.explode,
            percent_encode=True
        )
        return self._to_dict(self.name, value)

    def serialize(
        self,
        in_data: typing.Union[
            Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
    ) -> typing.Dict[str, str]:
        """Serialize ``in_data`` to a {name: rendered-value} dict using the
        configured style or content media type.

        :raises NotImplementedError: for non-JSON content media types.
        """
        if self.schema:
            # Validate through the schema, then normalize to plain values.
            cast_in_data = self.schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            """
            simple -> path
                path:
                    returns path_params: dict
            label -> path
                returns path_params
            matrix -> path
                returns path_params
            """
            if self.style:
                if self.style is ParameterStyle.SIMPLE:
                    return self.__serialize_simple(cast_in_data)
                elif self.style is ParameterStyle.LABEL:
                    return self.__serialize_label(cast_in_data)
                elif self.style is ParameterStyle.MATRIX:
                    return self.__serialize_matrix(cast_in_data)
        # self.content will be length one
        for content_type, schema in self.content.items():
            cast_in_data = schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            if self._content_type_is_json(content_type):
                value = self._serialize_json(cast_in_data)
                return self._to_dict(self.name, value)
            raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+
+
class QueryParameter(ParameterBase, StyleFormSerializer):
    """Serializer for ``in: query`` parameters.

    Supports form (default), spaceDelimited and pipeDelimited styles, plus
    JSON content-based serialization.
    """

    def __init__(
        self,
        name: str,
        required: bool = False,
        style: typing.Optional[ParameterStyle] = None,
        explode: typing.Optional[bool] = None,
        allow_reserved: typing.Optional[bool] = None,
        schema: typing.Optional[typing.Type[Schema]] = None,
        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
    ):
        # Query parameters default to form style / exploded expansion.
        used_style = ParameterStyle.FORM if style is None else style
        used_explode = self._get_default_explode(used_style) if explode is None else explode

        super().__init__(
            name,
            in_type=ParameterInType.QUERY,
            required=required,
            style=used_style,
            explode=used_explode,
            allow_reserved=allow_reserved,
            schema=schema,
            content=content
        )

    def __serialize_space_delimited(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list],
        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
    ) -> typing.Dict[str, str]:
        # '%20'-separated expansion; a shared iterator may be passed in so
        # several query params can chain '?'/'&' prefixes correctly.
        if prefix_separator_iterator is None:
            prefix_separator_iterator = self.get_prefix_separator_iterator()
        value = self._ref6570_expansion(
            variable_name=self.name,
            in_data=in_data,
            explode=self.explode,
            percent_encode=True,
            prefix_separator_iterator=prefix_separator_iterator
        )
        return self._to_dict(self.name, value)

    def __serialize_pipe_delimited(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list],
        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
    ) -> typing.Dict[str, str]:
        # '|'-separated expansion for list/dict values.
        if prefix_separator_iterator is None:
            prefix_separator_iterator = self.get_prefix_separator_iterator()
        value = self._ref6570_expansion(
            variable_name=self.name,
            in_data=in_data,
            explode=self.explode,
            percent_encode=True,
            prefix_separator_iterator=prefix_separator_iterator
        )
        return self._to_dict(self.name, value)

    def __serialize_form(
        self,
        in_data: typing.Union[None, int, float, str, bool, dict, list],
        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
    ) -> typing.Dict[str, str]:
        # Standard '&'-separated name=value expansion.
        if prefix_separator_iterator is None:
            prefix_separator_iterator = self.get_prefix_separator_iterator()
        value = self._serialize_form(
            in_data,
            name=self.name,
            explode=self.explode,
            percent_encode=True,
            prefix_separator_iterator=prefix_separator_iterator
        )
        return self._to_dict(self.name, value)

    def get_prefix_separator_iterator(self) -> typing.Optional[PrefixSeparatorIterator]:
        # Returns None implicitly for styles without a defined iterator
        # (e.g. deepObject) -- callers relying on this must handle None.
        if self.style is ParameterStyle.FORM:
            return PrefixSeparatorIterator('?', '&')
        elif self.style is ParameterStyle.SPACE_DELIMITED:
            return PrefixSeparatorIterator('', '%20')
        elif self.style is ParameterStyle.PIPE_DELIMITED:
            return PrefixSeparatorIterator('', '|')

    def serialize(
        self,
        in_data: typing.Union[
            Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict],
        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
    ) -> typing.Dict[str, str]:
        """Serialize ``in_data`` to a {name: rendered-query-fragment} dict.

        :raises NotImplementedError: for non-JSON content media types.
        """
        if self.schema:
            # Validate through the schema, then normalize to plain values.
            cast_in_data = self.schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            """
            form -> query
                query:
                    - GET/HEAD/DELETE: could use fields
                    - PUT/POST: must use urlencode to send parameters
                returns fields: tuple
            spaceDelimited -> query
                returns fields
            pipeDelimited -> query
                returns fields
            deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
                returns fields
            """
            if self.style:
                # TODO update query ones to omit setting values when [] {} or None is input
                if self.style is ParameterStyle.FORM:
                    return self.__serialize_form(cast_in_data, prefix_separator_iterator)
                elif self.style is ParameterStyle.SPACE_DELIMITED:
                    return self.__serialize_space_delimited(cast_in_data, prefix_separator_iterator)
                elif self.style is ParameterStyle.PIPE_DELIMITED:
                    return self.__serialize_pipe_delimited(cast_in_data, prefix_separator_iterator)
        # self.content will be length one
        if prefix_separator_iterator is None:
            prefix_separator_iterator = self.get_prefix_separator_iterator()
        for content_type, schema in self.content.items():
            cast_in_data = schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            if self._content_type_is_json(content_type):
                # Whitespace-free JSON, percent-encoded for the query string.
                value = self._serialize_json(cast_in_data, eliminate_whitespace=True)
                return self._to_dict(
                    self.name,
                    next(prefix_separator_iterator) + self.name + '=' + quote(value)
                )
            raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+
+
class CookieParameter(ParameterBase, StyleFormSerializer):
    """Serializer for ``in: cookie`` parameters (form style, unencoded)."""

    def __init__(
        self,
        name: str,
        required: bool = False,
        style: typing.Optional[ParameterStyle] = None,
        explode: typing.Optional[bool] = None,
        allow_reserved: typing.Optional[bool] = None,
        schema: typing.Optional[typing.Type[Schema]] = None,
        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
    ):
        # Form style is applied only for schema-based parameters with no
        # explicit style; content-based parameters keep style as given
        # (possibly None).
        used_style = ParameterStyle.FORM if style is None and content is None and schema else style
        used_explode = self._get_default_explode(used_style) if explode is None else explode

        super().__init__(
            name,
            in_type=ParameterInType.COOKIE,
            required=required,
            style=used_style,
            explode=used_explode,
            allow_reserved=allow_reserved,
            schema=schema,
            content=content
        )

    def serialize(
        self,
        in_data: typing.Union[
            Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
    ) -> typing.Dict[str, str]:
        """Serialize ``in_data`` to a {name: rendered-cookie-value} dict.

        :raises NotImplementedError: for non-JSON content media types.
        """
        if self.schema:
            # Validate through the schema, then normalize to plain values.
            cast_in_data = self.schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            """
            form -> cookie
                returns fields: tuple
            """
            if self.style:
                """
                TODO add escaping of comma, space, equals
                or turn encoding on
                """
                value = self._serialize_form(
                    cast_in_data,
                    explode=self.explode,
                    name=self.name,
                    percent_encode=False,
                    prefix_separator_iterator=PrefixSeparatorIterator('', '&')
                )
                return self._to_dict(self.name, value)
        # self.content will be length one
        for content_type, schema in self.content.items():
            cast_in_data = schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            if self._content_type_is_json(content_type):
                value = self._serialize_json(cast_in_data)
                return self._to_dict(self.name, value)
            raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+
+
class HeaderParameter(ParameterBase, StyleSimpleSerializer):
    """Serializer for ``in: header`` parameters (simple style)."""

    def __init__(
        self,
        name: str,
        required: bool = False,
        style: typing.Optional[ParameterStyle] = None,
        explode: bool = False,
        allow_reserved: typing.Optional[bool] = None,
        schema: typing.Optional[typing.Type[Schema]] = None,
        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
    ):
        super().__init__(
            name,
            in_type=ParameterInType.HEADER,
            required=required,
            style=style,
            explode=explode,
            allow_reserved=allow_reserved,
            schema=schema,
            content=content
        )

    @staticmethod
    def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict:
        # Drop falsy entries, then build an HTTPHeaderDict from the rest.
        data = tuple(t for t in in_data if t)
        headers = HTTPHeaderDict()
        if not data:
            return headers
        headers.extend(data)
        return headers

    def serialize(
        self,
        in_data: typing.Union[
            Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
    ) -> HTTPHeaderDict:
        """Serialize ``in_data`` to an HTTPHeaderDict holding this header.

        :raises NotImplementedError: for non-JSON content media types.
        """
        if self.schema:
            # Validate through the schema, then normalize to plain values.
            cast_in_data = self.schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            """
            simple -> header
                headers: PoolManager needs a mapping, tuple is close
                returns headers: dict
            """
            if self.style:
                # Header values are not percent-encoded.
                value = self._serialize_simple(cast_in_data, self.name, self.explode, False)
                return self.__to_headers(((self.name, value),))
        # self.content will be length one
        for content_type, schema in self.content.items():
            cast_in_data = schema(in_data)
            cast_in_data = self._json_encoder.default(cast_in_data)
            if self._content_type_is_json(content_type):
                value = self._serialize_json(cast_in_data)
                return self.__to_headers(((self.name, value),))
            raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+
+
class Encoding:
    """OpenAPI Encoding object: per-property serialization settings for
    multipart / x-www-form-urlencoded request bodies."""

    def __init__(
        self,
        content_type: str,
        headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
        style: typing.Optional[ParameterStyle] = None,
        explode: bool = False,
        allow_reserved: bool = False,
    ):
        self.content_type = content_type
        self.headers = headers
        self.style = style
        self.explode = explode
        self.allow_reserved = allow_reserved
+
+
@dataclass
class MediaType:
    """
    Used to store request and response body schema information
    encoding:
        A map between a property name and its encoding information.
        The key, being the property name, MUST exist in the schema as a property.
        The encoding object SHALL only apply to requestBody objects when the media type is
        multipart or application/x-www-form-urlencoded.
    """
    # Schema describing the body payload; None for schema-less bodies.
    schema: typing.Optional[typing.Type[Schema]] = None
    encoding: typing.Optional[typing.Dict[str, Encoding]] = None
+
+
@dataclass
class ApiResponseWithoutDeserialization(ApiResponse):
    """Marker subclass of ApiResponse: signals the body was returned without
    deserialization (behavior otherwise identical to ApiResponse)."""
    pass
+
@dataclass
class ApiResponseWithoutDeserializationAsync(AsyncApiResponse):
    """Async counterpart of ApiResponseWithoutDeserialization: marker
    subclass of AsyncApiResponse with the body left undeserialized."""
    pass
+
+
+class OpenApiResponse(JSONDetector):
+ __filename_content_disposition_pattern = re.compile('filename="(.+?)"')
+
    def __init__(
        self,
        response_cls: typing.Type[ApiResponse] = ApiResponse,
        response_cls_async: typing.Type[AsyncApiResponse] = AsyncApiResponse,
        content: typing.Optional[typing.Dict[str, MediaType]] = None,
        headers: typing.Optional[typing.List[HeaderParameter]] = None,
    ):
        """Describe one documented response: its media types, headers and the
        wrapper classes used for sync/async results.

        :raises ValueError: if ``content`` is an empty dict.
        """
        self.headers = headers
        # An empty content dict is meaningless: either omit content or
        # provide at least one media type.
        if content is not None and len(content) == 0:
            raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
        self.content = content
        self.response_cls = response_cls
        self.response_cls_async = response_cls_async
+
    @staticmethod
    def __deserialize_json(response: bytes) -> typing.Any:
        """Decode a JSON response body (raw bytes) into Python objects."""
        # python must be >= 3.9 so we can pass in bytes into json.loads
        return json.loads(response)
+
    @staticmethod
    def __file_name_from_response_url(response_url: typing.Optional[str]) -> typing.Optional[str]:
        """Derive a download file name from the response URL's path.

        Returns the path basename only when it has an extension; otherwise
        (or for a missing URL/path) returns None.
        """
        if response_url is None:
            return None
        url_path = urlparse(response_url).path
        if url_path:
            path_basename = os.path.basename(url_path)
            if path_basename:
                _filename, ext = os.path.splitext(path_basename)
                if ext:
                    return path_basename
        return None
+
    @classmethod
    def __file_name_from_content_disposition(cls, content_disposition: typing.Optional[str]) -> typing.Optional[str]:
        """Extract the ``filename="..."`` value from a Content-Disposition
        header, or None when the header is absent or has no filename."""
        if content_disposition is None:
            return None
        match = cls.__filename_content_disposition_pattern.search(content_disposition)
        if not match:
            return None
        return match.group(1)
+
    def __deserialize_application_octet_stream(
        self, response: urllib3.HTTPResponse
    ) -> typing.Union[bytes, io.BufferedReader]:
        """
        urllib3 use cases:
        1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
        2. when preload_content=False (stream=True) then supports_chunked_reads is True and
        a file will be written and returned
        """
        if response.supports_chunked_reads():
            # Prefer the server-suggested name, fall back to the URL path.
            file_name = (
                self.__file_name_from_content_disposition(response.headers.get('content-disposition'))
                or self.__file_name_from_response_url(response.geturl())
            )

            if file_name is None:
                _fd, path = tempfile.mkstemp()
            else:
                path = os.path.join(tempfile.gettempdir(), file_name)

            # Stream the body to disk in chunks, then reopen for reading.
            with open(path, 'wb') as new_file:
                chunk_size = 1024
                while True:
                    data = response.read(chunk_size)
                    if not data:
                        break
                    new_file.write(data)
            # release_conn is needed for streaming connections only
            response.release_conn()
            new_file = open(path, 'rb')
            return new_file
        else:
            # Fully preloaded response: hand back the raw bytes.
            return response.data
+
+    @staticmethod
+    def __deserialize_multipart_form_data(
+        response: bytes
+    ) -> typing.Dict[str, typing.Any]:
+        # Parse the multipart body with the stdlib email parser; each MIME
+        # part's Content-Disposition "name" parameter becomes a dict key.
+        msg = email.message_from_bytes(response)
+        return {
+            # Decode the payload with the part's declared charset when one is
+            # present; otherwise fall back to the raw (undecoded) payload.
+            part.get_param("name", header="Content-Disposition"): part.get_payload(
+                decode=True
+            ).decode(part.get_content_charset())
+            if part.get_content_charset()
+            else part.get_payload()
+            for part in msg.get_payload()
+        }
+
+ def __get_schema_for_content_type(
+ self,
+ content_type
+ ) -> typing.Optional[typing.Type[Schema]]:
+ """
+ Finds the correct SchemaObject for a particular content type. Handles
+ the asterisk "*" character that is used to group media types into ranges
+ (https://www.rfc-editor.org/rfc/rfc7231#section-5.3.2). Also handles
+ parameters in the form of name=value pairs.
+ """
+ media_types = self.content.keys()
+ matched_media_type = OpenApiResponse.match_content_type(
+ content_type=content_type,
+ media_types=media_types
+ )
+ if matched_media_type is None:
+ return None
+ return self.content[matched_media_type].schema
+
+ @staticmethod
+ def match_content_type(content_type: str, media_types: typing.List[str]) -> typing.Optional[str]:
+ """
+ Matches a content type to a media type in a list of media types, handling media type ranges as defined in RFC7231.
+
+ Parameters:
+ content_type (str): The content type to match.
+ media_types (list): The list of media types to search.
+
+ Returns:
+ str: The first media type that matches the content type, or None if no match is found.
+ """
+ for media_type in media_types:
+ if media_type == '*/*' or media_type == content_type:
+ return media_type
+ elif '/' in media_type:
+ type_, subtype = media_type.split('/')
+ if (type_ == '*' or type_ == content_type.split('/')[0]) and \
+ (subtype == '*' or subtype == content_type.split('/')[1].split(';')[0]):
+ return media_type
+
+ return None
+
+    async def deserialize_async(self, response: AsyncResponseWrapper, configuration: Configuration, skip_deserialization = False) -> AsyncApiResponse:
+        """
+        Deserializes an HTTP response body into an object.
+
+        :param response: wrapper holding the aiohttp response plus timing info
+        :param configuration: client configuration, passed to schema validation
+        :param skip_deserialization: when True, return the parsed body without
+            running schema validation
+        """
+        content_type = response.http_response.content_type
+        deserialized_body = unset
+        if self.content is not None:
+            if len(self.content) == 0:
+                # some specs do not define response content media type schemas
+                return self.response_cls_async(
+                    round_trip_time=response.round_trip_time,
+                    response=response.http_response,
+                    body=unset,
+                    headers=response.http_response.headers,
+                    status=response.http_response.status
+                )
+            # Look up the declared schema first so an undeclared content type
+            # is rejected even when skip_deserialization is requested.
+            body_schema = self.__get_schema_for_content_type(content_type)
+            if body_schema is None:
+                raise ApiValueError(
+                    f"Invalid content_type returned. Content_type='{content_type}' was returned "
+                    f"when only {str(set(self.content))} are defined for status_code={str(response.http_response.status)}"
+                )
+            if self._content_type_is_json(content_type):
+                body_data = self.__deserialize_json(await response.http_response.read())
+            elif content_type.startswith('multipart/form-data'):
+                body_data = self.__deserialize_multipart_form_data(await response.http_response.read())
+            else:
+                raise NotImplementedError('Deserialization of {} has not yet been implemented'.format(content_type))
+            if skip_deserialization:
+                return self.response_cls_async(
+                    round_trip_time=response.round_trip_time,
+                    response=response.http_response,
+                    body=body_data,
+                    headers=response.http_response.headers,
+                    status=response.http_response.status
+                )
+            # Execute validation and throw as a side effect if validation fails
+            body_schema.from_openapi_data_oapg(
+                body_data,
+                _configuration=configuration
+            )
+            # Validation passed, set deserialized_body to plain old deserialized data
+            deserialized_body = body_data
+
+        return self.response_cls_async(
+            round_trip_time=response.round_trip_time,
+            response=response.http_response,
+            body=deserialized_body,
+            headers=response.http_response.headers,
+            status=response.http_response.status
+        )
+
+ def deserialize_body(self, response: ResponseWrapper, content_type: str) -> (any, str):
+ if self._content_type_is_json(content_type):
+ deserialized_body = self.__deserialize_json(response.http_response.data)
+ elif content_type == 'application/octet-stream':
+ deserialized_body = self.__deserialize_application_octet_stream(response.http_response)
+ elif content_type.startswith('multipart/form-data'):
+ deserialized_body = self.__deserialize_multipart_form_data(response.http_response.data)
+ content_type = 'multipart/form-data'
+ else: # If we don't know how to deserialize, use raw body string
+ deserialized_body = response.http_response.data.decode()
+ return deserialized_body, content_type
+
+ def deserialize(self, response: ResponseWrapper, configuration: Configuration, skip_deserialization = False) -> ApiResponse:
+ content_type = response.http_response.headers.get('content-type')
+ streamed = response.http_response.supports_chunked_reads()
+
+ deserialized_headers = unset
+ if self.headers is not None:
+ # TODO add header deserialization here
+ pass
+
+ if self.content is not None and len(self.content) == 0:
+ # some specs do not define response content media type schemas
+ return self.response_cls(
+ round_trip_time=response.round_trip_time,
+ response=response.http_response,
+ body=unset,
+ headers=response.http_response.headers,
+ status=response.http_response.status
+ )
+
+ try:
+ deserialized_body, content_type = self.deserialize_body(response, content_type)
+ except Exception:
+ # Most likely content-type did not match actual body
+ deserialized_body = unset
+
+ if not skip_deserialization:
+ body_schema = self.__get_schema_for_content_type(content_type)
+ if body_schema is None:
+ raise ApiValueError(
+ f"Invalid content_type returned. Content_type='{content_type}' was returned "
+ f"when only {str(set(self.content))} are defined for status_code={str(response.http_response.status)}"
+ )
+ # Execute validation and throw as a side effect if validation fails
+ body_schema.from_openapi_data_oapg(
+ body_data,
+ _configuration=configuration
+ )
+
+ if streamed:
+ response.http_response.release_conn()
+
+ return self.response_cls(
+ round_trip_time=response.round_trip_time,
+ response=response.http_response,
+ body=deserialized_body,
+ headers=response.http_response.headers,
+ status=response.http_response.status
+ )
+
+
+class ApiClient:
+ """Generic API client for OpenAPI client library builds.
+
+ OpenAPI generic API client. This client handles the client-
+ server communication, and is invariant across implementations. Specifics of
+ the methods and models for each application are generated from the OpenAPI
+ templates.
+
+ This class is auto generated by Konfig (https://konfigthis.com)
+
+ :param configuration: .Configuration object for this client
+ :param header_name: a header to pass when making calls to the API.
+ :param header_value: a header value to pass when making calls to
+ the API.
+ :param cookie: a cookie to include in the header when making calls
+ to the API
+ :param pool_threads: The number of threads to use for async requests
+ to the API. More threads means more concurrent API requests.
+ """
+
+ _pool = None
+
+ def __init__(
+ self,
+ configuration: typing.Optional[Configuration] = None,
+ header_name: typing.Optional[str] = None,
+ header_value: typing.Optional[str] = None,
+ cookie: typing.Optional[str] = None,
+ pool_threads: int = 1
+ ):
+ if configuration is None:
+ configuration = Configuration()
+ self.configuration = configuration
+ self.pool_threads = pool_threads
+
+ self.rest_client = rest.RESTClientObject(configuration)
+ self.default_headers = HTTPHeaderDict()
+ if header_name is not None:
+ self.default_headers[header_name] = header_value
+ self.cookie = cookie
+ # Set default User-Agent.
+ self.user_agent = 'Konfig/1.0.0-beta.1/python'
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ def close(self):
+ if self._pool:
+ self._pool.close()
+ self._pool.join()
+ self._pool = None
+ if hasattr(atexit, 'unregister'):
+ atexit.unregister(self.close)
+
+ @property
+ def pool(self):
+ """Create thread pool on first request
+ avoids instantiating unused threadpool for blocking clients.
+ """
+ if self._pool is None:
+ atexit.register(self.close)
+ self._pool = ThreadPool(self.pool_threads)
+ return self._pool
+
+ @property
+ def user_agent(self):
+ """User agent for this API client"""
+ return self.default_headers['User-Agent']
+
+ @user_agent.setter
+ def user_agent(self, value):
+ self.default_headers['User-Agent'] = value
+
+ def set_default_header(self, header_name, header_value):
+ self.default_headers[header_name] = header_value
+
+ async def __async_call_api(
+ self,
+ resource_path: str,
+ method: str,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ serialized_body: typing.Optional[typing.Union[str, bytes]] = None,
+ body: typing.Any = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ auth_settings: typing.Optional[typing.List[str]] = None,
+ stream: bool = False,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ host: typing.Optional[str] = None,
+ prefix_separator_iterator: PrefixSeparatorIterator = None,
+ ) -> AsyncResponseWrapper:
+
+ # header parameters
+ used_headers = HTTPHeaderDict(self.default_headers)
+ if self.cookie:
+ headers['Cookie'] = self.cookie
+
+ # auth setting
+ resource_path_ref = [self.update_params_for_auth(
+ used_headers,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ prefix_separator_iterator
+ )]
+
+ # must happen after cookie setting and auth setting in case user is overriding those
+ if headers:
+ used_headers.update(headers)
+
+ request_before_url_hook(
+ resource_path_ref=resource_path_ref,
+ method=method,
+ configuration=self.configuration,
+ body=body,
+ fields=fields,
+ auth_settings=auth_settings,
+ headers=used_headers,
+ )
+
+ # request url
+ if host is None:
+ url = self.configuration.host + resource_path_ref[0]
+ else:
+ # use server/host defined in path or operation instead
+ url = host + resource_path_ref[0]
+
+ request_after_hook(
+ resource_path=resource_path_ref[0],
+ method=method,
+ configuration=self.configuration,
+ body=body,
+ fields=fields,
+ auth_settings=auth_settings,
+ headers=used_headers,
+ )
+
+ # perform request and return response
+ response = await self.async_request(
+ method,
+ url,
+ headers=used_headers,
+ fields=fields,
+ body=serialized_body,
+ stream=stream,
+ timeout=timeout,
+ )
+
+
+ return response
+
+ def __call_api(
+ self,
+ resource_path: str,
+ method: str,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ serialized_body: typing.Optional[typing.Union[str, bytes]] = None,
+ body: typing.Any = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ auth_settings: typing.Optional[typing.List[str]] = None,
+ stream: bool = False,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ host: typing.Optional[str] = None,
+ prefix_separator_iterator: PrefixSeparatorIterator = None,
+ ) -> ResponseWrapper:
+
+ # header parameters
+ used_headers = HTTPHeaderDict(self.default_headers)
+ if self.cookie:
+ headers['Cookie'] = self.cookie
+
+ # auth setting
+ resource_path_ref = [self.update_params_for_auth(
+ used_headers,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ prefix_separator_iterator
+ )]
+
+ # must happen after cookie setting and auth setting in case user is overriding those
+ if headers:
+ used_headers.update(headers)
+
+ request_before_url_hook(
+ resource_path_ref=resource_path_ref,
+ method=method,
+ configuration=self.configuration,
+ body=body,
+ fields=fields,
+ auth_settings=auth_settings,
+ headers=used_headers,
+ )
+
+ # request url
+ if host is None:
+ url = self.configuration.host + resource_path_ref[0]
+ else:
+ # use server/host defined in path or operation instead
+ url = host + resource_path_ref[0]
+
+ request_after_hook(
+ resource_path=resource_path_ref[0],
+ method=method,
+ configuration=self.configuration,
+ body=body,
+ fields=fields,
+ auth_settings=auth_settings,
+ headers=used_headers,
+ )
+
+ # perform request and return response
+ response = self.request(
+ method,
+ url,
+ headers=used_headers,
+ fields=fields,
+ body=serialized_body,
+ stream=stream,
+ timeout=timeout,
+ )
+
+
+ return response
+
+ async def async_call_api(
+ self,
+ resource_path: str,
+ method: str,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ serialized_body: typing.Optional[typing.Union[str, bytes]] = None,
+ body: typing.Any = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ auth_settings: typing.Optional[typing.List[str]] = None,
+ stream: bool = False,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ host: typing.Optional[str] = None,
+ prefix_separator_iterator: PrefixSeparatorIterator = None,
+ ) -> AsyncResponseWrapper:
+ """Makes the HTTP request (synchronous) and returns deserialized data.
+
+ :param resource_path: Path to method endpoint.
+ :param method: Method to call.
+ :param headers: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+ :param fields: Request post form parameters,
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param auth_settings: Auth Settings names for the request.
+ :param stream: if True, the urllib3.HTTPResponse object will
+ be returned without reading/decoding response
+ data. Also when True, if the openapi spec describes a file download,
+ the data will be written to a local filesystme file and the BinarySchema
+ instance will also inherit from FileSchema and FileIO
+ Default is False.
+ :type stream: bool, optional
+ :param timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :param host: api endpoint host
+ :return: response
+ """
+ return await self.__async_call_api(
+ resource_path,
+ method,
+ headers,
+ serialized_body,
+ body,
+ fields,
+ auth_settings,
+ stream,
+ timeout,
+ host,
+ prefix_separator_iterator,
+ )
+
+ def call_api(
+ self,
+ resource_path: str,
+ method: str,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ serialized_body: typing.Optional[typing.Union[str, bytes]] = None,
+ body: typing.Any = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ auth_settings: typing.Optional[typing.List[str]] = None,
+ stream: bool = False,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ host: typing.Optional[str] = None,
+ prefix_separator_iterator: PrefixSeparatorIterator = None,
+ ) -> ResponseWrapper:
+ """Makes the HTTP request (synchronous) and returns deserialized data.
+
+ :param resource_path: Path to method endpoint.
+ :param method: Method to call.
+ :param headers: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+ :param fields: Request post form parameters,
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param auth_settings: Auth Settings names for the request.
+ :param stream: if True, the urllib3.HTTPResponse object will
+ be returned without reading/decoding response
+ data. Also when True, if the openapi spec describes a file download,
+ the data will be written to a local filesystme file and the BinarySchema
+ instance will also inherit from FileSchema and FileIO
+ Default is False.
+ :type stream: bool, optional
+ :param timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :param host: api endpoint host
+ :return: response
+ """
+ return self.__call_api(
+ resource_path,
+ method,
+ headers,
+ serialized_body,
+ body,
+ fields,
+ auth_settings,
+ stream,
+ timeout,
+ host,
+ prefix_separator_iterator,
+ )
+
+ def fields_to_dict(self, fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]]):
+ """Converts fields to dict.
+
+ :param fields: fields
+ :return: dict
+ """
+ if fields is None:
+ return None
+ return {k: v for k, v in fields}
+
+ async def async_request(
+ self,
+ method: str,
+ url: str,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ body: typing.Optional[typing.Union[str, bytes]] = None,
+ stream: bool = False,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ ) -> AsyncResponseWrapper:
+ if body and fields:
+ raise ApiValueError("body parameter cannot be used with fields parameter")
+ data = None
+ if body:
+ data=body
+ if fields:
+ data=self.fields_to_dict(fields)
+ session = aiohttp.ClientSession()
+ t1 = time.time()
+ if method == "GET":
+ session.get(url)
+ response = await session.get(url, headers=headers)
+ return AsyncResponseWrapper(response, time.time() - t1, session)
+ elif method == "HEAD":
+ response = await session.head(url, headers=headers)
+ return AsyncResponseWrapper(response, time.time() - t1, session)
+ elif method == "OPTIONS":
+ response = await session.options(url, data=data, headers=headers)
+ return AsyncResponseWrapper(response, time.time() - t1, session)
+ elif method == "POST":
+ response = await session.post(url, data=data, headers=headers)
+ return AsyncResponseWrapper(response, time.time() - t1, session)
+ elif method == "PUT":
+ response = await session.put(url, data=data, headers=headers)
+ return AsyncResponseWrapper(response, time.time() - t1, session)
+ elif method == "PATCH":
+ response = await session.patch(url, data=data, headers=headers)
+ return AsyncResponseWrapper(response, time.time() - t1, session)
+ elif method == "DELETE":
+ response = await session.delete(url, data=data, headers=headers)
+ return AsyncResponseWrapper(response, time.time() - t1, session)
+ raise ApiValueError(
+ "http method must be `GET`, `HEAD`, `OPTIONS`,"
+ " `POST`, `PATCH`, `PUT` or `DELETE`."
+ )
+
+ def request(
+ self,
+ method: str,
+ url: str,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ body: typing.Optional[typing.Union[str, bytes]] = None,
+ stream: bool = False,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ ) -> ResponseWrapper:
+ """Makes the HTTP request using RESTClient."""
+ if method == "GET":
+ return self.rest_client.GET(url,
+ stream=stream,
+ timeout=timeout,
+ headers=headers)
+ elif method == "HEAD":
+ return self.rest_client.HEAD(url,
+ stream=stream,
+ timeout=timeout,
+ headers=headers)
+ elif method == "OPTIONS":
+ return self.rest_client.OPTIONS(url,
+ headers=headers,
+ fields=fields,
+ stream=stream,
+ timeout=timeout,
+ body=body)
+ elif method == "POST":
+ return self.rest_client.POST(url,
+ headers=headers,
+ fields=fields,
+ stream=stream,
+ timeout=timeout,
+ body=body)
+ elif method == "PUT":
+ return self.rest_client.PUT(url,
+ headers=headers,
+ fields=fields,
+ stream=stream,
+ timeout=timeout,
+ body=body)
+ elif method == "PATCH":
+ return self.rest_client.PATCH(url,
+ headers=headers,
+ fields=fields,
+ stream=stream,
+ timeout=timeout,
+ body=body)
+ elif method == "DELETE":
+ return self.rest_client.DELETE(url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ body=body)
+ else:
+ raise ApiValueError(
+ "http method must be `GET`, `HEAD`, `OPTIONS`,"
+ " `POST`, `PATCH`, `PUT` or `DELETE`."
+ )
+
+ def update_params_for_auth(
+ self,
+ headers,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ prefix_separator_iterator: PrefixSeparatorIterator = None
+ ) -> str:
+ """Updates header and query params based on authentication setting.
+
+ :param headers: Header parameters dict to be updated.
+ :param auth_settings: Authentication setting identifiers list.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: A object representing the body of the HTTP request.
+ The object type is the return value of _encoder.default().
+ """
+ if not auth_settings:
+ return resource_path
+ if prefix_separator_iterator is None:
+ prefix_separator_iterator = PrefixSeparatorIterator("?", "&")
+
+ for auth in auth_settings:
+ auth_setting = self.configuration.auth_settings().get(auth)
+ if not auth_setting:
+ continue
+ if auth_setting['in'] == 'cookie':
+ headers.add('Cookie', auth_setting['value'])
+ elif auth_setting['in'] == 'header':
+ if auth_setting['type'] != 'http-signature':
+ headers.add(auth_setting['key'], auth_setting['value'])
+ elif auth_setting['in'] == 'query':
+ """ TODO implement auth in query
+ need to pass in prefix_separator_iterator
+ and need to output resource_path with query params added
+ """
+ resource_path += ParameterSerializerBase._ref6570_expansion(
+ variable_name=auth_setting['key'],
+ in_data=auth_setting['value'],
+ explode=False,
+ percent_encode=False,
+ prefix_separator_iterator=prefix_separator_iterator
+ )
+ else:
+ raise ApiValueError(
+ 'Authentication token must be in `query` or `header`'
+ )
+ return resource_path
+
+
+class Api:
+    """NOTE:
+    This class is auto generated by Konfig (https://konfigthis.com)
+    """
+
+    def __init__(self, api_client: typing.Optional[ApiClient] = None):
+        # Fall back to a default-configured ApiClient when none is supplied.
+        if api_client is None:
+            api_client = ApiClient()
+        self.api_client = api_client
+
+    @staticmethod
+    def _verify_typed_dict_inputs_oapg(cls: typing.Type[typing_extensions.TypedDict], data: typing.Dict[str, typing.Any]):
+        """
+        Ensures that:
+        - required keys are present
+        - additional properties are not input
+        - value stored under required keys do not have the value unset
+        Note: detailed value checking is done in schema classes
+
+        :raises ApiTypeError: when required keys are missing or unknown keys
+            are supplied
+        :raises ApiValueError: when a required key holds the sentinel ``unset``
+        """
+        missing_required_keys = []
+        required_keys_with_unset_values = []
+        for required_key in cls.__required_keys__:
+            if required_key not in data:
+                missing_required_keys.append(required_key)
+                continue
+            value = data[required_key]
+            if value is unset:
+                required_keys_with_unset_values.append(required_key)
+        if missing_required_keys:
+            raise ApiTypeError(
+                '{} missing {} required arguments: {}'.format(
+                    cls.__name__, len(missing_required_keys), missing_required_keys
+                )
+            )
+        if required_keys_with_unset_values:
+            raise ApiValueError(
+                '{} contains invalid unset values for {} required keys: {}'.format(
+                    cls.__name__, len(required_keys_with_unset_values), required_keys_with_unset_values
+                )
+            )
+
+        # Reject keys that are neither required nor optional on the TypedDict.
+        disallowed_additional_keys = []
+        for key in data:
+            if key in cls.__required_keys__ or key in cls.__optional_keys__:
+                continue
+            disallowed_additional_keys.append(key)
+        if disallowed_additional_keys:
+            raise ApiTypeError(
+                '{} got {} unexpected keyword arguments: {}'.format(
+                    cls.__name__, len(disallowed_additional_keys), disallowed_additional_keys
+                )
+            )
+
+    def _get_host_oapg(
+        self,
+        operation_id: str,
+        servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
+        host_index: typing.Optional[int] = None
+    ) -> typing.Optional[str]:
+        """Resolve the host URL for ``operation_id`` from the configuration's
+        server settings; returns None when no host can be determined and no
+        operation-specific servers were declared."""
+        configuration = self.api_client.configuration
+        try:
+            if host_index is None:
+                # Per-operation server index takes precedence over the global one.
+                index = configuration.server_operation_index.get(
+                    operation_id, configuration.server_index
+                )
+            else:
+                index = host_index
+            server_variables = configuration.server_operation_variables.get(
+                operation_id, configuration.server_variables
+            )
+            host = configuration.get_host_from_settings(
+                index, variables=server_variables, servers=servers
+            )
+        except IndexError:
+            # Out-of-range index is only an error when explicit servers exist.
+            if servers:
+                raise ApiValueError(
+                    "Invalid host index. Must be 0 <= index < %s" %
+                    len(servers)
+                )
+            host = None
+        return host
+
+
+class SerializedRequestBody(typing_extensions.TypedDict, total=False):
+    # Payload ready for the transport layer: either a raw body (str/bytes) or
+    # multipart fields. Both keys are optional (total=False); exactly one is
+    # produced by RequestBody.serialize depending on the content type.
+    body: typing.Union[str, bytes]
+    fields: typing.Tuple[typing.Union[RequestField, typing.Tuple[str, str]], ...]
+
+
+class RequestBody(StyleFormSerializer, JSONDetector):
+    """
+    A request body parameter
+    content: content_type to MediaType Schema info
+    """
+    __json_encoder = JSONEncoder()
+
+    def __init__(
+        self,
+        content: typing.Dict[str, MediaType],
+        required: bool = False,
+    ):
+        self.required = required
+        if len(content) == 0:
+            raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
+        self.content = content
+
+    def __serialize_json(
+        self,
+        in_data: typing.Any
+    ) -> typing.Dict[str, bytes]:
+        # Convert schema instances to plain Python first, then encode compact
+        # JSON (no whitespace separators) as UTF-8 bytes.
+        in_data = self.__json_encoder.default(in_data)
+        json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode(
+            "utf-8"
+        )
+        return dict(body=json_str)
+
+    @staticmethod
+    def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
+        # Only scalar values can be sent as text/plain; container and
+        # None/bool wrapper types are rejected explicitly.
+        if isinstance(in_data, frozendict.frozendict):
+            raise ValueError('Unable to serialize type frozendict.frozendict to text/plain')
+        elif isinstance(in_data, tuple):
+            raise ValueError('Unable to serialize type tuple to text/plain')
+        elif isinstance(in_data, NoneClass):
+            raise ValueError('Unable to serialize type NoneClass to text/plain')
+        elif isinstance(in_data, BoolClass):
+            raise ValueError('Unable to serialize type BoolClass to text/plain')
+        return dict(body=str(in_data))
+
+    def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
+        # Complex values become their own application/json multipart section.
+        json_value = self.__json_encoder.default(value)
+        return RequestField(name=key, data=json.dumps(json_value), headers={'Content-Type': 'application/json'})
+
+    def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
+        # Pick the per-part Content-Type from the runtime value: str ->
+        # text/plain, bytes -> application/octet-stream, FileIO -> guessed
+        # from the file name, anything else -> application/json.
+        if isinstance(value, str):
+            return RequestField(name=key, data=str(value), headers={'Content-Type': 'text/plain'})
+        elif isinstance(value, bytes):
+            return RequestField(name=key, data=value, headers={'Content-Type': 'application/octet-stream'})
+        elif isinstance(value, FileIO):
+            filename = os.path.basename(value.name)
+            request_field = RequestField(
+                name=key,
+                data=value.read(),
+                filename=filename,
+                headers={'Content-Type': guess_content_type(filename)}
+            )
+            # The whole file was read above, so the handle can be closed here.
+            value.close()
+            return request_field
+        else:
+            return self.__multipart_json_item(key=key, value=value)
+
+    def __serialize_multipart_form_data(
+        self, in_data: Schema
+    ) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
+        if not isinstance(in_data, frozendict.frozendict) and not isinstance(in_data, list) and not isinstance(in_data, tuple):
+            raise ValueError(f'Unable to serialize {in_data} to multipart/form-data because it is not a dict of data or a list of data')
+        """
+        In a multipart/form-data request body, each schema property, or each element of a schema array property,
+        takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
+        for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
+
+        When passing in multipart types, boundaries MAY be used to separate sections of the content being
+        transferred – thus, the following default Content-Types are defined for multipart:
+
+        If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
+        If the property is complex, or an array of complex values, the default Content-Type is application/json
+        Question: how is the array of primitives encoded?
+        If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
+        """
+        fields: typing.List[RequestField] = []
+
+        def add_field(data):
+            # Flatten one dict of properties into RequestFields, exploding
+            # tuple values into one field per element.
+            for key, value in data.items():
+                if isinstance(value, tuple):
+                    if value:
+                        # values use explode = True, so the code makes a RequestField for each item with name=key
+                        for item in value:
+                            request_field = self.__multipart_form_item(key=key, value=item)
+                            fields.append(request_field)
+                    else:
+                        # send an empty array as json because exploding will not send it
+                        request_field = self.__multipart_json_item(key=key, value=value)
+                        fields.append(request_field)
+                else:
+                    request_field = self.__multipart_form_item(key=key, value=value)
+                    fields.append(request_field)
+
+        if isinstance(in_data, list) or isinstance(in_data, tuple):
+            # NOTE(review): list/tuple input assumes every element is a dict
+            # of properties — confirm against callers.
+            for item in in_data:
+                add_field(item)
+        else:
+            add_field(in_data)
+
+        # This is necessary to fill the "Content-Disposition" header needed for naming fields in multipart
+        for field in fields:
+            field.make_multipart(content_type=field.headers["Content-Type"])
+
+        return dict(fields=tuple(fields))
+
+    def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
+        if isinstance(in_data, bytes):
+            return dict(body=in_data)
+        # FileIO type
+        result = dict(body=in_data.read())
+        in_data.close()
+        return result
+
+    def __serialize_application_x_www_form_data(
+        self, in_data: typing.Any
+    ) -> SerializedRequestBody:
+        """
+        POST submission of form data in body
+        """
+        if not isinstance(in_data, frozendict.frozendict):
+            raise ValueError(
+                f'Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data')
+        cast_in_data = self.__json_encoder.default(in_data)
+        value = self._serialize_form(cast_in_data, name='', explode=True, percent_encode=True)
+        return dict(body=value)
+
+    def serialize(
+        self, in_data: typing.Any, content_type: str
+    ) -> SerializedRequestBody:
+        """
+        If a str is returned then the result will be assigned to data when making the request
+        If a tuple is returned then the result will be used as fields input in encode_multipart_formdata
+        Return a tuple of
+
+        The key of the return dict is
+        - body for application/json
+        - encode_multipart and fields for multipart/form-data
+        """
+        # NOTE(review): an unknown content_type raises KeyError here rather
+        # than the NotImplementedError at the bottom — confirm intended.
+        media_type = self.content[content_type]
+        if isinstance(in_data, media_type.schema):
+            cast_in_data = in_data
+        elif isinstance(in_data, (dict, frozendict.frozendict)) and in_data:
+            try:
+                cast_in_data = media_type.schema(**in_data)
+            except TypeError as e:
+                raise MissingRequiredParametersError(e)
+        else:
+            cast_in_data = media_type.schema(in_data)
+        # TODO check for and use encoding if it exists
+        # and content_type is multipart or application/x-www-form-urlencoded
+        if self._content_type_is_json(content_type):
+            return self.__serialize_json(cast_in_data)
+        elif content_type == 'text/plain':
+            return self.__serialize_text_plain(cast_in_data)
+        elif content_type == 'multipart/form-data':
+            return self.__serialize_multipart_form_data(cast_in_data)
+        elif content_type == 'application/x-www-form-urlencoded':
+            return self.__serialize_application_x_www_form_data(cast_in_data)
+        elif content_type == 'application/octet-stream':
+            return self.__serialize_application_octet_stream(cast_in_data)
+        raise NotImplementedError('Serialization has not yet been implemented for {}'.format(content_type))
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/api_response.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/api_response.py
new file mode 100644
index 000000000..ef5362aa0
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/api_response.py
@@ -0,0 +1,32 @@
+from dataclasses import dataclass
+import aiohttp
+from multidict import CIMultiDictProxy
+from urllib3._collections import HTTPHeaderDict
+import urllib3
+import typing
+
+
+@dataclass
+class ApiResponse:
+    # Result of a synchronous (urllib3-backed) request.
+    headers: HTTPHeaderDict
+    status: int
+    response: urllib3.HTTPResponse
+    # Wall-clock seconds between issuing the request and receiving the response.
+    round_trip_time: float
+    # Deserialized body (schema-validated data, or a sentinel when absent).
+    body: typing.Any
+
+
+@dataclass
+class AsyncApiResponse:
+    # Result of an asynchronous (aiohttp-backed) request.
+    headers: CIMultiDictProxy[str]
+    status: int
+    response: aiohttp.ClientResponse
+    # Wall-clock seconds between issuing the request and receiving the response.
+    round_trip_time: float
+    # Deserialized body (schema-validated data, or a sentinel when absent).
+    body: typing.Any
+
+
+@dataclass
+class AsyncGeneratorResponse:
+    # Streaming variant: the body is exposed as an async generator instead of
+    # a fully-read value, so the caller iterates the content incrementally.
+    headers: CIMultiDictProxy[str]
+    status: int
+    content: typing.AsyncGenerator
+    response: aiohttp.ClientResponse
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/__init__.py
new file mode 100644
index 000000000..7840f7726
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/__init__.py
@@ -0,0 +1,3 @@
+# do not import all endpoints into this module because that uses a lot of memory and stack frames
+# if you need the ability to import all endpoints then import them from
+# tags, paths, or path_to_api, or tag_to_api
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/path_to_api.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/path_to_api.py
new file mode 100644
index 000000000..05912c83a
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/path_to_api.py
@@ -0,0 +1,17 @@
+import typing_extensions
+
+from python_readme_header_snippet.paths import PathValues
+from python_readme_header_snippet.apis.paths.simple_endpoint import SimpleEndpoint
+
+PathToApi = typing_extensions.TypedDict(
+ 'PathToApi',
+ {
+ PathValues.SIMPLEENDPOINT: SimpleEndpoint,
+ }
+)
+
+path_to_api = PathToApi(
+ {
+ PathValues.SIMPLEENDPOINT: SimpleEndpoint,
+ }
+)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/paths/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/paths/__init__.py
new file mode 100644
index 000000000..73e9d1588
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/paths/__init__.py
@@ -0,0 +1,3 @@
+# do not import all endpoints into this module because that uses a lot of memory and stack frames
+# if you need the ability to import all endpoints from this module, import them with
+# from python_readme_header_snippet.apis.path_to_api import path_to_api
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/paths/simple_endpoint.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/paths/simple_endpoint.py
new file mode 100644
index 000000000..2f2b71b69
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/paths/simple_endpoint.py
@@ -0,0 +1,7 @@
+from python_readme_header_snippet.paths.simple_endpoint.get import ApiForget
+
+
+class SimpleEndpoint(
+ ApiForget,
+):
+ pass
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tag_to_api.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tag_to_api.py
new file mode 100644
index 000000000..433f3efb4
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tag_to_api.py
@@ -0,0 +1,17 @@
+import typing_extensions
+
+from python_readme_header_snippet.apis.tags import TagValues
+from python_readme_header_snippet.apis.tags.test_api import TestApi
+
+TagToApi = typing_extensions.TypedDict(
+ 'TagToApi',
+ {
+ TagValues.TEST: TestApi,
+ }
+)
+
+tag_to_api = TagToApi(
+ {
+ TagValues.TEST: TestApi,
+ }
+)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/__init__.py
new file mode 100644
index 000000000..5629d8e8f
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/__init__.py
@@ -0,0 +1,9 @@
+# do not import all endpoints into this module because that uses a lot of memory and stack frames
+# if you need the ability to import all endpoints from this module, import them with
+# from python_readme_header_snippet.apis.tag_to_api import tag_to_api
+
+import enum
+
+
+class TagValues(str, enum.Enum):
+ TEST = "Test"
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/test_api.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/test_api.py
new file mode 100644
index 000000000..d3fa7064f
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/test_api.py
@@ -0,0 +1,27 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from python_readme_header_snippet.paths.simple_endpoint.get import Fetch
+from python_readme_header_snippet.apis.tags.test_api_raw import TestApiRaw
+
+
+class TestApi(
+ Fetch,
+):
+ """NOTE:
+ This class is auto generated by Konfig (https://konfigthis.com)
+ """
+ raw: TestApiRaw
+
+ def __init__(self, api_client=None):
+ super().__init__(api_client)
+ self.raw = TestApiRaw(api_client)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/test_api_raw.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/test_api_raw.py
new file mode 100644
index 000000000..0e39ce21c
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/apis/tags/test_api_raw.py
@@ -0,0 +1,22 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from python_readme_header_snippet.paths.simple_endpoint.get import FetchRaw
+
+
+class TestApiRaw(
+ FetchRaw,
+):
+ """NOTE:
+ This class is auto generated by Konfig (https://konfigthis.com)
+ """
+ pass
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client.py
new file mode 100644
index 000000000..f86601d88
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client.py
@@ -0,0 +1,32 @@
+# coding: utf-8
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import typing
+import inspect
+from datetime import date, datetime
+from python_readme_header_snippet.client_custom import ClientCustom
+from python_readme_header_snippet.configuration import Configuration
+from python_readme_header_snippet.api_client import ApiClient
+from python_readme_header_snippet.type_util import copy_signature
+from python_readme_header_snippet.apis.tags.test_api import TestApi
+
+
+
+class PythonReadmeHeaderSnippetClient(ClientCustom):
+
+ def __init__(self, configuration: typing.Union[Configuration, None] = None, **kwargs):
+ super().__init__(configuration, **kwargs)
+ if (len(kwargs) > 0):
+ configuration = Configuration(**kwargs)
+ if (configuration is None):
+ raise Exception("configuration is required")
+ api_client = ApiClient(configuration)
+ self.test: TestApi = TestApi(api_client)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client.pyi b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client.pyi
new file mode 100644
index 000000000..f86601d88
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client.pyi
@@ -0,0 +1,32 @@
+# coding: utf-8
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import typing
+import inspect
+from datetime import date, datetime
+from python_readme_header_snippet.client_custom import ClientCustom
+from python_readme_header_snippet.configuration import Configuration
+from python_readme_header_snippet.api_client import ApiClient
+from python_readme_header_snippet.type_util import copy_signature
+from python_readme_header_snippet.apis.tags.test_api import TestApi
+
+
+
+class PythonReadmeHeaderSnippetClient(ClientCustom):
+
+ def __init__(self, configuration: typing.Union[Configuration, None] = None, **kwargs):
+ super().__init__(configuration, **kwargs)
+ if (len(kwargs) > 0):
+ configuration = Configuration(**kwargs)
+ if (configuration is None):
+ raise Exception("configuration is required")
+ api_client = ApiClient(configuration)
+ self.test: TestApi = TestApi(api_client)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client_custom.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client_custom.py
new file mode 100644
index 000000000..e73ae0fc9
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/client_custom.py
@@ -0,0 +1,29 @@
+# coding: utf-8
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import typing
+
+from python_readme_header_snippet.configuration import Configuration
+from python_readme_header_snippet.api_client import ApiClient
+
+
+
+class ClientCustom:
+
+ def __init__(self, configuration: typing.Union[Configuration, None] = None, **kwargs):
+ if (len(kwargs) > 0):
+ configuration = Configuration(**kwargs)
+ if (configuration is None):
+ raise Exception("configuration is required")
+ api_client = ApiClient(configuration)
+ # customize here
+
+ # add custom methods here
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/configuration.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/configuration.py
new file mode 100644
index 000000000..7054cc60e
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/configuration.py
@@ -0,0 +1,493 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import copy
+import logging
+import multiprocessing
+import sys
+import urllib3
+
+
+import re
+from urllib.parse import urlparse
+from http import client as http_client
+from python_readme_header_snippet.exceptions_base import ApiValueError
+from python_readme_header_snippet.exceptions import ClientConfigurationError
+from python_readme_header_snippet.exceptions import InvalidHostConfigurationError
+
+
+JSON_SCHEMA_VALIDATION_KEYWORDS = {
+ 'multipleOf', 'maximum', 'exclusiveMaximum',
+ 'minimum', 'exclusiveMinimum', 'maxLength',
+ 'minLength', 'pattern', 'maxItems', 'minItems',
+ 'uniqueItems', 'maxProperties', 'minProperties',
+}
+
+class Configuration(object):
+ """NOTE:
+ This class is auto generated by Konfig (https://konfigthis.com)
+
+ :param host: Base url
+ :param api_key: Dict to store API key(s).
+ Each entry in the dict specifies an API key.
+ The dict key is the name of the security scheme in the OAS specification.
+ The dict value is the API key secret.
+ :param api_key_prefix: Dict to store API prefix (e.g. Bearer)
+ The dict key is the name of the security scheme in the OAS specification.
+ The dict value is an API key prefix when generating the auth data.
+ :param username: Username for HTTP basic authentication
+ :param password: Password for HTTP basic authentication
+ :param discard_unknown_keys: Boolean value indicating whether to discard
+ unknown properties. A server may send a response that includes additional
+ properties that are not known by the client in the following scenarios:
+ 1. The OpenAPI document is incomplete, i.e. it does not match the server
+ implementation.
+ 2. The client was generated using an older version of the OpenAPI document
+ and the server has been upgraded since then.
+ If a schema in the OpenAPI document defines the additionalProperties attribute,
+ then all undeclared properties received by the server are injected into the
+ additional properties map. In that case, there are undeclared properties, and
+ nothing to discard.
+ :param disabled_client_side_validations (string): Comma-separated list of
+ JSON schema validation keywords to disable JSON schema structural validation
+ rules. The following keywords may be specified: multipleOf, maximum,
+ exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
+ maxItems, minItems.
+ By default, the validation is performed for data generated locally by the client
+ and data received from the server, independent of any validation performed by
+ the server side. If the input data does not satisfy the JSON schema validation
+ rules specified in the OpenAPI document, an exception is raised.
+ If disabled_client_side_validations is set, structural validation is
+ disabled. This can be useful to troubleshoot data validation problem, such as
+ when the OpenAPI document validation rules do not match the actual API data
+ received by the server.
+ :param server_index: Index to servers configuration.
+ :param server_variables: Mapping with string values to replace variables in
+ templated server configuration. The validation of enums is performed for
+ variables with defined enum values before.
+ :param server_operation_index: Mapping from operation ID to an index to server
+ configuration.
+ :param server_operation_variables: Mapping from operation ID to a mapping with
+ string values to replace variables in templated server configuration.
+ The validation of enums is performed for variables with defined enum values before.
+
+ :Example:
+
+ API Key Authentication Example.
+ Given the following security scheme in the OpenAPI specification:
+ components:
+ securitySchemes:
+ cookieAuth: # name for the security scheme
+ type: apiKey
+ in: cookie
+ name: JSESSIONID # cookie name
+
+ You can programmatically set the cookie:
+
+conf = python_readme_header_snippet.Configuration(
+ api_key={'cookieAuth': 'abc123'}
+ api_key_prefix={'cookieAuth': 'JSESSIONID'}
+)
+
+ The following cookie will be added to the HTTP request:
+ Cookie: JSESSIONID abc123
+ """
+
+ _default = None
+
+ def __init__(self, host=None,
+ api_key=None, api_key_prefix=None,
+ username=None, password=None,
+ discard_unknown_keys=False,
+ x_api_key=None,
+ disabled_client_side_validations="",
+ server_index=None, server_variables=None,
+ server_operation_index=None, server_operation_variables=None,
+ ):
+ """Constructor
+ """
+ self.host = "https://python-readme-header-snippet.konfigthis.com" if host is None else host
+ """Default Base url
+ """
+ self.server_index = 0 if server_index is None and host is None else server_index
+ self.server_operation_index = server_operation_index or {}
+ """Default server index
+ """
+ self.server_variables = server_variables or {}
+ self.server_operation_variables = server_operation_variables or {}
+ """Default server variables
+ """
+ self.temp_folder_path = None
+ """Temp file folder for downloading files
+ """
+ # Authentication Settings
+        self.api_key = {}
+        if api_key:
+            if (isinstance(api_key, str)):
+                self.api_key = {'ApiKeyAuth': api_key}
+            else:
+                self.api_key = api_key
+        # x_api_key is a convenience alias and takes precedence
+        if x_api_key:
+            self.api_key['ApiKeyAuth'] = x_api_key
+        # validate after both sources so x_api_key alone is accepted
+        if not self.api_key:
+            raise ClientConfigurationError('API Key "ApiKeyAuth" is required')
+ """dict to store API key(s)
+ """
+ self.api_key_prefix = {}
+ if api_key_prefix:
+ self.api_key_prefix = api_key_prefix
+ """dict to store API prefix (e.g. Bearer)
+ """
+ self.refresh_api_key_hook = None
+ """function hook to refresh API key if expired
+ """
+ self.username = username
+ """Username for HTTP basic authentication
+ """
+ self.password = password
+ """Password for HTTP basic authentication
+ """
+ self.discard_unknown_keys = discard_unknown_keys
+ self.disabled_client_side_validations = disabled_client_side_validations
+ """Logging Settings
+ """
+ self.logger = logging.getLogger("python_readme_header_snippet")
+ # if no handler for logger, add a stream handler
+ if not self.logger.handlers:
+ self.logger.addHandler(logging.StreamHandler())
+ self.logger_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ """Log format
+ """
+ self.debug = False
+ """Debug switch
+ """
+
+ self.verify_ssl = True
+ """SSL/TLS verification
+ Set this to false to skip verifying SSL certificate when calling API
+ from https server.
+ """
+ self.ssl_ca_cert = None
+ """Set this to customize the certificate file to verify the peer.
+ """
+ self.cert_file = None
+ """client certificate file
+ """
+ self.key_file = None
+ """client key file
+ """
+ self.assert_hostname = None
+ """Set this to True/False to enable/disable SSL hostname verification.
+ """
+
+ self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
+ """urllib3 connection pool's maximum number of connections saved
+ per pool. urllib3 uses 1 connection as default value, but this is
+ not the best value when you are making a lot of possibly parallel
+ requests to the same host, which is often the case here.
+ cpu_count * 5 is used as default value to increase performance.
+ """
+
+ self.proxy = None
+ """Proxy URL
+ """
+ self.proxy_headers = None
+ """Proxy headers
+ """
+ self.safe_chars_for_path_param = ''
+ """Safe chars for path_param
+ """
+ self.retries = None
+ """Adding retries to override urllib3 default value 3
+ """
+ # Enable client side validation
+ self.client_side_validation = True
+
+ # Options to pass down to the underlying urllib3 socket
+ self.socket_options = None
+
+ def __deepcopy__(self, memo):
+ cls = self.__class__
+ result = cls.__new__(cls)
+ memo[id(self)] = result
+ for k, v in self.__dict__.items():
+            if k not in ('logger',):
+ setattr(result, k, copy.deepcopy(v, memo))
+ # shallow copy of loggers
+ result.logger = copy.copy(self.logger)
+ result.debug = self.debug
+ return result
+
+ def __setattr__(self, name, value):
+ object.__setattr__(self, name, value)
+ if name == 'disabled_client_side_validations':
+ s = set(filter(None, value.split(',')))
+ for v in s:
+ if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
+ raise ApiValueError(
+                        "Invalid keyword: '{0}'".format(v))
+ self._disabled_client_side_validations = s
+
+ @classmethod
+ def set_default(cls, default):
+ """Set default instance of configuration.
+
+ It stores default configuration, which can be
+ returned by get_default_copy method.
+
+ :param default: object of Configuration
+ """
+ cls._default = copy.deepcopy(default)
+
+ @classmethod
+ def get_default_copy(cls):
+ """Return new instance of configuration.
+
+ This method returns newly created, based on default constructor,
+ object of Configuration class or returns a copy of default
+ configuration passed by the set_default method.
+
+ :return: The configuration object.
+ """
+ if cls._default is not None:
+ return copy.deepcopy(cls._default)
+ return Configuration()
+
+ @property
+ def logger_file(self):
+ """The logger file.
+
+ If the logger_file is None, then add stream handler and remove file
+ handler. Otherwise, add file handler and remove stream handler.
+
+ :param value: The logger_file path.
+ :type: str
+ """
+ return self.__logger_file
+
+ @logger_file.setter
+ def logger_file(self, value):
+ """The logger file.
+
+ If the logger_file is None, then add stream handler and remove file
+ handler. Otherwise, add file handler and remove stream handler.
+
+ :param value: The logger_file path.
+ :type: str
+ """
+ self.__logger_file = value
+ if self.__logger_file:
+ # If set logging file, then add file handler to self.logger if one
+ # does not already exist. Otherwise, do nothing.
+ if not any(isinstance(handler, logging.FileHandler)
+ for handler in self.logger.handlers):
+ self.logger.addHandler(logging.FileHandler(self.__logger_file))
+
+ @property
+ def debug(self):
+ """Debug status
+
+ :param value: The debug status, True or False.
+ :type: bool
+ """
+ return self.__debug
+
+ @debug.setter
+ def debug(self, value):
+ """Debug status
+
+ :param value: The debug status, True or False.
+ :type: bool
+ """
+ self.__debug = value
+ if self.__debug:
+ # if debug status is True, turn on debug logging
+ self.logger.setLevel(logging.DEBUG)
+ # turn on http_client debug
+ http_client.HTTPConnection.debuglevel = 1
+ else:
+ # if debug status is False, turn off debug logging,
+ # setting log level to default `logging.WARNING`
+ self.logger.setLevel(logging.WARNING)
+ # turn off http_client debug
+ http_client.HTTPConnection.debuglevel = 0
+
+ @property
+ def logger_format(self):
+ """The logger format.
+
+ The logger_formatter will be updated when sets logger_format.
+
+ :param value: The format string.
+ :type: str
+ """
+ return self.__logger_format
+
+ @logger_format.setter
+ def logger_format(self, value):
+ """The logger format.
+
+ The logger_formatter will be updated when sets logger_format.
+
+ :param value: The format string.
+ :type: str
+ """
+ self.__logger_format = value
+ # set Formatter for all handlers in self.logger
+ for handler in self.logger.handlers:
+ handler.setFormatter(logging.Formatter(self.__logger_format))
+
+ def get_api_key_with_prefix(self, identifier, alias=None):
+ """Gets API key (with prefix if set).
+
+ :param identifier: The identifier of apiKey.
+ :param alias: The alternative identifier of apiKey.
+ :return: The token for api key authentication.
+ """
+ if self.refresh_api_key_hook is not None:
+ self.refresh_api_key_hook(self)
+ key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None)
+ if key:
+ prefix = self.api_key_prefix.get(identifier)
+ if prefix:
+ return "%s %s" % (prefix, key)
+ else:
+ return key
+
+ def get_basic_auth_token(self):
+ """Gets HTTP basic authentication header (string).
+
+ :return: The token for basic HTTP authentication.
+ """
+ username = ""
+ if self.username is not None:
+ username = self.username
+ password = ""
+ if self.password is not None:
+ password = self.password
+ return urllib3.util.make_headers(
+ basic_auth=username + ':' + password
+ ).get('authorization')
+
+ def auth_settings(self):
+ """Gets Auth Settings dict for api client.
+
+ :return: The Auth Settings information dict.
+ """
+ auth = {}
+ if 'ApiKeyAuth' in self.api_key:
+ auth['ApiKeyAuth'] = {
+ 'type': 'api_key',
+ 'in': 'header',
+ 'key': 'X-API-KEY',
+ 'value': self.get_api_key_with_prefix(
+ 'ApiKeyAuth',
+ ),
+ }
+ return auth
+
+ def to_debug_report(self):
+ """Gets the essential information for debugging.
+
+ :return: The report for debugging.
+ """
+ return "Python SDK Debug Report:\n"\
+ "OS: {env}\n"\
+ "Python Version: {pyversion}\n"\
+ "Version of the API: 1.0.0\n"\
+ "SDK Package Version: 1.0.0-beta.1".\
+ format(env=sys.platform, pyversion=sys.version)
+
+ def get_host_settings(self):
+ """Gets an array of host settings
+
+ :return: An array of host settings
+ """
+ return [
+ {
+ 'url': "https://python-readme-header-snippet.konfigthis.com",
+ 'description': "Live API server",
+ }
+ ]
+
+ def get_host_from_settings(self, index, variables=None, servers=None):
+ """Gets host URL based on the index and variables
+ :param index: array index of the host settings
+ :param variables: hash of variable and the corresponding value
+ :param servers: an array of host settings or None
+ :return: URL based on host settings
+ """
+ if index is None:
+ return self._base_path
+
+ variables = {} if variables is None else variables
+ servers = self.get_host_settings() if servers is None else servers
+
+ try:
+ server = servers[index]
+ except IndexError:
+ raise ValueError(
+ "Invalid index {0} when selecting the host settings. "
+ "Must be less than {1}".format(index, len(servers)))
+
+ url = server['url']
+
+ # go through variables and replace placeholders
+ for variable_name, variable in server.get('variables', {}).items():
+ used_value = variables.get(
+ variable_name, variable['default_value'])
+
+ if 'enum_values' in variable \
+ and used_value not in variable['enum_values']:
+ raise ValueError(
+ "The variable `{0}` in the host URL has invalid value "
+ "{1}. Must be {2}.".format(
+ variable_name, variables[variable_name],
+ variable['enum_values']))
+
+ url = url.replace("{" + variable_name + "}", used_value)
+
+ return url
+
+ @property
+ def host(self):
+ """Return generated host."""
+ return self.get_host_from_settings(self.server_index, variables=self.server_variables)
+
+ @host.setter
+ def host(self, value):
+ """Fix base path."""
+ self._base_path = check_url(value)
+ self.server_index = None
+
+DOMAIN_REGEX = re.compile(
+ r'^(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9](?::[0-9]{1,5})?$|^(?:[0-9]{1,3}\.){3}[0-9]{1,3}(?::[0-9]{1,5})?$'
+)
+def check_url(url: str) -> str:
+ parsed = urlparse(url)
+ if parsed.query != '':
+ raise InvalidHostConfigurationError(url, "query string is not allowed")
+ if parsed.fragment != '':
+ raise InvalidHostConfigurationError(url, "fragment is not allowed")
+ if parsed.scheme not in ["http", "https"]:
+        raise InvalidHostConfigurationError(url, 'scheme must be "http" or "https" but found "{}"'.format(parsed.scheme))
+ if (parsed.netloc == ''):
+ raise InvalidHostConfigurationError(url, "host is not set")
+ if not DOMAIN_REGEX.match(parsed.netloc):
+ raise InvalidHostConfigurationError(url, "Invalid domain")
+ if (url.endswith("/")):
+ return url[:-1]
+ return url
+
+
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/exceptions.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/exceptions.py
new file mode 100644
index 000000000..dfa805b86
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/exceptions.py
@@ -0,0 +1,192 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+
+import typing
+from python_readme_header_snippet.api_response import ApiResponse, AsyncApiResponse
+from python_readme_header_snippet.exceptions_base import OpenApiException, ApiTypeError, ApiValueError, render_path
+
+class ClientConfigurationError(OpenApiException):
+ def __init__(self, msg):
+ super(ClientConfigurationError, self).__init__(msg)
+
+
+class ApiAttributeError(OpenApiException, AttributeError):
+ def __init__(self, msg, path_to_item=None):
+ """
+ Raised when an attribute reference or assignment fails.
+
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+ path_to_item (None/list) the path to the exception in the
+ received_data dict
+ """
+ self.path_to_item = path_to_item
+ full_msg = msg
+ if path_to_item:
+ full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiAttributeError, self).__init__(full_msg)
+
+
+class ApiKeyError(OpenApiException, KeyError):
+ def __init__(self, msg, path_to_item=None):
+ """
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+ path_to_item (None/list) the path to the exception in the
+ received_data dict
+ """
+ self.path_to_item = path_to_item
+ full_msg = msg
+ if path_to_item:
+ full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiKeyError, self).__init__(full_msg)
+
+
+class ApiStreamingException(OpenApiException):
+
+ def __init__(self, status=None, reason=None, body=None):
+ self.status = status
+ self.reason = reason
+ self.body = body
+
+ def __str__(self):
+ """Custom error messages for exception"""
+ return "({0})\n Reason: {1}\n Body: {2}".format(self.status, self.reason, self.body)
+
+
+class ApiException(OpenApiException):
+
+ def __init__(self, status=None, reason=None, api_response: typing.Optional[typing.Union[ApiResponse, AsyncApiResponse]] = None):
+ if api_response:
+ self.status = api_response.status
+ self.reason = api_response.response.reason
+ self.body = api_response.body
+ self.headers = api_response.response.headers
+ self.round_trip_time = api_response.round_trip_time
+ else:
+ self.status = status
+ self.reason = reason
+ self.body = None
+ self.headers = None
+ self.round_trip_time = None
+
+ def __str__(self):
+ """Custom error messages for exception"""
+ error_message = "({0})\n"\
+ "Reason: {1}\n".format(self.status, self.reason)
+ if self.headers:
+ error_message += "HTTP response headers: {0}\n".format(
+ self.headers)
+
+ if self.body:
+ error_message += "HTTP response body: {0}\n".format(self.body)
+
+ return error_message
+
+
+class AnyOfValidationError(OpenApiException):
+ def __init__(self, error_list: typing.List[typing.Union[ApiTypeError, ApiValueError]]):
+ self.error_list = error_list
+ sub_msgs: typing.List[str] = []
+ for type_error in error_list:
+ sub_msgs.append(str(type_error))
+ num_validation_errors = len(self.error_list)
+ if num_validation_errors == 1:
+ super().__init__(sub_msgs[0])
+ else:
+ # create a string that says how many validation errors there were and
+ # prints each sub_msg out using a bulleted list of messages
+ msg = "{} validation error{} detected: \n".format(num_validation_errors, "s" if num_validation_errors > 1 else "")
+ for i, sub_msg in enumerate(sub_msgs):
+ msg += " {}. {}\n".format(i+1, sub_msg)
+ super().__init__(msg)
+
+
+class InvalidHostConfigurationError(ClientConfigurationError):
+ def __init__(self, host: str, reason: str):
+ self.host = host
+ self.reason = reason
+ super().__init__('Invalid host: "{}", {}'.format(self.host, self.reason))
+
+
+class MissingRequiredPropertiesError(ApiTypeError):
+ def __init__(self, msg: str):
+ super().__init__(msg)
+
+
+class MissingRequiredParametersError(ApiTypeError):
+ def __init__(self, error: TypeError):
+ self.error = error
+ error_str = str(error)
+ self.msg = error_str
+ if "__new__()" in error_str:
+ # parse error to reformat
+ missing_parameters = error_str.split(":")[1].strip()
+ number_of_parameters = error_str.split("missing")[1].split("required")[0].strip()
+ self.msg = "Missing {} required parameter{}: {}".format(number_of_parameters, "s" if int(number_of_parameters) > 1 else "", missing_parameters)
+ super().__init__(self.msg)
+
+class SchemaValidationError(OpenApiException):
+ def __init__(self, validation_errors: typing.List[typing.Union[ApiValueError, ApiTypeError]]):
+ """ Aggregates schema validation errors
+
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+ path_to_item (list): a list of keys an indices to get to the
+ current_item
+ None if unset
+ valid_classes (tuple): the primitive classes that current item
+ should be an instance of
+ None if unset
+ key_type (bool): False if our value is a value in a dict
+ True if it is a key in a dict
+ False if our item is an item in a list
+ None if unset
+ """
+ self.validation_errors = validation_errors
+ self.type_errors: typing.List[ApiTypeError] = []
+ self.value_errors: typing.List[ApiValueError] = []
+ self.missing_required_properties_errors: typing.List[MissingRequiredPropertiesError] = []
+ for error in validation_errors:
+ if isinstance(error, ApiTypeError):
+ self.type_errors.append(error)
+ elif isinstance(error, ApiValueError):
+ self.value_errors.append(error)
+ elif isinstance(error, MissingRequiredPropertiesError):
+ self.missing_required_properties_errors.append(error)
+ sub_msgs: typing.List[str] = []
+ if len(self.missing_required_properties_errors) > 0:
+ for error in self.missing_required_properties_errors:
+ sub_msgs.append(str(error))
+ if len(self.type_errors) > 0:
+ for type_error in self.type_errors:
+ if isinstance(type_error, MissingRequiredPropertiesError) or isinstance(type_error, MissingRequiredParametersError):
+ sub_msgs.append(str(type_error))
+ else:
+ classes = ", ".join([cls.__name__ for cls in type_error.valid_classes])
+ msg = 'Got {}({}) for required type {} at {}'.format(
+ type(type_error.invalid_value).__name__, type_error.invalid_value, classes, render_path(type_error.path_to_item))
+ sub_msgs.append(msg)
+ if len(self.value_errors) > 0:
+ for value_error in self.value_errors:
+ sub_msgs.append(value_error.full_msg)
+ sub_msg = ". ".join(sub_msgs)
+ num_validation_errors = len(self.validation_errors)
+ self.msg = "{} invalid argument{}. {}".format(num_validation_errors, "s" if num_validation_errors > 1 else "", sub_msg)
+ super().__init__(self.msg)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/exceptions_base.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/exceptions_base.py
new file mode 100644
index 000000000..128310a20
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/exceptions_base.py
@@ -0,0 +1,59 @@
+class OpenApiException(Exception):
+ """The base exception class for all OpenAPIExceptions"""
+
+class ApiTypeError(OpenApiException, TypeError):
+ def __init__(self, msg, invalid_value=None, path_to_item=None, valid_classes=None,
+ key_type=None):
+ """ Raises an exception for TypeErrors
+
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+            path_to_item (list): a list of keys and indices to get to the
+ current_item
+ None if unset
+ valid_classes (tuple): the primitive classes that current item
+ should be an instance of
+ None if unset
+ key_type (bool): False if our value is a value in a dict
+ True if it is a key in a dict
+ False if our item is an item in a list
+ None if unset
+ """
+ self.invalid_value = invalid_value
+ self.path_to_item = path_to_item
+ self.valid_classes = valid_classes
+ self.key_type = key_type
+ full_msg = msg
+ if path_to_item:
+ full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiTypeError, self).__init__(full_msg)
+
+
+class ApiValueError(OpenApiException, ValueError):
+ def __init__(self, msg, path_to_item=None):
+ """
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+            path_to_item (list): the path to the exception in the
+ received_data dict. None if unset
+ """
+
+ self.path_to_item = path_to_item
+ self.full_msg = msg
+ if path_to_item:
+ self.full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiValueError, self).__init__(self.full_msg)
+
+ @property
+ def path(self) -> str:
+ return ".".join([step for step in self.path_to_item if step != "args[0]"])
+
+
+def render_path(path_to_item):
+ """Returns a string representation of a path"""
+ str_path = [str(step) if isinstance(step, int) else step for step in path_to_item]
+ return "\"" + ".".join([step for step in str_path if step != "args[0]"]) + "\""
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/model/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/model/__init__.py
new file mode 100644
index 000000000..31f1c1bc0
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/model/__init__.py
@@ -0,0 +1,5 @@
+# we can not import model classes here because that would create a circular
+# reference which would not work in python2
+# do not import all models into this module because that uses a lot of memory and stack frames
+# if you need the ability to import all models from one package, import them with
+# from python_readme_header_snippet.models import ModelA, ModelB
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/models/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/models/__init__.py
new file mode 100644
index 000000000..97affcb72
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/models/__init__.py
@@ -0,0 +1,13 @@
+# coding: utf-8
+
+# flake8: noqa
+
+# import all models into this package
+# if you have many models here with many references from one model to another this may
+# raise a RecursionError
+# to avoid this, import only the models that you directly need like:
+# from python_readme_header_snippet.model.pet import Pet
+# or import this package, but before doing it, use:
+# import sys
+# sys.setrecursionlimit(n)
+
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/__init__.py
new file mode 100644
index 000000000..be9d394fe
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/__init__.py
@@ -0,0 +1,9 @@
+# do not import all endpoints into this module because that uses a lot of memory and stack frames
+# if you need the ability to import all endpoints from this module, import them with
+# from python_readme_header_snippet.apis.path_to_api import path_to_api
+
+import enum
+
+
+class PathValues(str, enum.Enum):
+ SIMPLEENDPOINT = "/simple-endpoint"
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/__init__.py
new file mode 100644
index 000000000..d77b6e7be
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/__init__.py
@@ -0,0 +1,7 @@
+# do not import all endpoints into this module because that uses a lot of memory and stack frames
+# if you need the ability to import all endpoints from this module, import them with
+# from python_readme_header_snippet.paths.simple_endpoint import Api
+
+from python_readme_header_snippet.paths import PathValues
+
+path = PathValues.SIMPLEENDPOINT
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/get.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/get.py
new file mode 100644
index 000000000..0f360c041
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/get.py
@@ -0,0 +1,316 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from dataclasses import dataclass
+import typing_extensions
+import urllib3
+from pydantic import RootModel
+from python_readme_header_snippet.request_before_hook import request_before_hook
+import json
+from urllib3._collections import HTTPHeaderDict
+
+from python_readme_header_snippet.api_response import AsyncGeneratorResponse
+from python_readme_header_snippet import api_client, exceptions
+from datetime import date, datetime # noqa: F401
+import decimal # noqa: F401
+import functools # noqa: F401
+import io # noqa: F401
+import re # noqa: F401
+import typing # noqa: F401
+import typing_extensions # noqa: F401
+import uuid # noqa: F401
+
+import frozendict # noqa: F401
+
+from python_readme_header_snippet import schemas # noqa: F401
+
+
+
+from ...api_client import Dictionary
+
+from . import path
+
+_auth = [
+ 'ApiKeyAuth',
+]
+SchemaFor200ResponseBodyApplicationJson = schemas.DictSchema
+
+
+@dataclass
+class ApiResponseFor200(api_client.ApiResponse):
+ body: typing.Dict[str, typing.Union[bool, date, datetime, dict, float, int, list, str, None]]
+
+
+@dataclass
+class ApiResponseFor200Async(api_client.AsyncApiResponse):
+ body: typing.Dict[str, typing.Union[bool, date, datetime, dict, float, int, list, str, None]]
+
+
+_response_for_200 = api_client.OpenApiResponse(
+ response_cls=ApiResponseFor200,
+ response_cls_async=ApiResponseFor200Async,
+ content={
+ 'application/json': api_client.MediaType(
+ schema=SchemaFor200ResponseBodyApplicationJson),
+ },
+)
+_status_code_to_response = {
+ '200': _response_for_200,
+}
+_all_accept_content_types = (
+ 'application/json',
+)
+
+
+class BaseApi(api_client.Api):
+
+ def _fetch_mapped_args(
+ self,
+ ) -> api_client.MappedArgs:
+ args: api_client.MappedArgs = api_client.MappedArgs()
+ return args
+
+ async def _afetch_oapg(
+ self,
+ skip_deserialization: bool = True,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ accept_content_types: typing.Tuple[str] = _all_accept_content_types,
+ stream: bool = False,
+ ) -> typing.Union[
+ ApiResponseFor200Async,
+ api_client.ApiResponseWithoutDeserializationAsync,
+ AsyncGeneratorResponse,
+ ]:
+ """
+ Fetches a JSON value based on input parameter
+ :param skip_deserialization: If true then api_response.response will be set but
+ api_response.body and api_response.headers will not be deserialized into schema
+ class instances
+ """
+ used_path = path.value
+
+ _headers = HTTPHeaderDict()
+ # TODO add cookie handling
+ if accept_content_types:
+ for accept_content_type in accept_content_types:
+ _headers.add('Accept', accept_content_type)
+ method = 'get'.upper()
+ request_before_hook(
+ resource_path=used_path,
+ method=method,
+ configuration=self.api_client.configuration,
+ auth_settings=_auth,
+ headers=_headers,
+ )
+
+ response = await self.api_client.async_call_api(
+ resource_path=used_path,
+ method=method,
+ headers=_headers,
+ auth_settings=_auth,
+ timeout=timeout,
+ )
+
+ if stream:
+ if not 200 <= response.http_response.status <= 299:
+ body = (await response.http_response.content.read()).decode("utf-8")
+ raise exceptions.ApiStreamingException(
+ status=response.http_response.status,
+ reason=response.http_response.reason,
+ body=body,
+ )
+
+ async def stream_iterator():
+ """
+ iterates over response.http_response.content and closes connection once iteration has finished
+ """
+ async for line in response.http_response.content:
+ if line == b'\r\n':
+ continue
+ yield line
+ response.http_response.close()
+ await response.session.close()
+ return AsyncGeneratorResponse(
+ content=stream_iterator(),
+ headers=response.http_response.headers,
+ status=response.http_response.status,
+ response=response.http_response
+ )
+
+ response_for_status = _status_code_to_response.get(str(response.http_response.status))
+ if response_for_status:
+ api_response = await response_for_status.deserialize_async(
+ response,
+ self.api_client.configuration,
+ skip_deserialization=skip_deserialization
+ )
+ else:
+ # If response data is JSON then deserialize for SDK consumer convenience
+ is_json = api_client.JSONDetector._content_type_is_json(response.http_response.headers.get('Content-Type', ''))
+ api_response = api_client.ApiResponseWithoutDeserializationAsync(
+ body=await response.http_response.json() if is_json else await response.http_response.text(),
+ response=response.http_response,
+ round_trip_time=response.round_trip_time,
+ status=response.http_response.status,
+ headers=response.http_response.headers,
+ )
+
+ if not 200 <= api_response.status <= 299:
+ raise exceptions.ApiException(api_response=api_response)
+
+ # cleanup session / response
+ response.http_response.close()
+ await response.session.close()
+
+ return api_response
+
+
+ def _fetch_oapg(
+ self,
+ skip_deserialization: bool = True,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ accept_content_types: typing.Tuple[str] = _all_accept_content_types,
+ stream: bool = False,
+ ) -> typing.Union[
+ ApiResponseFor200,
+ api_client.ApiResponseWithoutDeserialization,
+ ]:
+ """
+ Fetches a JSON value based on input parameter
+ :param skip_deserialization: If true then api_response.response will be set but
+ api_response.body and api_response.headers will not be deserialized into schema
+ class instances
+ """
+ used_path = path.value
+
+ _headers = HTTPHeaderDict()
+ # TODO add cookie handling
+ if accept_content_types:
+ for accept_content_type in accept_content_types:
+ _headers.add('Accept', accept_content_type)
+ method = 'get'.upper()
+ request_before_hook(
+ resource_path=used_path,
+ method=method,
+ configuration=self.api_client.configuration,
+ auth_settings=_auth,
+ headers=_headers,
+ )
+
+ response = self.api_client.call_api(
+ resource_path=used_path,
+ method=method,
+ headers=_headers,
+ auth_settings=_auth,
+ timeout=timeout,
+ )
+
+ response_for_status = _status_code_to_response.get(str(response.http_response.status))
+ if response_for_status:
+ api_response = response_for_status.deserialize(
+ response,
+ self.api_client.configuration,
+ skip_deserialization=skip_deserialization
+ )
+ else:
+ # If response data is JSON then deserialize for SDK consumer convenience
+ is_json = api_client.JSONDetector._content_type_is_json(response.http_response.headers.get('Content-Type', ''))
+ api_response = api_client.ApiResponseWithoutDeserialization(
+ body=json.loads(response.http_response.data) if is_json else response.http_response.data,
+ response=response.http_response,
+ round_trip_time=response.round_trip_time,
+ status=response.http_response.status,
+ headers=response.http_response.headers,
+ )
+
+ if not 200 <= api_response.status <= 299:
+ raise exceptions.ApiException(api_response=api_response)
+
+ return api_response
+
+
+class FetchRaw(BaseApi):
+ # this class is used by api classes that refer to endpoints with operationId fn names
+
+ async def afetch(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200Async,
+ api_client.ApiResponseWithoutDeserializationAsync,
+ AsyncGeneratorResponse,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return await self._afetch_oapg(
+ )
+
+ def fetch(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200,
+ api_client.ApiResponseWithoutDeserialization,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return self._fetch_oapg(
+ )
+
+class Fetch(BaseApi):
+
+ async def afetch(
+ self,
+ validate: bool = False,
+ ):
+ raw_response = await self.raw.afetch(
+ )
+ if validate:
+ return Dictionary(**raw_response.body)
+ return api_client.construct_model_instance(Dictionary, raw_response.body)
+
+
+ def fetch(
+ self,
+ validate: bool = False,
+ ):
+ raw_response = self.raw.fetch(
+ )
+ if validate:
+ return Dictionary(**raw_response.body)
+ return api_client.construct_model_instance(Dictionary, raw_response.body)
+
+
+class ApiForget(BaseApi):
+ # this class is used by api classes that refer to endpoints by path and http method names
+
+ async def aget(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200Async,
+ api_client.ApiResponseWithoutDeserializationAsync,
+ AsyncGeneratorResponse,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return await self._afetch_oapg(
+ )
+
+ def get(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200,
+ api_client.ApiResponseWithoutDeserialization,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return self._fetch_oapg(
+ )
+
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/get.pyi b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/get.pyi
new file mode 100644
index 000000000..51d80da42
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/paths/simple_endpoint/get.pyi
@@ -0,0 +1,308 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from dataclasses import dataclass
+import typing_extensions
+import urllib3
+from pydantic import RootModel
+from python_readme_header_snippet.request_before_hook import request_before_hook
+import json
+from urllib3._collections import HTTPHeaderDict
+
+from python_readme_header_snippet.api_response import AsyncGeneratorResponse
+from python_readme_header_snippet import api_client, exceptions
+from datetime import date, datetime # noqa: F401
+import decimal # noqa: F401
+import functools # noqa: F401
+import io # noqa: F401
+import re # noqa: F401
+import typing # noqa: F401
+import typing_extensions # noqa: F401
+import uuid # noqa: F401
+
+import frozendict # noqa: F401
+
+from python_readme_header_snippet import schemas # noqa: F401
+
+
+
+from ...api_client import Dictionary
+
+SchemaFor200ResponseBodyApplicationJson = schemas.DictSchema
+
+
+@dataclass
+class ApiResponseFor200(api_client.ApiResponse):
+ body: typing.Dict[str, typing.Union[bool, date, datetime, dict, float, int, list, str, None]]
+
+
+@dataclass
+class ApiResponseFor200Async(api_client.AsyncApiResponse):
+ body: typing.Dict[str, typing.Union[bool, date, datetime, dict, float, int, list, str, None]]
+
+
+_response_for_200 = api_client.OpenApiResponse(
+ response_cls=ApiResponseFor200,
+ response_cls_async=ApiResponseFor200Async,
+ content={
+ 'application/json': api_client.MediaType(
+ schema=SchemaFor200ResponseBodyApplicationJson),
+ },
+)
+_all_accept_content_types = (
+ 'application/json',
+)
+
+
+class BaseApi(api_client.Api):
+
+ def _fetch_mapped_args(
+ self,
+ ) -> api_client.MappedArgs:
+ args: api_client.MappedArgs = api_client.MappedArgs()
+ return args
+
+ async def _afetch_oapg(
+ self,
+ skip_deserialization: bool = True,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ accept_content_types: typing.Tuple[str] = _all_accept_content_types,
+ stream: bool = False,
+ ) -> typing.Union[
+ ApiResponseFor200Async,
+ api_client.ApiResponseWithoutDeserializationAsync,
+ AsyncGeneratorResponse,
+ ]:
+ """
+ Fetches a JSON value based on input parameter
+ :param skip_deserialization: If true then api_response.response will be set but
+ api_response.body and api_response.headers will not be deserialized into schema
+ class instances
+ """
+ used_path = path.value
+
+ _headers = HTTPHeaderDict()
+ # TODO add cookie handling
+ if accept_content_types:
+ for accept_content_type in accept_content_types:
+ _headers.add('Accept', accept_content_type)
+ method = 'get'.upper()
+ request_before_hook(
+ resource_path=used_path,
+ method=method,
+ configuration=self.api_client.configuration,
+ auth_settings=_auth,
+ headers=_headers,
+ )
+
+ response = await self.api_client.async_call_api(
+ resource_path=used_path,
+ method=method,
+ headers=_headers,
+ auth_settings=_auth,
+ timeout=timeout,
+ )
+
+ if stream:
+ if not 200 <= response.http_response.status <= 299:
+ body = (await response.http_response.content.read()).decode("utf-8")
+ raise exceptions.ApiStreamingException(
+ status=response.http_response.status,
+ reason=response.http_response.reason,
+ body=body,
+ )
+
+ async def stream_iterator():
+ """
+ iterates over response.http_response.content and closes connection once iteration has finished
+ """
+ async for line in response.http_response.content:
+ if line == b'\r\n':
+ continue
+ yield line
+ response.http_response.close()
+ await response.session.close()
+ return AsyncGeneratorResponse(
+ content=stream_iterator(),
+ headers=response.http_response.headers,
+ status=response.http_response.status,
+ response=response.http_response
+ )
+
+ response_for_status = _status_code_to_response.get(str(response.http_response.status))
+ if response_for_status:
+ api_response = await response_for_status.deserialize_async(
+ response,
+ self.api_client.configuration,
+ skip_deserialization=skip_deserialization
+ )
+ else:
+ # If response data is JSON then deserialize for SDK consumer convenience
+ is_json = api_client.JSONDetector._content_type_is_json(response.http_response.headers.get('Content-Type', ''))
+ api_response = api_client.ApiResponseWithoutDeserializationAsync(
+ body=await response.http_response.json() if is_json else await response.http_response.text(),
+ response=response.http_response,
+ round_trip_time=response.round_trip_time,
+ status=response.http_response.status,
+ headers=response.http_response.headers,
+ )
+
+ if not 200 <= api_response.status <= 299:
+ raise exceptions.ApiException(api_response=api_response)
+
+ # cleanup session / response
+ response.http_response.close()
+ await response.session.close()
+
+ return api_response
+
+
+ def _fetch_oapg(
+ self,
+ skip_deserialization: bool = True,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ accept_content_types: typing.Tuple[str] = _all_accept_content_types,
+ stream: bool = False,
+ ) -> typing.Union[
+ ApiResponseFor200,
+ api_client.ApiResponseWithoutDeserialization,
+ ]:
+ """
+ Fetches a JSON value based on input parameter
+ :param skip_deserialization: If true then api_response.response will be set but
+ api_response.body and api_response.headers will not be deserialized into schema
+ class instances
+ """
+ used_path = path.value
+
+ _headers = HTTPHeaderDict()
+ # TODO add cookie handling
+ if accept_content_types:
+ for accept_content_type in accept_content_types:
+ _headers.add('Accept', accept_content_type)
+ method = 'get'.upper()
+ request_before_hook(
+ resource_path=used_path,
+ method=method,
+ configuration=self.api_client.configuration,
+ auth_settings=_auth,
+ headers=_headers,
+ )
+
+ response = self.api_client.call_api(
+ resource_path=used_path,
+ method=method,
+ headers=_headers,
+ auth_settings=_auth,
+ timeout=timeout,
+ )
+
+ response_for_status = _status_code_to_response.get(str(response.http_response.status))
+ if response_for_status:
+ api_response = response_for_status.deserialize(
+ response,
+ self.api_client.configuration,
+ skip_deserialization=skip_deserialization
+ )
+ else:
+ # If response data is JSON then deserialize for SDK consumer convenience
+ is_json = api_client.JSONDetector._content_type_is_json(response.http_response.headers.get('Content-Type', ''))
+ api_response = api_client.ApiResponseWithoutDeserialization(
+ body=json.loads(response.http_response.data) if is_json else response.http_response.data,
+ response=response.http_response,
+ round_trip_time=response.round_trip_time,
+ status=response.http_response.status,
+ headers=response.http_response.headers,
+ )
+
+ if not 200 <= api_response.status <= 299:
+ raise exceptions.ApiException(api_response=api_response)
+
+ return api_response
+
+
+class FetchRaw(BaseApi):
+ # this class is used by api classes that refer to endpoints with operationId fn names
+
+ async def afetch(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200Async,
+ api_client.ApiResponseWithoutDeserializationAsync,
+ AsyncGeneratorResponse,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return await self._afetch_oapg(
+ )
+
+ def fetch(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200,
+ api_client.ApiResponseWithoutDeserialization,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return self._fetch_oapg(
+ )
+
+class Fetch(BaseApi):
+
+ async def afetch(
+ self,
+ validate: bool = False,
+ ):
+ raw_response = await self.raw.afetch(
+ )
+ if validate:
+ return Dictionary(**raw_response.body)
+ return api_client.construct_model_instance(Dictionary, raw_response.body)
+
+
+ def fetch(
+ self,
+ validate: bool = False,
+ ):
+ raw_response = self.raw.fetch(
+ )
+ if validate:
+ return Dictionary(**raw_response.body)
+ return api_client.construct_model_instance(Dictionary, raw_response.body)
+
+
+class ApiForget(BaseApi):
+ # this class is used by api classes that refer to endpoints by path and http method names
+
+ async def aget(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200Async,
+ api_client.ApiResponseWithoutDeserializationAsync,
+ AsyncGeneratorResponse,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return await self._afetch_oapg(
+ )
+
+ def get(
+ self,
+ ) -> typing.Union[
+ ApiResponseFor200,
+ api_client.ApiResponseWithoutDeserialization,
+ ]:
+ args = self._fetch_mapped_args(
+ )
+ return self._fetch_oapg(
+ )
+
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/pydantic/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/pydantic/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_after_hook.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_after_hook.py
new file mode 100644
index 000000000..e6dcbadb3
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_after_hook.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import typing
+from urllib3._collections import HTTPHeaderDict
+from python_readme_header_snippet.configuration import Configuration
+
+def request_after_hook(
+ resource_path: str,
+ method: str,
+ configuration: Configuration,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ body: typing.Any = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ auth_settings: typing.Optional[typing.List[str]] = None,
+):
+ pass
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_before_hook.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_before_hook.py
new file mode 100644
index 000000000..2d070c7db
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_before_hook.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import typing
+from urllib3._collections import HTTPHeaderDict
+from python_readme_header_snippet.configuration import Configuration
+
+def request_before_hook(
+ resource_path: str,
+ method: str,
+ configuration: Configuration,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ body: typing.Any = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ auth_settings: typing.Optional[typing.List[str]] = None,
+):
+ pass
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_before_url_hook.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_before_url_hook.py
new file mode 100644
index 000000000..886a150e7
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/request_before_url_hook.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import typing
+from urllib3._collections import HTTPHeaderDict
+from python_readme_header_snippet.configuration import Configuration
+
+def request_before_url_hook(
+ resource_path_ref: typing.List[str],
+ method: str,
+ configuration: Configuration,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ body: typing.Any = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+ auth_settings: typing.Optional[typing.List[str]] = None,
+):
+ pass
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/rest.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/rest.py
new file mode 100644
index 000000000..346594bab
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/rest.py
@@ -0,0 +1,272 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+
+import logging
+import ssl
+from urllib.parse import urlencode
+import typing
+import aiohttp
+
+import certifi
+import urllib3
+import time
+from urllib3._collections import HTTPHeaderDict
+
+from python_readme_header_snippet.exceptions import ApiException, ApiValueError
+
+
+logger = logging.getLogger(__name__)
+
+class ResponseWrapper:
+ def __init__(self, http_response: urllib3.HTTPResponse, round_trip_time: float):
+ self.http_response = http_response
+ self.round_trip_time = round_trip_time
+
+class AsyncResponseWrapper:
+ def __init__(self, http_response: aiohttp.ClientResponse, round_trip_time: float, session: aiohttp.ClientSession):
+ self.http_response = http_response
+ self.round_trip_time = round_trip_time
+ self.session = session
+
+class RESTClientObject(object):
+
+ def __init__(self, configuration, pools_size=4, maxsize=None):
+ # urllib3.PoolManager will pass all kw parameters to connectionpool
+ # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75
+ # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680
+ # maxsize is the number of requests to host that are allowed in parallel
+ # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html
+
+ # cert_reqs
+ if configuration.verify_ssl:
+ cert_reqs = ssl.CERT_REQUIRED
+ else:
+ cert_reqs = ssl.CERT_NONE
+
+ # ca_certs
+ if configuration.ssl_ca_cert:
+ ca_certs = configuration.ssl_ca_cert
+ else:
+ # if not set certificate file, use Mozilla's root certificates.
+ ca_certs = certifi.where()
+
+ addition_pool_args = {}
+ if configuration.assert_hostname is not None:
+ addition_pool_args['assert_hostname'] = configuration.assert_hostname
+
+ if configuration.retries is not None:
+ addition_pool_args['retries'] = configuration.retries
+
+ if configuration.socket_options is not None:
+ addition_pool_args['socket_options'] = configuration.socket_options
+
+ if maxsize is None:
+ if configuration.connection_pool_maxsize is not None:
+ maxsize = configuration.connection_pool_maxsize
+ else:
+ maxsize = 4
+
+ # https pool manager
+ if configuration.proxy:
+ self.pool_manager = urllib3.ProxyManager(
+ num_pools=pools_size,
+ maxsize=maxsize,
+ cert_reqs=cert_reqs,
+ ca_certs=ca_certs,
+ cert_file=configuration.cert_file,
+ key_file=configuration.key_file,
+ proxy_url=configuration.proxy,
+ proxy_headers=configuration.proxy_headers,
+ **addition_pool_args
+ )
+ else:
+ self.pool_manager = urllib3.PoolManager(
+ num_pools=pools_size,
+ maxsize=maxsize,
+ cert_reqs=cert_reqs,
+ ca_certs=ca_certs,
+ cert_file=configuration.cert_file,
+ key_file=configuration.key_file,
+ **addition_pool_args
+ )
+
+ def request(
+ self,
+ method: str,
+ url: str,
+ headers: typing.Optional[HTTPHeaderDict] = None,
+ fields: typing.Optional[typing.Tuple[typing.Tuple[str, typing.Any], ...]] = None,
+ body: typing.Optional[typing.Union[str, bytes]] = None,
+ stream: bool = False,
+ timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+ ) -> ResponseWrapper:
+ """Perform requests.
+
+ :param method: http request method
+ :param url: http request url
+ :param headers: http request headers
+ :param body: request body, for other types
+ :param fields: request parameters for
+ `application/x-www-form-urlencoded`
+ or `multipart/form-data`
+ :param stream: if True, the urllib3.HTTPResponse object will
+ be returned without reading/decoding response
+ data. Default is False.
+ :param timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ """
+ method = method.upper()
+ if method not in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS']:
+ raise Exception(f'method of "{method}" is invalid')
+
+ if fields and body:
+ raise ApiValueError(
+ "body parameter cannot be used with fields parameter."
+ )
+
+ fields = fields or {}
+ headers = headers or {}
+
+ if timeout:
+ if isinstance(timeout, (int, float)):
+ timeout = urllib3.Timeout(total=timeout)
+ elif (isinstance(timeout, tuple) and
+ len(timeout) == 2):
+ timeout = urllib3.Timeout(connect=timeout[0], read=timeout[1])
+
+ t1 = time.time()
+
+ try:
+ # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
+ if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
+ if 'Content-Type' not in headers and body is None:
+ r = self.pool_manager.request(
+ method,
+ url,
+ preload_content=not stream,
+ timeout=timeout,
+ headers=headers
+ )
+ elif headers['Content-Type'] == 'application/x-www-form-urlencoded':
+ r = self.pool_manager.request(
+ method, url,
+ body=body,
+ fields=fields,
+ encode_multipart=False,
+ preload_content=not stream,
+ timeout=timeout,
+ headers=headers)
+ elif headers['Content-Type'] == 'multipart/form-data':
+ # must del headers['Content-Type'], or the correct
+ # Content-Type which generated by urllib3 will be
+ # overwritten.
+ del headers['Content-Type']
+ r = self.pool_manager.request(
+ method, url,
+ fields=fields,
+ encode_multipart=True,
+ preload_content=not stream,
+ timeout=timeout,
+ headers=headers)
+ # Pass a `string` parameter directly in the body to support
+ # other content types than Json when `body` argument is
+ # provided in serialized form
+ elif isinstance(body, str) or isinstance(body, bytes):
+ request_body = body
+ r = self.pool_manager.request(
+ method, url,
+ body=request_body,
+ preload_content=not stream,
+ timeout=timeout,
+ headers=headers)
+ else:
+ # Cannot generate the request from given parameters
+ msg = """Cannot prepare a request message for provided
+ arguments. Please check that your arguments match
+ declared content type."""
+ raise ApiException(status=0, reason=msg)
+ # For `GET`, `HEAD`
+ else:
+ r = self.pool_manager.request(method, url,
+ preload_content=not stream,
+ timeout=timeout,
+ headers=headers)
+ except urllib3.exceptions.SSLError as e:
+ msg = "{0}\n{1}".format(type(e).__name__, str(e))
+ raise ApiException(status=0, reason=msg)
+
+ if not stream:
+ # log response body
+ logger.debug("response body: %s", r.data)
+
+ t2 = time.time()
+
+ return ResponseWrapper(r, t2 - t1)
+
+ def GET(self, url, headers=None, stream=False,
+ timeout=None, fields=None) -> ResponseWrapper:
+ return self.request("GET", url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ fields=fields)
+
+ def HEAD(self, url, headers=None, stream=False,
+ timeout=None, fields=None) -> ResponseWrapper:
+ return self.request("HEAD", url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ fields=fields)
+
+ def OPTIONS(self, url, headers=None,
+ body=None, stream=False, timeout=None, fields=None) -> ResponseWrapper:
+ return self.request("OPTIONS", url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ body=body, fields=fields)
+
+ def DELETE(self, url, headers=None, body=None,
+ stream=False, timeout=None, fields=None) -> ResponseWrapper:
+ return self.request("DELETE", url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ body=body, fields=fields)
+
+ def POST(self, url, headers=None,
+ body=None, stream=False, timeout=None, fields=None) -> ResponseWrapper:
+ return self.request("POST", url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ body=body, fields=fields)
+
+ def PUT(self, url, headers=None,
+ body=None, stream=False, timeout=None, fields=None) -> ResponseWrapper:
+ return self.request("PUT", url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ body=body, fields=fields)
+
+ def PATCH(self, url, headers=None,
+ body=None, stream=False, timeout=None, fields=None) -> ResponseWrapper:
+ return self.request("PATCH", url,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ body=body, fields=fields)
\ No newline at end of file
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/schemas.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/schemas.py
new file mode 100644
index 000000000..549bf95b5
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/schemas.py
@@ -0,0 +1,2428 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from collections import defaultdict
+from datetime import date, datetime, timedelta # noqa: F401
+import functools
+import decimal
+import io
+import re
+import types
+import typing
+import typing_extensions
+import uuid
+
+from dateutil.parser.isoparser import isoparser, _takes_ascii
+import frozendict
+
+from python_readme_header_snippet.exceptions_base import (
+ ApiTypeError,
+ ApiValueError,
+)
+from python_readme_header_snippet.configuration import (
+ Configuration,
+)
+from python_readme_header_snippet.exceptions import SchemaValidationError
+from python_readme_header_snippet.exceptions import render_path
+from python_readme_header_snippet.validation_metadata import ValidationMetadata
+from python_readme_header_snippet.exceptions import AnyOfValidationError
+from python_readme_header_snippet.exceptions import MissingRequiredPropertiesError
+
+Primitive: typing_extensions.TypeAlias = typing.Union[int, float, bool, str]
+
+class Unset(object):
+ """
+ An instance of this class is set as the default value for object type(dict) properties that are optional
+ When a property has an unset value, that property will not be assigned in the dict
+ """
+ pass
+
+unset = Unset()
+
+none_type = type(None)
+file_type = io.IOBase
+
+
+class FileIO(io.FileIO):
+ """
+ A class for storing files
+ Note: this class is not immutable
+ """
+
+ def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]):
+ if isinstance(arg, (io.FileIO, io.BufferedReader)):
+ if arg.closed:
+ raise ApiValueError('Invalid file state; file is closed and must be open')
+ arg.close()
+ inst = super(FileIO, cls).__new__(cls, arg.name)
+ super(FileIO, inst).__init__(arg.name)
+ return inst
+ raise ApiValueError('FileIO must be passed arg which contains the open file')
+
+ def __init__(self, arg: typing.Union[io.FileIO, io.BufferedReader]):
+ pass
+
+
+def update(d: dict, u: dict):
+ """
+ Adds u to d
+ Where each dict is defaultdict(set)
+ """
+ if not u:
+ return d
+ for k, v in u.items():
+ if k not in d:
+ d[k] = v
+ else:
+ d[k] = d[k] | v
+
+
+class Singleton:
+ """
+ Enums and singletons are the same
+ The same instance is returned for a given key of (cls, arg)
+ """
+ _instances = {}
+
+ def __new__(cls, arg: typing.Any, **kwargs):
+ """
+ cls base classes: BoolClass, NoneClass, str, decimal.Decimal
+ The 3rd key is used in the tuple below for a corner case where an enum contains integer 1
+ However 1.0 can also be ingested into that enum schema because 1.0 == 1 and
+ Decimal('1.0') == Decimal('1')
+ But if we omitted the 3rd value in the key, then Decimal('1.0') would be stored as Decimal('1')
+ and json serializing that instance would be '1' rather than the expected '1.0'
+ Adding the 3rd value, the str of arg ensures that 1.0 -> Decimal('1.0') which is serialized as 1.0
+ """
+ key = (cls, arg, str(arg))
+ if key not in cls._instances:
+ if isinstance(arg, (none_type, bool, BoolClass, NoneClass)):
+ inst = super().__new__(cls)
+ cls._instances[key] = inst
+ else:
+ cls._instances[key] = super().__new__(cls, arg)
+ return cls._instances[key]
+
+ def __repr__(self):
+ if isinstance(self, NoneClass):
+ return f'<{self.__class__.__name__}: None>'
+ elif isinstance(self, BoolClass):
+ if bool(self):
+ return f'<{self.__class__.__name__}: True>'
+ return f'<{self.__class__.__name__}: False>'
+ return f'<{self.__class__.__name__}: {super().__repr__()}>'
+
+
+class classproperty:
+
+ def __init__(self, fget):
+ self.fget = fget
+
+ def __get__(self, owner_self, owner_cls):
+ return self.fget(owner_cls)
+
+
+class NoneClass(Singleton):
+ @classproperty
+ def NONE(cls):
+ return cls(None)
+
+ def __bool__(self) -> bool:
+ return False
+
+
+class BoolClass(Singleton):
+ @classproperty
+ def TRUE(cls):
+ return cls(True)
+
+ @classproperty
+ def FALSE(cls):
+ return cls(False)
+
+ @functools.lru_cache()
+ def __bool__(self) -> bool:
+ for key, instance in self._instances.items():
+ if self is instance:
+ return bool(key[1])
+ raise ValueError('Unable to find the boolean value of this instance')
+
+ def __str__(self) -> str:
+ return str(bool(self))
+
+
+class MetaOapgTyped:
+ exclusive_maximum: typing.Union[int, float]
+ inclusive_maximum: typing.Union[int, float]
+ exclusive_minimum: typing.Union[int, float]
+ inclusive_minimum: typing.Union[int, float]
+ max_items: int
+ min_items: int
+ discriminator: typing.Dict[str, typing.Dict[str, typing.Type['Schema']]]
+ x_konfig_strip: bool
+
+
+ class properties:
+ # to hold object properties
+ pass
+
+ additional_properties: typing.Optional[typing.Type['Schema']]
+ max_properties: int
+ min_properties: int
+ all_of: typing.Callable[[], typing.List[typing.Type['Schema']]]
+ one_of: typing.Callable[[], typing.List[typing.Type['Schema']]]
+ any_of: typing.Callable[[], typing.List[typing.Type['Schema']]]
+ not_schema: typing.Type['Schema']
+ max_length: int
+ min_length: int
+ items: typing.Type['Schema']
+
+
+class Schema:
+ """
+ the base class of all swagger/openapi schemas/models
+ """
+ __inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict.frozendict, FileIO, bytes, BoolClass, NoneClass}
+ _types: typing.Set[typing.Type]
+ MetaOapg = MetaOapgTyped
+
+ @staticmethod
+ def __get_valid_classes_phrase(input_classes):
+ """Returns a string phrase describing what types are allowed"""
+ all_classes = list(input_classes)
+ all_classes = sorted(all_classes, key=lambda cls: cls.__name__)
+ all_class_names = [cls.__name__ for cls in all_classes]
+ if len(all_class_names) == 1:
+ return "is {0}".format(all_class_names[0])
+ return "is one of [{0}]".format(", ".join(all_class_names))
+
+ @staticmethod
+ def _get_class_oapg(item_cls: typing.Union[types.FunctionType, staticmethod, typing.Type['Schema']]) -> typing.Type['Schema']:
+ if isinstance(item_cls, types.FunctionType):
+ # referenced schema
+ return item_cls()
+ elif isinstance(item_cls, staticmethod):
+ # referenced schema
+ return item_cls.__func__()
+ return item_cls
+
+ @classmethod
+ def __type_error_message(
+ cls, var_value=None, var_name=None, valid_classes=None, key_type=None
+ ):
+ """
+ Keyword Args:
+ var_value (any): the variable which has the type_error
+ var_name (str): the name of the variable which has the typ error
+ valid_classes (tuple): the accepted classes for current_item's
+ value
+ key_type (bool): False if our value is a value in a dict
+ True if it is a key in a dict
+ False if our item is an item in a tuple
+ """
+ key_or_value = "value"
+ if key_type:
+ key_or_value = "key"
+ valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes)
+ msg = "Invalid type. Required {0} type {1} and " "passed type was {2} for \"{3}\"".format(
+ key_or_value,
+ valid_classes_phrase,
+ type(var_value).__name__,
+ var_name,
+ )
+ return msg
+
+ @classmethod
+ def __get_type_error(cls, var_value, path_to_item, valid_classes, key_type=False):
+ error_msg = cls.__type_error_message(
+ var_name=path_to_item[-1],
+ var_value=var_value,
+ valid_classes=valid_classes,
+ key_type=key_type,
+ )
+ return ApiTypeError(
+ error_msg,
+ invalid_value=var_value,
+ path_to_item=path_to_item,
+ valid_classes=valid_classes,
+ key_type=key_type,
+ )
+
+ @classmethod
+ def _validate_oapg(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata,
+ ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
+ """
+ Schema _validate_oapg
+ All keyword validation except for type checking was done in calling stack frames
+ If those validations passed, the validated classes are collected in path_to_schemas
+
+ Returns:
+ path_to_schemas: a map of path to schemas
+
+ Raises:
+ ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
+ ApiTypeError: when the input type is not in the list of allowed spec types
+ """
+ base_class = type(arg)
+ if base_class not in cls._types:
+ raise cls.__get_type_error(
+ arg,
+ validation_metadata.path_to_item,
+ cls._types,
+ key_type=False,
+ )
+
+ path_to_schemas = {validation_metadata.path_to_item: set()}
+ path_to_schemas[validation_metadata.path_to_item].add(cls)
+ path_to_schemas[validation_metadata.path_to_item].add(base_class)
+ return path_to_schemas
+
+ @staticmethod
+ def _process_schema_classes_oapg(
+ schema_classes: typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]
+ ):
+ """
+ Processes and mutates schema_classes
+ If a SomeSchema is a subclass of DictSchema then remove DictSchema because it is already included
+ """
+ if len(schema_classes) < 2:
+ return
+ if len(schema_classes) > 2 and UnsetAnyTypeSchema in schema_classes:
+ schema_classes.remove(UnsetAnyTypeSchema)
+ x_schema = schema_type_classes & schema_classes
+ if not x_schema:
+ return
+ x_schema = x_schema.pop()
+ if any(c is not x_schema and issubclass(c, x_schema) for c in schema_classes):
+ # needed to not have a mro error in get_new_class
+ schema_classes.remove(x_schema)
+
+ @classmethod
+ def __get_new_cls(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata
+ ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]:
+ """
+ Make a new dynamic class and return an instance of that class
+ We are making an instance of cls, but instead of making cls
+ make a new class, new_cls
+ which includes dynamic bases including cls
+ return an instance of that new class
+
+ Dict property + List Item Assignment Use cases:
+ 1. value is NOT an instance of the required schema class
+ the value is validated by _validate_oapg
+ _validate_oapg returns a key value pair
+ where the key is the path to the item, and the value will be the required manufactured class
+ made out of the matching schemas
+ 2. value is an instance of the the correct schema type
+ the value is NOT validated by _validate_oapg, _validate_oapg only checks that the instance is of the correct schema type
+ for this value, _validate_oapg does NOT return an entry for it in _path_to_schemas
+ and in list/dict _get_items_oapg,_get_properties_oapg the value will be directly assigned
+ because value is of the correct type, and validation was run earlier when the instance was created
+ """
+ _path_to_schemas = {}
+ if validation_metadata.validated_path_to_schemas:
+ update(_path_to_schemas, validation_metadata.validated_path_to_schemas)
+ if not validation_metadata.validation_ran_earlier(cls):
+ other_path_to_schemas = cls._validate_oapg(arg, validation_metadata=validation_metadata)
+ update(_path_to_schemas, other_path_to_schemas)
+ # loop through it make a new class for each entry
+ # do not modify the returned result because it is cached and we would be modifying the cached value
+ path_to_schemas = {}
+ for path, schema_classes in _path_to_schemas.items():
+ """
+ Use cases
+ 1. N number of schema classes + enum + type != bool/None, classes in path_to_schemas: tuple/frozendict.frozendict/str/Decimal/bytes/FileIo
+ needs Singleton added
+ 2. N number of schema classes + enum + type == bool/None, classes in path_to_schemas: BoolClass/NoneClass
+ Singleton already added
+ 3. N number of schema classes, classes in path_to_schemas: BoolClass/NoneClass/tuple/frozendict.frozendict/str/Decimal/bytes/FileIo
+ """
+ cls._process_schema_classes_oapg(schema_classes)
+ enum_schema = any(
+ issubclass(this_cls, EnumBase) for this_cls in schema_classes)
+ inheritable_primitive_type = schema_classes.intersection(cls.__inheritable_primitive_types_set)
+ chosen_schema_classes = schema_classes - inheritable_primitive_type
+ suffix = tuple(inheritable_primitive_type)
+ if enum_schema and suffix[0] not in {NoneClass, BoolClass}:
+ suffix = (Singleton,) + suffix
+
+ used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix
+ mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes)
+ path_to_schemas[path] = mfg_cls
+
+ return path_to_schemas
+
+ @classmethod
+ def _get_new_instance_without_conversion_oapg(
+ cls,
+ arg: typing.Any,
+ path_to_item: typing.Tuple[typing.Union[str, int], ...],
+ path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
+ ):
+ # We have a Dynamic class and we are making an instance of it
+ if issubclass(cls, frozendict.frozendict) and issubclass(cls, DictBase):
+ properties = cls._get_properties_oapg(arg, path_to_item, path_to_schemas)
+ return super(Schema, cls).__new__(cls, properties)
+ elif issubclass(cls, tuple) and issubclass(cls, ListBase):
+ items = cls._get_items_oapg(arg, path_to_item, path_to_schemas)
+ return super(Schema, cls).__new__(cls, items)
+ """
+ str = openapi str, date, and datetime
+ decimal.Decimal = openapi int and float
+ FileIO = openapi binary type and the user inputs a file
+ bytes = openapi binary type and the user inputs bytes
+ """
+ return super(Schema, cls).__new__(cls, arg)
+
+ @classmethod
+ def from_openapi_data_oapg(
+ cls,
+ arg: typing.Union[
+ str,
+ date,
+ datetime,
+ int,
+ float,
+ decimal.Decimal,
+ bool,
+ None,
+ 'Schema',
+ dict,
+ frozendict.frozendict,
+ tuple,
+ list,
+ io.FileIO,
+ io.BufferedReader,
+ bytes
+ ],
+ _configuration: typing.Optional[Configuration]
+ ):
+ """
+ Schema from_openapi_data_oapg
+ """
+ from_server = True
+ validated_path_to_schemas = {}
+ arg = cast_to_allowed_types(arg, from_server, validated_path_to_schemas)
+ validation_metadata = ValidationMetadata(
+ from_server=from_server, configuration=_configuration, validated_path_to_schemas=validated_path_to_schemas)
+ path_to_schemas = cls.__get_new_cls(arg, validation_metadata)
+ new_cls = path_to_schemas[validation_metadata.path_to_item]
+ new_inst = new_cls._get_new_instance_without_conversion_oapg(
+ arg,
+ validation_metadata.path_to_item,
+ path_to_schemas
+ )
+ return new_inst
+
+ @staticmethod
+ def __get_input_dict(*args, **kwargs) -> frozendict.frozendict:
+ input_dict = {}
+ if args and isinstance(args[0], (dict, frozendict.frozendict)):
+ input_dict.update(args[0])
+ if kwargs:
+ input_dict.update(kwargs)
+ return frozendict.frozendict(input_dict)
+
+ @staticmethod
+ def __remove_unsets(kwargs):
+ return {key: val for key, val in kwargs.items() if val is not unset}
+
+ def __new__(cls, *args: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]):
+ """
+ Schema __new__
+
+ Args:
+ args (int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): the value
+ kwargs (str, int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): dict values
+ _configuration: contains the Configuration that enables json schema validation keywords
+ like minItems, minLength etc
+
+ Note: double underscores are used here because pycharm thinks that these variables
+ are instance properties if they are named normally :(
+ """
+ _kwargs = cls.__remove_unsets(kwargs)
+ if not args and not _kwargs:
+ raise TypeError(
+ 'No input given. args or kwargs must be given.'
+ )
+ if not _kwargs and args and not isinstance(args[0], dict):
+ _arg = args[0]
+ else:
+ _arg = cls.__get_input_dict(*args, **_kwargs)
+ _from_server = False
+ _validated_path_to_schemas = {}
+ _arg = cast_to_allowed_types(
+ _arg, _from_server, _validated_path_to_schemas, schema=cls)
+ _validation_metadata = ValidationMetadata(
+ configuration=_configuration, from_server=_from_server, validated_path_to_schemas=_validated_path_to_schemas)
+ _path_to_schemas = cls.__get_new_cls(_arg, _validation_metadata)
+ _new_cls = _path_to_schemas[_validation_metadata.path_to_item]
+ return _new_cls._get_new_instance_without_conversion_oapg(
+ _arg,
+ _validation_metadata.path_to_item,
+ _path_to_schemas
+ )
+
+ def __init__(
+ self,
+ *args: typing.Union[
+ dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'],
+ _configuration: typing.Optional[Configuration] = None,
+ **kwargs: typing.Union[
+ dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset
+ ]
+ ):
+ """
+ this is needed to fix 'Unexpected argument' warning in pycharm
+ this code does nothing because all Schema instances are immutable
+ this means that all input data is passed into and used in new, and after the new instance is made
+ no new attributes are assigned and init is not used
+ """
+ pass
+
+"""
+import itertools
+data_types = ('None', 'FrozenDict', 'Tuple', 'Str', 'Decimal', 'Bool')
+type_to_cls = {
+ 'None': 'NoneClass',
+ 'FrozenDict': 'frozendict.frozendict',
+ 'Tuple': 'tuple',
+ 'Str': 'str',
+ 'Decimal': 'decimal.Decimal',
+ 'Bool': 'BoolClass'
+}
+cls_tuples = [v for v in itertools.combinations(data_types, 5)]
+typed_classes = [f"class {''.join(cls_tuple)}Mixin({', '.join(type_to_cls[typ] for typ in cls_tuple)}):\n pass" for cls_tuple in cls_tuples]
+for cls in typed_classes:
+ print(cls)
+object_classes = [f"{''.join(cls_tuple)}Mixin = object" for cls_tuple in cls_tuples]
+for cls in object_classes:
+ print(cls)
+"""
+if typing.TYPE_CHECKING:
+ # qty 1
+ NoneMixin = NoneClass
+ FrozenDictMixin = frozendict.frozendict
+ IntMixin = int
+ TupleMixin = tuple
+ StrMixin = str
+ DecimalMixin = decimal.Decimal
+ BoolMixin = BoolClass
+ BytesMixin = bytes
+ FileMixin = FileIO
+ # qty 2
+ class NumberMixin(decimal.Decimal, int):
+ pass
+ class BinaryMixin(bytes, FileIO):
+ pass
+ class NoneFrozenDictMixin(NoneClass, frozendict.frozendict):
+ pass
+ class NoneTupleMixin(NoneClass, tuple):
+ pass
+ class NoneStrMixin(NoneClass, str):
+ pass
+ class NoneDecimalMixin(NoneClass, decimal.Decimal):
+ pass
+ class NoneBoolMixin(NoneClass, BoolClass):
+ pass
+ class FrozenDictTupleMixin(frozendict.frozendict, tuple):
+ pass
+ class FrozenDictStrMixin(frozendict.frozendict, str):
+ pass
+ class FrozenDictDecimalMixin(frozendict.frozendict, decimal.Decimal):
+ pass
+ class FrozenDictBoolMixin(frozendict.frozendict, BoolClass):
+ pass
+ class TupleStrMixin(tuple, str):
+ pass
+ class TupleDecimalMixin(tuple, decimal.Decimal):
+ pass
+ class TupleBoolMixin(tuple, BoolClass):
+ pass
+ class StrDecimalMixin(str, decimal.Decimal):
+ pass
+ class StrBoolMixin(str, BoolClass):
+ pass
+ class DecimalBoolMixin(decimal.Decimal, BoolClass):
+ pass
+ # qty 3
+ class NoneFrozenDictTupleMixin(NoneClass, frozendict.frozendict, tuple):
+ pass
+ class NoneFrozenDictStrMixin(NoneClass, frozendict.frozendict, str):
+ pass
+ class NoneFrozenDictDecimalMixin(NoneClass, frozendict.frozendict, decimal.Decimal):
+ pass
+ class NoneFrozenDictBoolMixin(NoneClass, frozendict.frozendict, BoolClass):
+ pass
+ class NoneTupleStrMixin(NoneClass, tuple, str):
+ pass
+ class NoneTupleDecimalMixin(NoneClass, tuple, decimal.Decimal):
+ pass
+ class NoneTupleBoolMixin(NoneClass, tuple, BoolClass):
+ pass
+ class NoneStrDecimalMixin(NoneClass, str, decimal.Decimal):
+ pass
+ class NoneStrBoolMixin(NoneClass, str, BoolClass):
+ pass
+ class NoneDecimalBoolMixin(NoneClass, decimal.Decimal, BoolClass):
+ pass
+ class FrozenDictTupleStrMixin(frozendict.frozendict, tuple, str):
+ pass
+ class FrozenDictTupleDecimalMixin(frozendict.frozendict, tuple, decimal.Decimal):
+ pass
+ class FrozenDictTupleBoolMixin(frozendict.frozendict, tuple, BoolClass):
+ pass
+ class FrozenDictStrDecimalMixin(frozendict.frozendict, str, decimal.Decimal):
+ pass
+ class FrozenDictStrBoolMixin(frozendict.frozendict, str, BoolClass):
+ pass
+ class FrozenDictDecimalBoolMixin(frozendict.frozendict, decimal.Decimal, BoolClass):
+ pass
+ class TupleStrDecimalMixin(tuple, str, decimal.Decimal):
+ pass
+ class TupleStrBoolMixin(tuple, str, BoolClass):
+ pass
+ class TupleDecimalBoolMixin(tuple, decimal.Decimal, BoolClass):
+ pass
+ class StrDecimalBoolMixin(str, decimal.Decimal, BoolClass):
+ pass
+ # qty 4
+ class NoneFrozenDictTupleStrMixin(NoneClass, frozendict.frozendict, tuple, str):
+ pass
+ class NoneFrozenDictTupleDecimalMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal):
+ pass
+ class NoneFrozenDictTupleBoolMixin(NoneClass, frozendict.frozendict, tuple, BoolClass):
+ pass
+ class NoneFrozenDictStrDecimalMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal):
+ pass
+ class NoneFrozenDictStrBoolMixin(NoneClass, frozendict.frozendict, str, BoolClass):
+ pass
+ class NoneFrozenDictDecimalBoolMixin(NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass):
+ pass
+ class NoneTupleStrDecimalMixin(NoneClass, tuple, str, decimal.Decimal):
+ pass
+ class NoneTupleStrBoolMixin(NoneClass, tuple, str, BoolClass):
+ pass
+ class NoneTupleDecimalBoolMixin(NoneClass, tuple, decimal.Decimal, BoolClass):
+ pass
+ class NoneStrDecimalBoolMixin(NoneClass, str, decimal.Decimal, BoolClass):
+ pass
+ class FrozenDictTupleStrDecimalMixin(frozendict.frozendict, tuple, str, decimal.Decimal):
+ pass
+ class FrozenDictTupleStrBoolMixin(frozendict.frozendict, tuple, str, BoolClass):
+ pass
+ class FrozenDictTupleDecimalBoolMixin(frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
+ pass
+ class FrozenDictStrDecimalBoolMixin(frozendict.frozendict, str, decimal.Decimal, BoolClass):
+ pass
+ class TupleStrDecimalBoolMixin(tuple, str, decimal.Decimal, BoolClass):
+ pass
+ # qty 5
+ class NoneFrozenDictTupleStrDecimalMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal):
+ pass
+ class NoneFrozenDictTupleStrBoolMixin(NoneClass, frozendict.frozendict, tuple, str, BoolClass):
+ pass
+ class NoneFrozenDictTupleDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
+ pass
+ class NoneFrozenDictStrDecimalBoolMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass):
+ pass
+ class NoneTupleStrDecimalBoolMixin(NoneClass, tuple, str, decimal.Decimal, BoolClass):
+ pass
+ class FrozenDictTupleStrDecimalBoolMixin(frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
+ pass
+ # qty 6
+ class NoneFrozenDictTupleStrDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
+ pass
+ # qty 9
+ class NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin(NoneClass, frozendict.frozendict, tuple, str, int, decimal.Decimal, BoolClass, FileIO, bytes):
+ pass
+else:
+ # qty 1
+ class NoneMixin:
+ _types = {NoneClass}
+ class FrozenDictMixin:
+ _types = {frozendict.frozendict}
+ class TupleMixin:
+ _types = {tuple}
+ class StrMixin:
+ _types = {str}
+ class DecimalMixin:
+ _types = {decimal.Decimal}
+ class IntMixin:
+ _types = {int}
+ class BoolMixin:
+ _types = {BoolClass}
+ class BytesMixin:
+ _types = {bytes}
+ class FileMixin:
+ _types = {FileIO}
+ # qty 2
+ class NumberMixin:
+ _types = {decimal.Decimal, int}
+ class BinaryMixin:
+ _types = {bytes, FileIO}
+ class NoneFrozenDictMixin:
+ _types = {NoneClass, frozendict.frozendict}
+ class NoneTupleMixin:
+ _types = {NoneClass, tuple}
+ class NoneStrMixin:
+ _types = {NoneClass, str}
+ class NoneDecimalMixin:
+ _types = {NoneClass, decimal.Decimal}
+ class NoneBoolMixin:
+ _types = {NoneClass, BoolClass}
+ class FrozenDictTupleMixin:
+ _types = {frozendict.frozendict, tuple}
+ class FrozenDictStrMixin:
+ _types = {frozendict.frozendict, str}
+ class FrozenDictDecimalMixin:
+ _types = {frozendict.frozendict, decimal.Decimal}
+ class FrozenDictBoolMixin:
+ _types = {frozendict.frozendict, BoolClass}
+ class TupleStrMixin:
+ _types = {tuple, str}
+ class TupleDecimalMixin:
+ _types = {tuple, decimal.Decimal}
+ class TupleBoolMixin:
+ _types = {tuple, BoolClass}
+ class StrDecimalMixin:
+ _types = {str, decimal.Decimal}
+ class StrBoolMixin:
+ _types = {str, BoolClass}
+ class DecimalBoolMixin:
+ _types = {decimal.Decimal, BoolClass}
+ # qty 3
+ class NoneFrozenDictTupleMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple}
+ class NoneFrozenDictStrMixin:
+ _types = {NoneClass, frozendict.frozendict, str}
+ class NoneFrozenDictDecimalMixin:
+ _types = {NoneClass, frozendict.frozendict, decimal.Decimal}
+ class NoneFrozenDictBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, BoolClass}
+ class NoneTupleStrMixin:
+ _types = {NoneClass, tuple, str}
+ class NoneTupleDecimalMixin:
+ _types = {NoneClass, tuple, decimal.Decimal}
+ class NoneTupleBoolMixin:
+ _types = {NoneClass, tuple, BoolClass}
+ class NoneStrDecimalMixin:
+ _types = {NoneClass, str, decimal.Decimal}
+ class NoneStrBoolMixin:
+ _types = {NoneClass, str, BoolClass}
+ class NoneDecimalBoolMixin:
+ _types = {NoneClass, decimal.Decimal, BoolClass}
+ class FrozenDictTupleStrMixin:
+ _types = {frozendict.frozendict, tuple, str}
+ class FrozenDictTupleDecimalMixin:
+ _types = {frozendict.frozendict, tuple, decimal.Decimal}
+ class FrozenDictTupleBoolMixin:
+ _types = {frozendict.frozendict, tuple, BoolClass}
+ class FrozenDictStrDecimalMixin:
+ _types = {frozendict.frozendict, str, decimal.Decimal}
+ class FrozenDictStrBoolMixin:
+ _types = {frozendict.frozendict, str, BoolClass}
+ class FrozenDictDecimalBoolMixin:
+ _types = {frozendict.frozendict, decimal.Decimal, BoolClass}
+ class TupleStrDecimalMixin:
+ _types = {tuple, str, decimal.Decimal}
+ class TupleStrBoolMixin:
+ _types = {tuple, str, BoolClass}
+ class TupleDecimalBoolMixin:
+ _types = {tuple, decimal.Decimal, BoolClass}
+ class StrDecimalBoolMixin:
+ _types = {str, decimal.Decimal, BoolClass}
+ # qty 4
+ class NoneFrozenDictTupleStrMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, str}
+ class NoneFrozenDictTupleDecimalMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal}
+ class NoneFrozenDictTupleBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, BoolClass}
+ class NoneFrozenDictStrDecimalMixin:
+ _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal}
+ class NoneFrozenDictStrBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, str, BoolClass}
+ class NoneFrozenDictDecimalBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass}
+ class NoneTupleStrDecimalMixin:
+ _types = {NoneClass, tuple, str, decimal.Decimal}
+ class NoneTupleStrBoolMixin:
+ _types = {NoneClass, tuple, str, BoolClass}
+ class NoneTupleDecimalBoolMixin:
+ _types = {NoneClass, tuple, decimal.Decimal, BoolClass}
+ class NoneStrDecimalBoolMixin:
+ _types = {NoneClass, str, decimal.Decimal, BoolClass}
+ class FrozenDictTupleStrDecimalMixin:
+ _types = {frozendict.frozendict, tuple, str, decimal.Decimal}
+ class FrozenDictTupleStrBoolMixin:
+ _types = {frozendict.frozendict, tuple, str, BoolClass}
+ class FrozenDictTupleDecimalBoolMixin:
+ _types = {frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
+ class FrozenDictStrDecimalBoolMixin:
+ _types = {frozendict.frozendict, str, decimal.Decimal, BoolClass}
+ class TupleStrDecimalBoolMixin:
+ _types = {tuple, str, decimal.Decimal, BoolClass}
+ # qty 5
+ class NoneFrozenDictTupleStrDecimalMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal}
+ class NoneFrozenDictTupleStrBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, str, BoolClass}
+ class NoneFrozenDictTupleDecimalBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
+ class NoneFrozenDictStrDecimalBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass}
+ class NoneTupleStrDecimalBoolMixin:
+ _types = {NoneClass, tuple, str, decimal.Decimal, BoolClass}
+ class FrozenDictTupleStrDecimalBoolMixin:
+ _types = {frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
+ # qty 6
+ class NoneFrozenDictTupleStrDecimalBoolMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
+ # qty 9
+ class NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin:
+ _types = {NoneClass, frozendict.frozendict, tuple, str, int, decimal.Decimal, BoolClass, FileIO, bytes}
+
+
+class ValidatorBase:
+    """Shared JSON-schema keyword validation helpers.
+
+    Mixed into the type-specific schema bases (StrBase, NumberBase,
+    ListBase, DictBase, ...) further down in this module.
+    """
+
+    @staticmethod
+    def _is_json_validation_enabled_oapg(schema_keyword, configuration=None):
+        """Returns true if JSON schema validation is enabled for the specified
+        validation keyword. This can be used to skip JSON schema structural validation
+        as requested in the configuration.
+        Note: the suffix _oapg stands for openapi python (experimental) generator and
+        it has been added to prevent collisions with other methods and properties
+
+        Args:
+            schema_keyword (string): the name of a JSON schema validation keyword.
+            configuration (Configuration): the configuration class.
+        """
+
+        return (configuration is None or
+            not hasattr(configuration, '_disabled_client_side_validations') or
+            schema_keyword not in configuration._disabled_client_side_validations)
+
+    @staticmethod
+    def _raise_validation_errror_message_oapg(value, constraint_msg, constraint_value, path_to_item, additional_txt=""):
+        # Raises ApiValueError with a uniform "Invalid value ..." message for
+        # every failed keyword check.
+        # NOTE(review): "errror" is misspelled, but the name is called from
+        # many generated call sites, so renaming it would break callers.
+        raise ApiValueError(
+            "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format(
+                value=value,
+                constraint_msg=constraint_msg,
+                constraint_value=constraint_value,
+                additional_txt=additional_txt,
+                path_to_item=render_path(path_to_item),
+            )
+        )
+
+
+class EnumBase:
+    # Mixin for enum schemas; MetaOapg.enum_value_to_name maps each allowed
+    # raw value to its generated enum member name.
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata,
+    ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
+        """
+        EnumBase _validate_oapg
+        Validates that arg is in the enum's allowed values
+        """
+        try:
+            # Membership test via the lookup table; the looked-up value is
+            # discarded — only a KeyError matters here.
+            cls.MetaOapg.enum_value_to_name[arg]
+        except KeyError:
+            raise ApiValueError("Invalid value {} passed in to {}, allowed_values={}".format(arg, cls, cls.MetaOapg.enum_value_to_name.keys()))
+        # Delegate the remaining (type/keyword) validation up the MRO.
+        return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class BoolBase:
+    # Mixin providing identity-style boolean checks, since instances are
+    # BoolClass singletons rather than the builtin True/False.
+    def is_true_oapg(self) -> bool:
+        """
+        A replacement for x is True
+        True if the instance is a BoolClass True Singleton
+        """
+        if not issubclass(self.__class__, BoolClass):
+            return False
+        return bool(self)
+
+    def is_false_oapg(self) -> bool:
+        """
+        A replacement for x is False
+        True if the instance is a BoolClass False Singleton
+        """
+        if not issubclass(self.__class__, BoolClass):
+            return False
+        # 'is False' distinguishes the False singleton from truthy values.
+        return bool(self) is False
+
+
+class NoneBase:
+    # Mixin for null-capable schemas; None is modeled by the NoneClass
+    # singleton, so 'x is None' cannot be used directly.
+    def is_none_oapg(self) -> bool:
+        """
+        A replacement for x is None
+        True if the instance is a NoneClass None Singleton
+        """
+        if issubclass(self.__class__, NoneClass):
+            return True
+        return False
+
+
+class StrBase(ValidatorBase):
+ MetaOapg: MetaOapgTyped
+
+ @property
+ def as_str_oapg(self) -> str:
+ return self
+
+ @property
+ def as_date_oapg(self) -> date:
+ raise Exception('not implemented')
+
+ @property
+ def as_datetime_oapg(self) -> datetime:
+ raise Exception('not implemented')
+
+ @property
+ def as_decimal_oapg(self) -> decimal.Decimal:
+ raise Exception('not implemented')
+
+ @property
+ def as_uuid_oapg(self) -> uuid.UUID:
+ raise Exception('not implemented')
+
+ @classmethod
+ def __check_str_validations(
+ cls,
+ arg: str,
+ validation_metadata: ValidationMetadata
+ ):
+ if not hasattr(cls, 'MetaOapg'):
+ return
+ if (cls._is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and
+ hasattr(cls.MetaOapg, 'max_length') and
+ len(arg) > cls.MetaOapg.max_length):
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="length must be less than or equal to",
+ constraint_value=cls.MetaOapg.max_length,
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ if (cls._is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and
+ hasattr(cls.MetaOapg, 'min_length') and
+ len(arg) < cls.MetaOapg.min_length):
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="length must be greater than or equal to",
+ constraint_value=cls.MetaOapg.min_length,
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ if (cls._is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and
+ hasattr(cls.MetaOapg, 'regex')):
+ for regex_dict in cls.MetaOapg.regex:
+ flags = regex_dict.get('flags', 0)
+ if not re.search(regex_dict['pattern'], arg, flags=flags):
+ if flags != 0:
+ # Don't print the regex flags if the flags are not
+ # specified in the OAS document.
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="must match regular expression",
+ constraint_value=regex_dict['pattern'],
+ path_to_item=validation_metadata.path_to_item,
+ additional_txt=" with flags=`{}`".format(flags)
+ )
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="must match regular expression",
+ constraint_value=regex_dict['pattern'],
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ @classmethod
+ def _validate_oapg(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata,
+ ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
+ """
+ StrBase _validate_oapg
+ Validates that validations pass
+ """
+ if isinstance(arg, str):
+ if hasattr(cls.MetaOapg, 'x_konfig_strip') and cls.MetaOapg.x_konfig_strip:
+ arg = arg.strip()
+ cls.__check_str_validations(arg, validation_metadata)
+ return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class UUIDBase:
+    # Mixin for uuid-formatted string schemas.
+    @property
+    @functools.lru_cache()
+    def as_uuid_oapg(self) -> uuid.UUID:
+        # lru_cache keys on self, so each instance's UUID is parsed once.
+        # NOTE(review): the cache keeps a reference to self alive — presumably
+        # acceptable for these immutable value objects; confirm if instances
+        # are created in large numbers.
+        return uuid.UUID(self)
+
+    @classmethod
+    def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
+        # Only string inputs are checked; other types fall through to the
+        # regular type validation in super()._validate_oapg.
+        if isinstance(arg, str):
+            try:
+                uuid.UUID(arg)
+                return True
+            except ValueError:
+                raise ApiValueError(
+                    "Invalid value '{}' for type UUID at {}".format(arg, validation_metadata.path_to_item)
+                )
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: typing.Optional[ValidationMetadata] = None,
+    ):
+        """
+        UUIDBase _validate_oapg
+
+        Checks uuid format (for str inputs) then delegates up the MRO.
+        NOTE(review): validation_metadata defaults to None but is dereferenced
+        in the error path of __validate_format — confirm callers always pass it.
+        """
+        cls.__validate_format(arg, validation_metadata=validation_metadata)
+        return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class CustomIsoparser(isoparser):
+    # Extends dateutil's isoparser with stricter parse methods that reject
+    # mixed date/time content (uses the private _parse_isodate/_parse_isotime
+    # helpers of the base class).
+
+    @_takes_ascii
+    def parse_isodatetime(self, dt_str):
+        # Parse the date part, then require the configured separator before
+        # any time components.
+        components, pos = self._parse_isodate(dt_str)
+        if len(dt_str) > pos:
+            if self._sep is None or dt_str[pos:pos + 1] == self._sep:
+                components += self._parse_isotime(dt_str[pos + 1:])
+            else:
+                raise ValueError('String contains unknown ISO components')
+
+        if len(components) > 3 and components[3] == 24:
+            # ISO 8601 allows hour 24 as midnight at the end of the day;
+            # normalize to 00:00 of the following day.
+            components[3] = 0
+            return datetime(*components) + timedelta(days=1)
+
+        if len(components) <= 3:
+            # Date-only input is rejected here (use parse_isodate instead).
+            raise ValueError('Value is not a datetime')
+
+        return datetime(*components)
+
+    @_takes_ascii
+    def parse_isodate(self, datestr):
+        # Strictly a calendar date: any trailing characters or time
+        # components cause rejection.
+        components, pos = self._parse_isodate(datestr)
+
+        if len(datestr) > pos:
+            raise ValueError('String contains invalid time components')
+
+        if len(components) > 3:
+            raise ValueError('String contains invalid time components')
+
+        return date(*components)
+
+
+DEFAULT_ISOPARSER = CustomIsoparser()
+
+
+class DateBase:
+    # Mixin for date-formatted string schemas.
+    @property
+    @functools.lru_cache()
+    def as_date_oapg(self) -> date:
+        # Parsed once per instance; lru_cache keys on self.
+        return DEFAULT_ISOPARSER.parse_isodate(self)
+
+    @classmethod
+    def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
+        # Only string inputs are format-checked; other types fall through to
+        # the regular type validation in super()._validate_oapg.
+        if isinstance(arg, str):
+            try:
+                DEFAULT_ISOPARSER.parse_isodate(arg)
+                return True
+            except ValueError:
+                raise ApiValueError(
+                    "Value does not conform to the required ISO-8601 date format. "
+                    "Invalid value '{}' for type date at {}".format(arg, validation_metadata.path_to_item)
+                )
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: typing.Optional[ValidationMetadata] = None,
+    ):
+        """
+        DateBase _validate_oapg
+
+        Checks ISO-8601 date format (for str inputs) then delegates up the MRO.
+        """
+        cls.__validate_format(arg, validation_metadata=validation_metadata)
+        return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class DateTimeBase:
+    # Mixin for date-time-formatted string schemas.
+    @property
+    @functools.lru_cache()
+    def as_datetime_oapg(self) -> datetime:
+        # Parsed once per instance; lru_cache keys on self.
+        return DEFAULT_ISOPARSER.parse_isodatetime(self)
+
+    @classmethod
+    def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
+        # Only string inputs are format-checked; other types fall through to
+        # the regular type validation in super()._validate_oapg.
+        if isinstance(arg, str):
+            try:
+                DEFAULT_ISOPARSER.parse_isodatetime(arg)
+                return True
+            except ValueError:
+                raise ApiValueError(
+                    "Value does not conform to the required ISO-8601 datetime format. "
+                    "Invalid value '{}' for type datetime at {}".format(arg, validation_metadata.path_to_item)
+                )
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata,
+    ):
+        """
+        DateTimeBase _validate_oapg
+
+        Checks ISO-8601 datetime format (for str inputs) then delegates up the MRO.
+        """
+        cls.__validate_format(arg, validation_metadata=validation_metadata)
+        return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class DecimalBase:
+    """
+    A class for storing decimals that are sent over the wire as strings
+    These schemas must remain based on StrBase rather than NumberBase
+    because picking base classes must be deterministic
+    """
+
+    @property
+    @functools.lru_cache()
+    def as_decimal_oapg(self) -> decimal.Decimal:
+        # Parsed once per instance; lru_cache keys on self.
+        return decimal.Decimal(self)
+
+    @classmethod
+    def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
+        # Only string inputs are format-checked; other types fall through to
+        # the regular type validation in super()._validate_oapg.
+        if isinstance(arg, str):
+            try:
+                decimal.Decimal(arg)
+                return True
+            except decimal.InvalidOperation:
+                raise ApiValueError(
+                    "Value cannot be converted to a decimal. "
+                    "Invalid value '{}' for type decimal at {}".format(arg, validation_metadata.path_to_item)
+                )
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata,
+    ):
+        """
+        DecimalBase _validate_oapg
+
+        Checks decimal-string format (for str inputs) then delegates up the MRO.
+        """
+        cls.__validate_format(arg, validation_metadata=validation_metadata)
+        return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class NumberBase(ValidatorBase):
+ MetaOapg: MetaOapgTyped
+
+ @property
+ def as_int_oapg(self) -> int:
+ try:
+ return self._as_int
+ except AttributeError:
+ """
+ Note: for some numbers like 9.0 they could be represented as an
+ integer but our code chooses to store them as
+ >>> Decimal('9.0').as_tuple()
+ DecimalTuple(sign=0, digits=(9, 0), exponent=-1)
+ so we can tell that the value came from a float and convert it back to a float
+ during later serialization
+ """
+ if self.as_tuple().exponent < 0:
+ # this could be represented as an integer but should be represented as a float
+ # because that's what it was serialized from
+ raise ApiValueError(f'{self} is not an integer')
+ self._as_int = int(self)
+ return self._as_int
+
+ @property
+ def as_float_oapg(self) -> float:
+ try:
+ return self._as_float
+ except AttributeError:
+ if self.as_tuple().exponent >= 0:
+ raise ApiValueError(f'{self} is not an float')
+ self._as_float = float(self)
+ return self._as_float
+
+ @classmethod
+ def __check_numeric_validations(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata
+ ):
+ if not hasattr(cls, 'MetaOapg'):
+ return
+ if cls._is_json_validation_enabled_oapg('multipleOf',
+ validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'):
+ multiple_of_value = cls.MetaOapg.multiple_of
+ if (not (float(arg) / multiple_of_value).is_integer()):
+ # Note 'multipleOf' will be as good as the floating point arithmetic.
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="value must be a multiple of",
+ constraint_value=multiple_of_value,
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ checking_max_or_min_values = any(
+ hasattr(cls.MetaOapg, validation_key) for validation_key in {
+ 'exclusive_maximum',
+ 'inclusive_maximum',
+ 'exclusive_minimum',
+ 'inclusive_minimum',
+ }
+ )
+ if not checking_max_or_min_values:
+ return
+
+ if (cls._is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and
+ hasattr(cls.MetaOapg, 'exclusive_maximum') and
+ arg >= cls.MetaOapg.exclusive_maximum):
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="must be a value less than",
+ constraint_value=cls.MetaOapg.exclusive_maximum,
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ if (cls._is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and
+ hasattr(cls.MetaOapg, 'inclusive_maximum') and
+ arg > cls.MetaOapg.inclusive_maximum):
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="must be a value less than or equal to",
+ constraint_value=cls.MetaOapg.inclusive_maximum,
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ if (cls._is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and
+ hasattr(cls.MetaOapg, 'exclusive_minimum') and
+ arg <= cls.MetaOapg.exclusive_minimum):
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="must be a value greater than",
+ constraint_value=cls.MetaOapg.exclusive_maximum,
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ if (cls._is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and
+ hasattr(cls.MetaOapg, 'inclusive_minimum') and
+ arg < cls.MetaOapg.inclusive_minimum):
+ cls._raise_validation_errror_message_oapg(
+ value=arg,
+ constraint_msg="must be a value greater than or equal to",
+ constraint_value=cls.MetaOapg.inclusive_minimum,
+ path_to_item=validation_metadata.path_to_item
+ )
+
+ @classmethod
+ def _validate_oapg(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata,
+ ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
+ """
+ NumberBase _validate_oapg
+ Validates that validations pass
+ """
+ if isinstance(arg, decimal.Decimal):
+ cls.__check_numeric_validations(arg, validation_metadata)
+ return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class ListBase(ValidatorBase):
+    # Validation base for array schemas; MetaOapg may carry 'items',
+    # 'max_items', 'min_items' and 'unique_items'.
+    MetaOapg: MetaOapgTyped
+
+    @classmethod
+    def __validate_items(cls, list_items, validation_metadata: ValidationMetadata):
+        """
+        Ensures that:
+        - values passed in for items are valid
+        Exceptions will be raised if:
+        - invalid arguments were passed in
+
+        Args:
+            list_items: the input list of items
+
+        Raises:
+            ApiTypeError - for missing required arguments, or for invalid properties
+        """
+
+        # if we have definitions for an items schema, use it
+        # otherwise accept anything
+        item_cls = getattr(cls.MetaOapg, 'items', UnsetAnyTypeSchema)
+        item_cls = cls._get_class_oapg(item_cls)
+        path_to_schemas = {}
+        for i, value in enumerate(list_items):
+            # Each item gets its own metadata with the index appended to the path.
+            item_validation_metadata = ValidationMetadata(
+                from_server=validation_metadata.from_server,
+                configuration=validation_metadata.configuration,
+                path_to_item=validation_metadata.path_to_item+(i,),
+                validated_path_to_schemas=validation_metadata.validated_path_to_schemas
+            )
+            if item_validation_metadata.validation_ran_earlier(item_cls):
+                # Skip re-validating paths already validated in this pass.
+                continue
+            other_path_to_schemas = item_cls._validate_oapg(
+                value, validation_metadata=item_validation_metadata)
+            update(path_to_schemas, other_path_to_schemas)
+        return path_to_schemas
+
+    @classmethod
+    def __check_tuple_validations(
+            cls, arg,
+            validation_metadata: ValidationMetadata):
+        # Apply maxItems / minItems / uniqueItems keyword checks.
+        if not hasattr(cls, 'MetaOapg'):
+            return
+        if (cls._is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and
+                hasattr(cls.MetaOapg, 'max_items') and
+                len(arg) > cls.MetaOapg.max_items):
+            cls._raise_validation_errror_message_oapg(
+                value=arg,
+                constraint_msg="number of items must be less than or equal to",
+                constraint_value=cls.MetaOapg.max_items,
+                path_to_item=validation_metadata.path_to_item
+            )
+
+        if (cls._is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and
+                hasattr(cls.MetaOapg, 'min_items') and
+                len(arg) < cls.MetaOapg.min_items):
+            cls._raise_validation_errror_message_oapg(
+                value=arg,
+                constraint_msg="number of items must be greater than or equal to",
+                constraint_value=cls.MetaOapg.min_items,
+                path_to_item=validation_metadata.path_to_item
+            )
+
+        if (cls._is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and
+                hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg):
+            # Items must be hashable for this set-based duplicate check.
+            unique_items = set(arg)
+            if len(arg) > len(unique_items):
+                cls._raise_validation_errror_message_oapg(
+                    value=arg,
+                    constraint_msg="duplicate items were found, and the tuple must not contain duplicates because",
+                    constraint_value='unique_items==True',
+                    path_to_item=validation_metadata.path_to_item
+                )
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata,
+    ):
+        """
+        ListBase _validate_oapg
+        We return dynamic classes of different bases depending upon the inputs
+        This makes it so:
+        - the returned instance is always a subclass of our defining schema
+        - this allows us to check type based on whether an instance is a subclass of a schema
+        - the returned instance is a serializable type (except for None, True, and False) which are enums
+
+        Returns:
+            new_cls (type): the new class
+
+        Raises:
+            ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
+            ApiTypeError: when the input type is not in the list of allowed spec types
+        """
+        if isinstance(arg, tuple):
+            cls.__check_tuple_validations(arg, validation_metadata)
+        _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
+        if not isinstance(arg, tuple):
+            # Non-tuple input: no per-item validation applies.
+            return _path_to_schemas
+        # Record this class as seen to avoid re-validating cyclic schemas.
+        updated_vm = ValidationMetadata(
+            configuration=validation_metadata.configuration,
+            from_server=validation_metadata.from_server,
+            path_to_item=validation_metadata.path_to_item,
+            seen_classes=validation_metadata.seen_classes | frozenset({cls}),
+            validated_path_to_schemas=validation_metadata.validated_path_to_schemas
+        )
+        other_path_to_schemas = cls.__validate_items(arg, validation_metadata=updated_vm)
+        update(_path_to_schemas, other_path_to_schemas)
+        return _path_to_schemas
+
+    @classmethod
+    def _get_items_oapg(
+        cls: 'Schema',
+        arg: typing.List[typing.Any],
+        path_to_item: typing.Tuple[typing.Union[str, int], ...],
+        path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
+    ):
+        '''
+        ListBase _get_items_oapg
+
+        Instantiate each already-validated item with the schema class that
+        validation recorded for its path.
+        '''
+        cast_items = []
+
+        for i, value in enumerate(arg):
+            item_path_to_item = path_to_item + (i,)
+            item_cls = path_to_schemas[item_path_to_item]
+            new_value = item_cls._get_new_instance_without_conversion_oapg(
+                value,
+                item_path_to_item,
+                path_to_schemas
+            )
+            cast_items.append(new_value)
+
+        return cast_items
+
+
+class Discriminable:
+ MetaOapg: MetaOapgTyped
+
+ @classmethod
+ def _ensure_discriminator_value_present_oapg(cls, disc_property_name: str, validation_metadata: ValidationMetadata, *args):
+ if not args or args and disc_property_name not in args[0]:
+ # The input data does not contain the discriminator property
+ raise ApiValueError(
+ "Cannot deserialize input data due to missing discriminator. "
+ "The discriminator property '{}' is missing at path: {}".format(disc_property_name, validation_metadata.path_to_item)
+ )
+
+ @classmethod
+ def get_discriminated_class_oapg(cls, disc_property_name: str, disc_payload_value: str):
+ """
+ Used in schemas with discriminators
+ """
+ if not hasattr(cls.MetaOapg, 'discriminator'):
+ return None
+ disc = cls.MetaOapg.discriminator()
+ if disc_property_name not in disc:
+ return None
+ discriminated_cls = disc[disc_property_name].get(disc_payload_value)
+ if discriminated_cls is not None:
+ return discriminated_cls
+ if not hasattr(cls, 'MetaOapg'):
+ return None
+ elif not (
+ hasattr(cls.MetaOapg, 'all_of') or
+ hasattr(cls.MetaOapg, 'one_of') or
+ hasattr(cls.MetaOapg, 'any_of')
+ ):
+ return None
+ # TODO stop traveling if a cycle is hit
+ if hasattr(cls.MetaOapg, 'all_of'):
+ for allof_cls in cls.MetaOapg.all_of():
+ discriminated_cls = allof_cls.get_discriminated_class_oapg(
+ disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
+ if discriminated_cls is not None:
+ return discriminated_cls
+ if hasattr(cls.MetaOapg, 'one_of'):
+ for oneof_cls in cls.MetaOapg.one_of():
+ discriminated_cls = oneof_cls.get_discriminated_class_oapg(
+ disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
+ if discriminated_cls is not None:
+ return discriminated_cls
+ if hasattr(cls.MetaOapg, 'any_of'):
+ for anyof_cls in cls.MetaOapg.any_of():
+ discriminated_cls = anyof_cls.get_discriminated_class_oapg(
+ disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
+ if discriminated_cls is not None:
+ return discriminated_cls
+ return None
+
+
+class DictBase(Discriminable, ValidatorBase):
+
+    @classmethod
+    def __validate_arg_presence(cls, arg, validation_metadata: ValidationMetadata):
+        """
+        Ensures that:
+        - all required arguments are passed in
+        - the input variable names are valid
+            - present in properties or
+            - accepted because additionalProperties exists
+        Exceptions will be raised if:
+        - invalid arguments were passed in
+            - a var_name is invalid if additional_properties == NotAnyTypeSchema
+            and var_name not in properties.__annotations__
+        - required properties were not passed in
+
+        Args:
+            arg: the input dict
+
+        Raises:
+            ApiTypeError - for missing required arguments, or for invalid properties
+        """
+        seen_required_properties = set()
+        invalid_arguments = []
+        required_property_names = getattr(cls.MetaOapg, 'required', set())
+        additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
+        properties = getattr(cls.MetaOapg, 'properties', {})
+        property_annotations = getattr(properties, '__annotations__', {})
+        for property_name in arg:
+            if property_name in required_property_names:
+                seen_required_properties.add(property_name)
+            elif property_name in property_annotations:
+                continue
+            elif additional_properties is not NotAnyTypeSchema:
+                # additionalProperties allows names not declared in properties.
+                continue
+            else:
+                invalid_arguments.append(property_name)
+        missing_required_arguments = list(required_property_names - seen_required_properties)
+        if missing_required_arguments:
+            # Sorted for a deterministic error message.
+            missing_required_arguments.sort()
+            raise MissingRequiredPropertiesError(
+                "{} is missing {} required propert{}{}: {}".format(
+                    cls.__name__,
+                    len(missing_required_arguments),
+                    "ies" if len(missing_required_arguments) > 1 else "y",
+                    # path_to_item[0] is the root marker, hence the [1:] slice.
+                    " at '{}'".format('.'.join([str(i) for i in validation_metadata.path_to_item[1:]])) if len(validation_metadata.path_to_item) > 1 else "",
+                    missing_required_arguments
+                )
+            )
+        if invalid_arguments:
+            invalid_arguments.sort()
+            raise ApiTypeError(
+                "{} was passed {} invalid argument{}: {}".format(
+                    cls.__name__,
+                    len(invalid_arguments),
+                    "s" if len(invalid_arguments) > 1 else "",
+                    invalid_arguments
+                )
+            )
+
+    @classmethod
+    def __validate_args(cls, arg, validation_metadata: ValidationMetadata):
+        """
+        Ensures that:
+        - values passed in for properties are valid
+        Exceptions will be raised if:
+        - invalid arguments were passed in
+
+        Args:
+            arg: the input dict
+
+        Raises:
+            ApiTypeError - for missing required arguments, or for invalid properties
+            SchemaValidationError - aggregate of all per-property validation failures
+        """
+        path_to_schemas = {}
+        additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
+        properties = getattr(cls.MetaOapg, 'properties', {})
+        property_annotations = getattr(properties, '__annotations__', {})
+        # Errors are collected (not raised immediately) so all invalid
+        # properties are reported in one SchemaValidationError.
+        validation_errors = []
+        for property_name, value in arg.items():
+            path_to_item = validation_metadata.path_to_item+(property_name,)
+            if property_name in property_annotations:
+                schema = property_annotations[property_name]
+            elif additional_properties is not NotAnyTypeSchema:
+                if additional_properties is UnsetAnyTypeSchema:
+                    """
+                    If additionalProperties is unset and this path_to_item does not yet have
+                    any validations on it, validate it.
+                    If it already has validations on it, skip this validation.
+                    """
+                    if path_to_item in path_to_schemas:
+                        continue
+                schema = additional_properties
+            else:
+                raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format(
+                    value, cls, validation_metadata.path_to_item+(property_name,)
+                ))
+            schema = cls._get_class_oapg(schema)
+            arg_validation_metadata = ValidationMetadata(
+                from_server=validation_metadata.from_server,
+                configuration=validation_metadata.configuration,
+                path_to_item=path_to_item,
+                validated_path_to_schemas=validation_metadata.validated_path_to_schemas
+            )
+            if arg_validation_metadata.validation_ran_earlier(schema):
+                continue
+            try:
+                other_path_to_schemas = schema._validate_oapg(value, validation_metadata=arg_validation_metadata)
+                update(path_to_schemas, other_path_to_schemas)
+            except (ApiTypeError, ApiValueError, MissingRequiredPropertiesError) as e:
+                validation_errors.append(e)
+        if len(validation_errors) > 0:
+            raise SchemaValidationError(validation_errors)
+        return path_to_schemas
+
+    @classmethod
+    def __check_dict_validations(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata
+    ):
+        # Apply maxProperties / minProperties keyword checks to the input
+        # dict; a no-op for classes without MetaOapg.
+        if not hasattr(cls, 'MetaOapg'):
+            return
+        if (cls._is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and
+                hasattr(cls.MetaOapg, 'max_properties') and
+                len(arg) > cls.MetaOapg.max_properties):
+            cls._raise_validation_errror_message_oapg(
+                value=arg,
+                constraint_msg="number of properties must be less than or equal to",
+                constraint_value=cls.MetaOapg.max_properties,
+                path_to_item=validation_metadata.path_to_item
+            )
+
+        if (cls._is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and
+                hasattr(cls.MetaOapg, 'min_properties') and
+                len(arg) < cls.MetaOapg.min_properties):
+            cls._raise_validation_errror_message_oapg(
+                value=arg,
+                constraint_msg="number of properties must be greater than or equal to",
+                constraint_value=cls.MetaOapg.min_properties,
+                path_to_item=validation_metadata.path_to_item
+            )
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata,
+    ):
+        """
+        DictBase _validate_oapg
+        We return dynamic classes of different bases depending upon the inputs
+        This makes it so:
+        - the returned instance is always a subclass of our defining schema
+        - this allows us to check type based on whether an instance is a subclass of a schema
+        - the returned instance is a serializable type (except for None, True, and False) which are enums
+
+        Returns:
+            new_cls (type): the new class
+
+        Raises:
+            ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
+            ApiTypeError: when the input type is not in the list of allowed spec types
+        """
+        if isinstance(arg, frozendict.frozendict):
+            cls.__check_dict_validations(arg, validation_metadata)
+        _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
+        if not isinstance(arg, frozendict.frozendict):
+            # Non-dict input: no property-level validation applies.
+            return _path_to_schemas
+        cls.__validate_arg_presence(arg, validation_metadata)
+        other_path_to_schemas = cls.__validate_args(arg, validation_metadata=validation_metadata)
+        update(_path_to_schemas, other_path_to_schemas)
+        try:
+            discriminator = cls.MetaOapg.discriminator()
+        except AttributeError:
+            # No discriminator declared (or no MetaOapg): we are done.
+            return _path_to_schemas
+        # discriminator exists
+        disc_prop_name = list(discriminator.keys())[0]
+        cls._ensure_discriminator_value_present_oapg(disc_prop_name, validation_metadata, arg)
+        discriminated_cls = cls.get_discriminated_class_oapg(
+            disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name])
+        if discriminated_cls is None:
+            raise ApiValueError(
+                "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format(
+                    cls.__name__,
+                    disc_prop_name,
+                    list(discriminator[disc_prop_name].keys()),
+                    validation_metadata.path_to_item + (disc_prop_name,)
+                )
+            )
+        # Record this class as seen to avoid re-validating cyclic schemas.
+        updated_vm = ValidationMetadata(
+            configuration=validation_metadata.configuration,
+            from_server=validation_metadata.from_server,
+            path_to_item=validation_metadata.path_to_item,
+            seen_classes=validation_metadata.seen_classes | frozenset({cls}),
+            validated_path_to_schemas=validation_metadata.validated_path_to_schemas
+        )
+        if updated_vm.validation_ran_earlier(discriminated_cls):
+            return _path_to_schemas
+        other_path_to_schemas = discriminated_cls._validate_oapg(arg, validation_metadata=updated_vm)
+        update(_path_to_schemas, other_path_to_schemas)
+        return _path_to_schemas
+
+ @classmethod
+ def _get_properties_oapg(
+ cls,
+ arg: typing.Dict[str, typing.Any],
+ path_to_item: typing.Tuple[typing.Union[str, int], ...],
+ path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
+ ):
+ """
+ DictBase _get_properties_oapg, this is how properties are set
+ These values already passed validation
+ """
+ dict_items = {}
+
+ for property_name_js, value in arg.items():
+ property_path_to_item = path_to_item + (property_name_js,)
+ property_cls = path_to_schemas[property_path_to_item]
+ new_value = property_cls._get_new_instance_without_conversion_oapg(
+ value,
+ property_path_to_item,
+ path_to_schemas
+ )
+ dict_items[property_name_js] = new_value
+
+ return dict_items
+
+    def __setattr__(self, name: str, value: typing.Any):
+        # schema instances are immutable; only FileIO instances may be mutated
+        # NOTE(review): for FileIO instances this is a silent no-op (super().__setattr__
+        # is never called here) — confirm FileIO attribute writes happen elsewhere
+        if not isinstance(self, FileIO):
+            raise AttributeError('property setting not supported on immutable instances')
+
+    def __getattr__(self, name: str):
+        """
+        for instance.name access
+        Properties are only type hinted for required properties
+        so that hasattr(instance, 'optionalProp') is False when that key is not present
+        """
+        if not isinstance(self, frozendict.frozendict):
+            return super().__getattr__(name)
+        # only properties declared in the spec (class annotations) are exposed as attributes
+        if name not in self.__class__.__annotations__:
+            raise AttributeError(f"{self} has no attribute '{name}'")
+        try:
+            value = self[name]
+            return value
+        except KeyError as ex:
+            # absent optional property: surface as AttributeError so hasattr() works
+            raise AttributeError(str(ex))
+
+    def __getitem__(self, name: str):
+        """
+        dict_instance[name] accessor
+        key errors thrown
+        """
+        if not isinstance(self, frozendict.frozendict):
+            # non-dict instances fall back to attribute lookup
+            return super().__getattr__(name)
+        return super().__getitem__(name)
+
+    def get_item_oapg(self, name: str) -> typing.Union['AnyTypeSchema', Unset]:
+        # dict_instance[name] accessor
+        # like __getitem__ but returns the unset sentinel instead of raising KeyError
+        if not isinstance(self, frozendict.frozendict):
+            raise NotImplementedError()
+        try:
+            return super().__getitem__(name)
+        except KeyError:
+            return unset
+
+
+def cast_to_allowed_types(
+    arg: typing.Union[str, date, datetime, uuid.UUID, decimal.Decimal, int, float, None, dict, frozendict.frozendict, list, tuple, bytes, Schema, io.FileIO, io.BufferedReader],
+    from_server: bool,
+    validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]],
+    path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
+    schema: Schema = None,
+) -> typing.Union[frozendict.frozendict, tuple, decimal.Decimal, str, bytes, BoolClass, NoneClass, FileIO]:
+    """
+    Casts the input payload arg into the allowed types
+    The input validated_path_to_schemas is mutated by running this function
+
+    When from_server is False then
+    - date/datetime is cast to str
+    - int/float is cast to Decimal
+
+    If a Schema instance is passed in it is converted back to a primitive instance because
+    One may need to validate that data to the original Schema class AND additional different classes
+    those additional classes will need to be added to the new manufactured class for that payload
+    If the code didn't do this and kept the payload as a Schema instance it would fail to validate to other
+    Schema classes and the code wouldn't be able to mfg a new class that includes all valid schemas
+    TODO: store the validated schema classes in validation_metadata
+
+    Args:
+        arg: the payload
+        from_server: whether this payload came from the server or not
+        validated_path_to_schemas: a dict that stores the validated classes at any path location in the payload
+        path_to_item: location of arg within the overall payload
+        schema: optional schema class used to choose date vs datetime serialization
+            for the top-level arg only (see NOTE below)
+    """
+    if isinstance(arg, Schema):
+        # store the already run validations
+        schema_classes = set()
+        source_schema_was_unset = len(arg.__class__.__bases__) == 2 and UnsetAnyTypeSchema in arg.__class__.__bases__
+        if not source_schema_was_unset:
+            """
+            Do not include UnsetAnyTypeSchema and its base class because
+            it did not exist in the original spec schema definition
+            It was added to ensure that all instances are of type Schema and the allowed base types
+            """
+            for cls in arg.__class__.__bases__:
+                if cls is Singleton:
+                    # Skip Singleton
+                    continue
+                schema_classes.add(cls)
+        validated_path_to_schemas[path_to_item] = schema_classes
+
+    # raised by the date/datetime/uuid branches below when from_server is True
+    type_error = ApiTypeError(f"Invalid type. Required value type is str and passed type was {type(arg)} at {path_to_item}")
+    if isinstance(arg, str):
+        return str(arg)
+    elif isinstance(arg, (dict, frozendict.frozendict)):
+        # NOTE(review): recursive calls do not forward `schema`, so the schema-specific
+        # date/datetime handling below applies only to the top-level arg — confirm intended
+        return frozendict.frozendict({key: cast_to_allowed_types(val, from_server, validated_path_to_schemas, path_to_item + (key,)) for key, val in arg.items()})
+    elif isinstance(arg, (bool, BoolClass)):
+        """
+        this check must come before isinstance(arg, (int, float))
+        because isinstance(True, int) is True
+        """
+        if arg:
+            return BoolClass.TRUE
+        return BoolClass.FALSE
+    elif isinstance(arg, int):
+        # NOTE(review): ints are returned unchanged even though the docstring says
+        # int/float -> Decimal when from_server is False — confirm intended
+        return arg
+    elif isinstance(arg, float):
+        decimal_from_float = decimal.Decimal(arg)
+        if decimal_from_float.as_integer_ratio()[1] == 1:
+            # 9.0 -> Decimal('9.0')
+            # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0')
+            return decimal.Decimal(str(decimal_from_float)+'.0')
+        return decimal_from_float
+    elif isinstance(arg, (tuple, list)):
+        return tuple([cast_to_allowed_types(item, from_server, validated_path_to_schemas, path_to_item + (i,)) for i, item in enumerate(arg)])
+    elif isinstance(arg, (none_type, NoneClass)):
+        return NoneClass.NONE
+    elif isinstance(arg, (date, datetime)):
+        if not from_server:
+            # if schema itself is the DateTimeSchema class then convert to isoformat
+            # if schema itself is the DateSchema class then convert to yyyy-mm-dd using strftime
+            if schema is None:
+                return arg.isoformat()
+            if schema is DateTimeSchema:
+                return arg.isoformat()
+            if schema is DateSchema:
+                return arg.strftime('%Y-%m-%d')
+        raise type_error
+    elif isinstance(arg, uuid.UUID):
+        if not from_server:
+            return str(arg)
+        raise type_error
+    elif isinstance(arg, decimal.Decimal):
+        return decimal.Decimal(arg)
+    elif isinstance(arg, bytes):
+        return bytes(arg)
+    elif isinstance(arg, (io.FileIO, io.BufferedReader)):
+        return FileIO(arg)
+    raise ValueError('Invalid type passed in got input={} type={}'.format(arg, type(arg)))
+
+
+class ComposedBase(Discriminable):
+    """Base for allOf/oneOf/anyOf/not composed schemas; resolves which component schemas a payload validates against."""
+
+    @classmethod
+    def __get_allof_classes(cls, arg, validation_metadata: ValidationMetadata):
+        # every allOf schema must validate; merge all of their path->schemas results
+        path_to_schemas = defaultdict(set)
+        for allof_cls in cls.MetaOapg.all_of():
+            if validation_metadata.validation_ran_earlier(allof_cls):
+                continue
+            other_path_to_schemas = allof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
+            update(path_to_schemas, other_path_to_schemas)
+        return path_to_schemas
+
+    @classmethod
+    def __get_oneof_class(
+        cls,
+        arg,
+        discriminated_cls,
+        validation_metadata: ValidationMetadata,
+    ):
+        # exactly one oneOf schema may validate: zero or multiple matches raise
+        oneof_classes = []
+        path_to_schemas = defaultdict(set)
+        for oneof_cls in cls.MetaOapg.one_of():
+            if oneof_cls in path_to_schemas[validation_metadata.path_to_item]:
+                oneof_classes.append(oneof_cls)
+                continue
+            if validation_metadata.validation_ran_earlier(oneof_cls):
+                oneof_classes.append(oneof_cls)
+                continue
+            try:
+                # NOTE(review): on success this REPLACES path_to_schemas rather than merging,
+                # so the returned dict reflects the last candidate that validated; the
+                # membership check above reads this same dict — confirm this matches intent
+                path_to_schemas = oneof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
+            except (ApiValueError, ApiTypeError) as ex:
+                # a failing candidate is only fatal when it is the discriminated schema
+                if discriminated_cls is not None and oneof_cls is discriminated_cls:
+                    raise ex
+                continue
+            oneof_classes.append(oneof_cls)
+        if not oneof_classes:
+            raise ApiValueError(
+                "Invalid inputs given to generate an instance of {}. None "
+                "of the oneOf schemas matched the input data.".format(cls)
+            )
+        elif len(oneof_classes) > 1:
+            raise ApiValueError(
+                "Invalid inputs given to generate an instance of {}. Multiple "
+                "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes)
+            )
+        # exactly one class matches
+        return path_to_schemas
+
+    @classmethod
+    def __get_anyof_classes(
+        cls,
+        arg,
+        discriminated_cls,
+        validation_metadata: ValidationMetadata
+    ):
+        # at least one anyOf schema must validate; results of all matches are merged
+        anyof_classes = []
+        exceptions: typing.List[typing.Union[ApiTypeError, ApiValueError]] = []
+        path_to_schemas = defaultdict(set)
+        for anyof_cls in cls.MetaOapg.any_of():
+            if validation_metadata.validation_ran_earlier(anyof_cls):
+                anyof_classes.append(anyof_cls)
+                continue
+
+            try:
+                other_path_to_schemas = anyof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
+            except (ApiValueError, ApiTypeError) as ex:
+                # a failing candidate is only fatal when it is the discriminated schema
+                if discriminated_cls is not None and anyof_cls is discriminated_cls:
+                    raise ex
+                exceptions.append(ex)
+                continue
+            anyof_classes.append(anyof_cls)
+            update(path_to_schemas, other_path_to_schemas)
+        if not anyof_classes:
+            raise AnyOfValidationError(error_list=exceptions)
+        return path_to_schemas
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata,
+    ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
+        """
+        ComposedBase _validate_oapg
+        We return dynamic classes of different bases depending upon the inputs
+        This makes it so:
+        - the returned instance is always a subclass of our defining schema
+        - this allows us to check type based on whether an instance is a subclass of a schema
+        - the returned instance is a serializable type (except for None, True, and False) which are enums
+
+        Returns:
+            new_cls (type): the new class
+
+        Raises:
+            ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
+            ApiTypeError: when the input type is not in the list of allowed spec types
+        """
+        # validation checking on types, validations, and enums
+        path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+        # record cls in seen_classes so recursive composed references terminate
+        updated_vm = ValidationMetadata(
+            configuration=validation_metadata.configuration,
+            from_server=validation_metadata.from_server,
+            path_to_item=validation_metadata.path_to_item,
+            seen_classes=validation_metadata.seen_classes | frozenset({cls}),
+            validated_path_to_schemas=validation_metadata.validated_path_to_schemas
+        )
+
+        # process composed schema
+        discriminator = None
+        if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'discriminator'):
+            discriminator = cls.MetaOapg.discriminator()
+        discriminated_cls = None
+        if discriminator and arg and isinstance(arg, frozendict.frozendict):
+            disc_property_name = list(discriminator.keys())[0]
+            cls._ensure_discriminator_value_present_oapg(disc_property_name, updated_vm, arg)
+            # get discriminated_cls by looking at the dict in the current class
+            discriminated_cls = cls.get_discriminated_class_oapg(
+                disc_property_name=disc_property_name, disc_payload_value=arg[disc_property_name])
+            if discriminated_cls is None:
+                raise ApiValueError(
+                    "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format(
+                        arg[disc_property_name],
+                        cls.__name__,
+                        disc_property_name,
+                        list(discriminator[disc_property_name].keys()),
+                        updated_vm.path_to_item + (disc_property_name,)
+                    )
+                )
+
+        if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'):
+            other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm)
+            update(path_to_schemas, other_path_to_schemas)
+        if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'):
+            other_path_to_schemas = cls.__get_oneof_class(
+                arg,
+                discriminated_cls=discriminated_cls,
+                validation_metadata=updated_vm
+            )
+            update(path_to_schemas, other_path_to_schemas)
+        if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'):
+            other_path_to_schemas = cls.__get_anyof_classes(
+                arg,
+                discriminated_cls=discriminated_cls,
+                validation_metadata=updated_vm
+            )
+            update(path_to_schemas, other_path_to_schemas)
+        # 'not' schema: the payload must FAIL to validate against not_cls
+        not_cls = None
+        if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'not_schema'):
+            not_cls = cls.MetaOapg.not_schema
+            not_cls = cls._get_class_oapg(not_cls)
+        if not_cls:
+            other_path_to_schemas = None
+            not_exception = ApiValueError(
+                "Invalid value '{}' was passed in to {}. Value is invalid because it is disallowed by {}".format(
+                    arg,
+                    cls.__name__,
+                    not_cls.__name__,
+                )
+            )
+            if updated_vm.validation_ran_earlier(not_cls):
+                raise not_exception
+
+            try:
+                other_path_to_schemas = not_cls._validate_oapg(arg, validation_metadata=updated_vm)
+            except (ApiValueError, ApiTypeError):
+                pass
+            if other_path_to_schemas:
+                raise not_exception
+
+        if discriminated_cls is not None and not updated_vm.validation_ran_earlier(discriminated_cls):
+            if discriminated_cls not in path_to_schemas[updated_vm.path_to_item]:
+                raise ApiValueError("Could not find discriminator in value")
+        return path_to_schemas
+
+
+# DictBase, ListBase, NumberBase, StrBase, BoolBase, NoneBase
+class ComposedSchema(
+ ComposedBase,
+ DictBase,
+ ListBase,
+ NumberBase,
+ StrBase,
+ BoolBase,
+ NoneBase,
+ Schema,
+ NoneFrozenDictTupleStrDecimalBoolMixin
+):
+ @classmethod
+ def from_openapi_data_oapg(cls, *args: typing.Any, _configuration: typing.Optional[Configuration] = None, **kwargs):
+ if not args:
+ if not kwargs:
+ raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__))
+ args = (kwargs, )
+ return super().from_openapi_data_oapg(args[0], _configuration=_configuration)
+
+
+class ListSchema(
+    ListBase,
+    Schema,
+    TupleMixin
+):
+    """Schema for type: array payloads; instances are tuple-based."""
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: typing.List[typing.Any], _configuration: typing.Optional[Configuration] = None):
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+    def __new__(cls, arg: typing.Union[typing.List[typing.Any], typing.Tuple[typing.Any]], **kwargs: Configuration):
+        return super().__new__(cls, arg, **kwargs)
+
+
+class NoneSchema(
+    NoneBase,
+    Schema,
+    NoneMixin
+):
+    """Schema for type: null payloads (the NoneClass singleton)."""
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: None, _configuration: typing.Optional[Configuration] = None):
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+    def __new__(cls, arg: None, **kwargs: Configuration):
+        return super().__new__(cls, arg, **kwargs)
+
+
+class NumberSchema(
+    NumberBase,
+    Schema,
+    NumberMixin
+):
+    """
+    This is used for type: number with no format
+    Both integers AND floats are accepted
+    """
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: typing.Union[int, float], _configuration: typing.Optional[Configuration] = None):
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+    def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: Configuration):
+        # client-side construction also accepts an already-built Decimal
+        return super().__new__(cls, arg, **kwargs)
+
+
+class IntBase:
+    @property
+    def as_int_oapg(self) -> int:
+        # lazily computed int view of this value, cached on first access
+        try:
+            return self._as_int
+        except AttributeError:
+            self._as_int = int(self)
+            return self._as_int
+
+    @classmethod
+    def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
+        # reject Decimals that carry a fractional part (integer ratio denominator != 1)
+        if isinstance(arg, decimal.Decimal):
+
+            denominator = arg.as_integer_ratio()[-1]
+            if denominator != 1:
+                raise ApiValueError(
+                    "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item)
+                )
+
+    @classmethod
+    def _validate_oapg(
+        cls,
+        arg,
+        validation_metadata: ValidationMetadata,
+    ):
+        """
+        IntBase _validate_oapg
+        TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only
+        """
+        cls.__validate_format(arg, validation_metadata=validation_metadata)
+        return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class IntSchema(IntBase, NumberBase, Schema, IntMixin):
+    """Schema for type: integer with no format; rejects fractional values via IntBase."""
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: int, _configuration: typing.Optional[Configuration] = None):
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+    def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: Configuration):
+        return super().__new__(cls, arg, **kwargs)
+
+
+class Int32Base:
+ __inclusive_minimum = decimal.Decimal(-2147483648)
+ __inclusive_maximum = decimal.Decimal(2147483647)
+
+ @classmethod
+ def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
+ if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
+ if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
+ raise ApiValueError(
+ "Invalid value '{}' for type int32 at {}".format(arg, validation_metadata.path_to_item)
+ )
+
+ @classmethod
+ def _validate_oapg(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata,
+ ):
+ """
+ Int32Base _validate_oapg
+ """
+ cls.__validate_format(arg, validation_metadata=validation_metadata)
+ return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class Int32Schema(
+    Int32Base,
+    IntSchema
+):
+    # int32 range check from Int32Base plus integer checks from IntSchema
+    pass
+
+
+class Int64Base:
+ __inclusive_minimum = decimal.Decimal(-9223372036854775808)
+ __inclusive_maximum = decimal.Decimal(9223372036854775807)
+
+ @classmethod
+ def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
+ if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
+ if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
+ raise ApiValueError(
+ "Invalid value '{}' for type int64 at {}".format(arg, validation_metadata.path_to_item)
+ )
+
+ @classmethod
+ def _validate_oapg(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata,
+ ):
+ """
+ Int64Base _validate_oapg
+ """
+ cls.__validate_format(arg, validation_metadata=validation_metadata)
+ return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class Int64Schema(
+    Int64Base,
+    IntSchema
+):
+    # int64 range check from Int64Base plus integer checks from IntSchema
+    pass
+
+
+class Float32Base:
+ __inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
+ __inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
+
+ @classmethod
+ def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
+ if isinstance(arg, decimal.Decimal):
+ if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
+ raise ApiValueError(
+ "Invalid value '{}' for type float at {}".format(arg, validation_metadata.path_to_item)
+ )
+
+ @classmethod
+ def _validate_oapg(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata,
+ ):
+ """
+ Float32Base _validate_oapg
+ """
+ cls.__validate_format(arg, validation_metadata=validation_metadata)
+ return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+
+class Float32Schema(
+    Float32Base,
+    NumberSchema
+):
+    """Schema for type: number, format: float."""
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+
+class Float64Base:
+ __inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
+ __inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
+
+ @classmethod
+ def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
+ if isinstance(arg, decimal.Decimal):
+ if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
+ raise ApiValueError(
+ "Invalid value '{}' for type double at {}".format(arg, validation_metadata.path_to_item)
+ )
+
+ @classmethod
+ def _validate_oapg(
+ cls,
+ arg,
+ validation_metadata: ValidationMetadata,
+ ):
+ """
+ Float64Base _validate_oapg
+ """
+ cls.__validate_format(arg, validation_metadata=validation_metadata)
+ return super()._validate_oapg(arg, validation_metadata=validation_metadata)
+
+class Float64Schema(
+    Float64Base,
+    NumberSchema
+):
+    """Schema for type: number, format: double."""
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
+        # todo check format
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+
+class StrSchema(
+    StrBase,
+    Schema,
+    StrMixin
+):
+    """
+    date + datetime string types must inherit from this class
+    That is because one can validate a str payload as both:
+    - type: string (format unset)
+    - type: string, format: date
+    """
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: str, _configuration: typing.Optional[Configuration] = None) -> 'StrSchema':
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+    def __new__(cls, arg: typing.Union[str, date, datetime, uuid.UUID], **kwargs: Configuration):
+        # client-side date/datetime/UUID inputs are cast to str by cast_to_allowed_types
+        return super().__new__(cls, arg, **kwargs)
+
+
+class UUIDSchema(UUIDBase, StrSchema):
+    """Schema for type: string, format: uuid; accepts str or uuid.UUID client-side."""
+
+    def __new__(cls, arg: typing.Union[str, uuid.UUID], **kwargs: Configuration):
+        return super().__new__(cls, arg, **kwargs)
+
+
+class DateSchema(DateBase, StrSchema):
+    """Schema for type: string, format: date; accepts str or datetime.date client-side."""
+
+    def __new__(cls, arg: typing.Union[str, date], **kwargs: Configuration):
+        return super().__new__(cls, arg, **kwargs)
+
+
+class DateTimeSchema(DateTimeBase, StrSchema):
+    """Schema for type: string, format: date-time; accepts str or datetime.datetime client-side."""
+
+    def __new__(cls, arg: typing.Union[str, datetime], **kwargs: Configuration):
+        return super().__new__(cls, arg, **kwargs)
+
+
+class DecimalSchema(DecimalBase, StrSchema):
+    """Schema for type: string, format: number; the payload is a decimal carried as a string."""
+
+    def __new__(cls, arg: str, **kwargs: Configuration):
+        """
+        Note: Decimals may not be passed in because cast_to_allowed_types is only invoked once for payloads
+        which can be simple (str) or complex (dicts or lists with nested values)
+        Because casting is only done once and recursively casts all values prior to validation then for a potential
+        client side Decimal input if Decimal was accepted as an input in DecimalSchema then one would not know
+        if one was using it for a StrSchema (where it should be cast to str) or one is using it for NumberSchema
+        where it should stay as Decimal.
+        """
+        return super().__new__(cls, arg, **kwargs)
+
+
+class BytesSchema(
+    Schema,
+    BytesMixin
+):
+    """
+    this class will subclass bytes and is immutable
+    """
+    def __new__(cls, arg: bytes, **kwargs: Configuration):
+        # super(Schema, cls) starts the MRO lookup AFTER Schema, so Schema.__new__
+        # (validation/casting) is bypassed and the raw bytes base constructs directly
+        return super(Schema, cls).__new__(cls, arg)
+
+
+class FileSchema(
+    Schema,
+    FileMixin
+):
+    """
+    This class is NOT immutable
+    Dynamic classes are built using it for example when AnyType allows in binary data
+    Al other schema classes ARE immutable
+    If one wanted to make this immutable one could make this a DictSchema with required properties:
+    - data = BytesSchema (which would be an immutable bytes based schema)
+    - file_name = StrSchema
+    and cast_to_allowed_types would convert bytes and file instances into dicts containing data + file_name
+    The downside would be that data would be stored in memory which one may not want to do for very large files
+
+    The developer is responsible for closing this file and deleting it
+
+    This class was kept as mutable:
+    - to allow file reading and writing to disk
+    - to be able to preserve file name info
+    """
+
+    def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: Configuration):
+        # super(Schema, cls) bypasses Schema.__new__ validation/casting and
+        # constructs directly from the underlying file base class
+        return super(Schema, cls).__new__(cls, arg)
+
+
+class BinaryBase:
+    # marker base for binary payloads; intentionally empty
+    pass
+
+
+class BinarySchema(
+    ComposedBase,
+    BinaryBase,
+    Schema,
+    BinaryMixin
+):
+    """Schema for binary payloads: a oneOf over BytesSchema and FileSchema."""
+
+    class MetaOapg:
+        @staticmethod
+        def one_of():
+            return [
+                BytesSchema,
+                FileSchema,
+            ]
+
+    def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: Configuration):
+        # NOTE(review): **kwargs is accepted but not forwarded to super().__new__ —
+        # confirm configuration kwargs are intentionally dropped here
+        return super().__new__(cls, arg)
+
+
+class BoolSchema(
+ BoolBase,
+ Schema,
+ BoolMixin
+):
+
+ @classmethod
+ def from_openapi_data_oapg(cls, arg: bool, _configuration: typing.Optional[Configuration] = None):
+ return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+ def __new__(cls, arg: bool, **kwargs: ValidationMetadata):
+ return super().__new__(cls, arg, **kwargs)
+
+
+class AnyTypeSchema(
+    DictBase,
+    ListBase,
+    NumberBase,
+    StrBase,
+    BoolBase,
+    NoneBase,
+    Schema,
+    NoneFrozenDictTupleStrIntDecimalBoolFileBytesMixin
+):
+    """Accepts payloads of any JSON type; the bases cover every simple type."""
+    # Python representation of a schema defined as true or {}
+    pass
+
+
+class UnsetAnyTypeSchema(AnyTypeSchema):
+    # Used when additionalProperties/items was not explicitly defined and a defining schema is needed
+    # (cast_to_allowed_types excludes this class when recording validated schema classes)
+    pass
+
+
+class NotAnyTypeSchema(
+    ComposedSchema,
+):
+    """
+    Python representation of a schema defined as false or {'not': {}}
+    Does not allow inputs in of AnyType
+    Note: validations on this class are never run because the code knows that no inputs will ever validate
+    """
+
+    class MetaOapg:
+        # 'not' over AnyTypeSchema: every payload is disallowed
+        not_schema = AnyTypeSchema
+
+    def __new__(
+        cls,
+        *args,
+        _configuration: typing.Optional[Configuration] = None,
+    ) -> 'NotAnyTypeSchema':
+        return super().__new__(
+            cls,
+            *args,
+            _configuration=_configuration,
+        )
+
+
+class DictSchema(
+    DictBase,
+    Schema,
+    FrozenDictMixin
+):
+    """Schema for type: object payloads; instances are frozendict-based."""
+
+    @classmethod
+    def from_openapi_data_oapg(cls, arg: typing.Dict[str, typing.Any], _configuration: typing.Optional[Configuration] = None):
+        return super().from_openapi_data_oapg(arg, _configuration=_configuration)
+
+    def __new__(cls, *args: typing.Union[dict, frozendict.frozendict], **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, ValidationMetadata]):
+        return super().__new__(cls, *args, **kwargs)
+
+
+# the simple base schema classes, one per JSON type, plus AnyTypeSchema
+schema_type_classes = {NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema, AnyTypeSchema}
+
+
+@functools.lru_cache()
+def get_new_class(
+ class_name: str,
+ bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...]
+) -> typing.Type[Schema]:
+ """
+ Returns a new class that is made with the subclass bases
+ """
+ new_cls: typing.Type[Schema] = type(class_name, bases, {})
+ return new_cls
+
+
+# debug switch: when True, log_cache_usage prints lru_cache statistics
+LOG_CACHE_USAGE = False
+
+
+def log_cache_usage(cache_fn):
+    # debug helper: prints hit/miss stats of an lru_cache-wrapped function
+    if LOG_CACHE_USAGE:
+        print(cache_fn.__name__, cache_fn.cache_info())
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/type/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/type/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/type_util.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/type_util.py
new file mode 100644
index 000000000..9e9bc5e67
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/type_util.py
@@ -0,0 +1,23 @@
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from typing import Callable, Generic, TypeVar, Any
+
+F = TypeVar("F", bound=Callable[..., Any])
+
+
+class copy_signature(Generic[F]):
+    """Decorator carrying the type signature of ``func``.
+
+    NOTE(review): presumably used so type checkers see the wrapped callable's
+    signature on generated methods — confirm at call sites.
+    """
+    def __init__(self, func: F, *args) -> None:
+        # only the source-of-signature callable is kept; extra args are ignored
+        self.func = func
+
+    def __call__(self, *args, **kwargs) -> F:
+        # when invoked with exactly one argument (decoration), return the
+        # decorated object unchanged; otherwise delegate to the stored callable
+        if len(args) == 1:
+            return args[0]
+        return self.func(*args, **kwargs)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/validation_metadata.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/validation_metadata.py
new file mode 100644
index 000000000..c88c2f493
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/python_readme_header_snippet/validation_metadata.py
@@ -0,0 +1,75 @@
+import frozendict
+import typing
+from python_readme_header_snippet.configuration import (
+ Configuration,
+)
+
+class ValidationMetadata(frozendict.frozendict):
+ """
+ A class storing metadata that is needed to validate OpenApi Schema payloads
+ """
+ def __new__(
+ cls,
+ path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
+ from_server: bool = False,
+ configuration: typing.Optional[Configuration] = None,
+ seen_classes: typing.FrozenSet[typing.Type] = frozenset(),
+ validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]] = frozendict.frozendict()
+ ):
+ """
+ Args:
+ path_to_item: the path to the current data being instantiated.
+ For {'a': [1]} if the code is handling, 1, then the path is ('args[0]', 'a', 0)
+ This changes from location to location
+ from_server: whether or not this data came form the server
+ True when receiving server data
+ False when instantiating model with client side data not form the server
+ This does not change from location to location
+ configuration: the Configuration instance to use
+ This is needed because in Configuration:
+ - one can disable validation checking
+ This does not change from location to location
+ seen_classes: when deserializing data that matches multiple schemas, this is used to store
+ the schemas that have been traversed. This is used to stop processing when a cycle is seen.
+ This changes from location to location
+ validated_path_to_schemas: stores the already validated schema classes for a given path location
+ This does not change from location to location
+ """
+ return super().__new__(
+ cls,
+ path_to_item=path_to_item,
+ from_server=from_server,
+ configuration=configuration,
+ seen_classes=seen_classes,
+ validated_path_to_schemas=validated_path_to_schemas
+ )
+
+ def validation_ran_earlier(self, cls: type) -> bool:
+ validated_schemas = self.validated_path_to_schemas.get(self.path_to_item, set())
+ validation_ran_earlier = validated_schemas and cls in validated_schemas
+ if validation_ran_earlier:
+ return True
+ if cls in self.seen_classes:
+ return True
+ return False
+
+ @property
+ def path_to_item(self) -> typing.Tuple[typing.Union[str, int], ...]:
+ return self.get('path_to_item')
+
+ @property
+ def from_server(self) -> bool:
+ return self.get('from_server')
+
+ @property
+ def configuration(self) -> typing.Optional[Configuration]:
+ return self.get('configuration')
+
+ @property
+ def seen_classes(self) -> typing.FrozenSet[typing.Type]:
+ return self.get('seen_classes')
+
+ @property
+ def validated_path_to_schemas(self) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]]:
+ return self.get('validated_path_to_schemas')
+
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/setup.cfg b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/setup.cfg
new file mode 100644
index 000000000..11433ee87
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/setup.cfg
@@ -0,0 +1,2 @@
+[flake8]
+max-line-length=99
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/setup.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/setup.py
new file mode 100644
index 000000000..55837517c
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/setup.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+from setuptools import setup, find_packages # noqa: H301
+
+NAME = "python-readme-header-snippet"
+VERSION = "1.0.0-beta.1"
+# To install the library, run the following
+#
+# python setup.py install
+#
+# prerequisite: setuptools
+# http://pypi.python.org/pypi/setuptools
+
+# read the contents of README file
+from pathlib import Path
+this_directory = Path(__file__).parent
+long_description = (this_directory / "README.md").read_text()
+
+REQUIRES = [
+ "certifi >= 2023.7.22",
+ "python-dateutil ~= 2.8.2",
+ "typing_extensions ~= 4.3.0",
+ "urllib3 ~= 1.26.18",
+ "frozendict ~= 2.3.4",
+ "aiohttp ~= 3.8.4",
+ "pydantic ~= 2.4.2"
+]
+
+setup(
+ name=NAME,
+ version=VERSION,
+ description="python-readme-header-snippet API",
+ author="API Support",
+ author_email="support@example.com",
+ url="https://github.com/konfig-dev/konfig/tree/main/python",
+ keywords=["Konfig", "python-readme-header-snippet API"],
+ python_requires=">=3.7",
+ install_requires=REQUIRES,
+ packages=find_packages(exclude=["test", "tests"]),
+ include_package_data=True,
+ license="Apache 2.0",
+ long_description=long_description,
+ long_description_content_type='text/markdown'
+)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_check_url.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_check_url.py
new file mode 100644
index 000000000..d60b6d01c
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_check_url.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import unittest
+from python_readme_header_snippet.configuration import check_url
+from python_readme_header_snippet.exceptions import InvalidHostConfigurationError
+
+
+class TestIsValidUrl(unittest.TestCase):
+ def test_valid_urls(self):
+ valid_urls = [
+ "http://www.example.com",
+ "https://www.example.com",
+ "http://example.com",
+ "https://example.com/path/to/resource",
+ "http://example.com:8080",
+ "https://example.co.uk",
+ "https://subdomain.example.com",
+ "https://api.example.com/v1/resource",
+ "https://example.com/path/to/resource/123",
+ "https://www.example.com:8080",
+ "https://www.example.com:8080/path/to/resource",
+ "http://sub.example.com:8080",
+ "http://deep.sub.domain.example.com",
+ "http://127.0.0.1:4010",
+ "https://deep.sub.domain.example.com:8080/path",
+ "http://example.io",
+ "https://example.app",
+ ]
+ for url in valid_urls:
+ with self.subTest(url=url):
+ self.assertTrue(check_url(url))
+
+ def test_invalid_urls(self):
+ invalid_urls = [
+ "not_a_url",
+ "http:/example.com",
+ "http://",
+ "http://.com",
+ "example.com",
+ "http://example.com#fragment",
+ "www.example.com",
+ "https://example.com/path/to/resource?query=value",
+ "https://example.com/path/to/resource?query=value&key2=value2",
+ "https://",
+ "ftp://files.example.com",
+ "//example.com",
+ "https://example,com",
+ "https:/example.com",
+ "https:// example.com",
+ "https://example.com path",
+ "http://..com",
+ "https://..example.com",
+ "http://example..com",
+ "https://example.com./path",
+ "https://example.com..",
+ "http://:8080",
+ "https://example.com:",
+ "http://example.com:abc",
+ "https://.example.com",
+ "http://example.",
+ "https:// example:8080.com",
+ "http:// example.com:8080/path",
+ "https://:8080/path",
+ ]
+ for url in invalid_urls:
+ with self.subTest(url=url):
+ with self.assertRaises(InvalidHostConfigurationError):
+ check_url(url)
+ raise Exception("URL should be invalid: " + url)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_deprecation_warning.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_deprecation_warning.py
new file mode 100644
index 000000000..b403f6ef2
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_deprecation_warning.py
@@ -0,0 +1,55 @@
+import unittest
+from unittest.mock import patch
+
+from python_readme_header_snippet.api_client import ApiClient, DeprecationWarningOnce
+from python_readme_header_snippet.configuration import Configuration
+
+
+class RandomClass:
+ configuration = Configuration(
+
+ api_key = 'YOUR_API_KEY',
+ )
+ api_client = ApiClient(configuration)
+
+ @DeprecationWarningOnce
+ def deprecated_method(self):
+ return "Method called"
+
+ @DeprecationWarningOnce(prefix="tag")
+ def deprecated_method_with_prefix(self):
+ return "Method with prefix called"
+
+
+class TestDeprecationWarning(unittest.TestCase):
+ @patch("logging.Logger.warning")
+ def test_deprecation_warning_without_prefix(self, mock_warning):
+ obj = RandomClass()
+
+ obj.deprecated_method()
+ obj.deprecated_method()
+
+ # Check that the logger.warning() was called once
+ self.assertEqual(mock_warning.call_count, 1)
+
+ # Get the warning message
+ warning_msg = mock_warning.call_args[0][0]
+
+ # Check the content of the warning message
+ self.assertNotIn("tag", warning_msg)
+
+ @patch("logging.Logger.warning")
+ def test_deprecation_warning_with_prefix(self, mock_warning):
+ obj = RandomClass()
+
+ obj.deprecated_method_with_prefix()
+ obj.deprecated_method_with_prefix()
+
+ # Check that the logger.warning() was called once
+ self.assertEqual(mock_warning.call_count, 1)
+
+ # Get the warning message
+ warning_msg = mock_warning.call_args[0][0]
+
+ # Check the content of the warning message
+ self.assertIn("tag", warning_msg)
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_models/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_models/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/__init__.py
new file mode 100644
index 000000000..ccdcdc538
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/__init__.py
@@ -0,0 +1,68 @@
+import json
+import typing
+
+import urllib3
+from urllib3._collections import HTTPHeaderDict
+
+
+class ApiTestMixin:
+ json_content_type = 'application/json'
+ user_agent = 'Konfig/1.0.0/python'
+
+ @classmethod
+ def assert_pool_manager_request_called_with(
+ cls,
+ mock_request,
+ url: str,
+ method: str = 'POST',
+ body: typing.Optional[bytes] = None,
+ content_type: typing.Optional[str] = None,
+ accept_content_type: typing.Optional[str] = None,
+ stream: bool = False,
+ ):
+ headers = {
+ 'User-Agent': cls.user_agent
+ }
+ if accept_content_type:
+ headers['Accept'] = accept_content_type
+ if content_type:
+ headers['Content-Type'] = content_type
+ kwargs = dict(
+ headers=HTTPHeaderDict(headers),
+ preload_content=not stream,
+ timeout=None,
+ )
+ if content_type and method != 'GET':
+ kwargs['body'] = body
+ mock_request.assert_called_with(
+ method,
+ url,
+ **kwargs
+ )
+
+ @staticmethod
+ def headers_for_content_type(content_type: str) -> typing.Dict[str, str]:
+ return {'content-type': content_type}
+
+ @classmethod
+ def response(
+ cls,
+ body: typing.Union[str, bytes],
+ status: int = 200,
+ content_type: str = json_content_type,
+ headers: typing.Optional[typing.Dict[str, str]] = None,
+ preload_content: bool = True
+ ) -> urllib3.HTTPResponse:
+ if headers is None:
+ headers = {}
+ headers.update(cls.headers_for_content_type(content_type))
+ return urllib3.HTTPResponse(
+ body,
+ headers=headers,
+ status=status,
+ preload_content=preload_content
+ )
+
+ @staticmethod
+ def json_bytes(in_data: typing.Any) -> bytes:
+ return json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode('utf-8')
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/test_simple_endpoint/__init__.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/test_simple_endpoint/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/test_simple_endpoint/test_get.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/test_simple_endpoint/test_get.py
new file mode 100644
index 000000000..8f3eef311
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_paths/test_simple_endpoint/test_get.py
@@ -0,0 +1,43 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import unittest
+from unittest.mock import patch
+
+import urllib3
+
+import python_readme_header_snippet
+from python_readme_header_snippet.paths.simple_endpoint import get
+from python_readme_header_snippet import configuration, schemas, api_client
+
+from .. import ApiTestMixin
+
+
+class TestSimpleEndpoint(ApiTestMixin, unittest.TestCase):
+ """
+ SimpleEndpoint unit test stubs
+ Fetches a JSON value based on input parameter
+ """
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ response_status = 200
+
+
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_simple.py b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_simple.py
new file mode 100644
index 000000000..2b3b4835f
--- /dev/null
+++ b/generator/konfig-integration-tests/sdks/python-readme-header-snippet/python/test/test_simple.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+
+"""
+ python-readme-header-snippet API
+
+ A simple API based for testing python-readme-header-snippet.
+
+ The version of the OpenAPI document: 1.0.0
+ Contact: support@example.com
+ Created by: http://example.com/support
+"""
+
+import unittest
+
+import os
+from pprint import pprint
+from python_readme_header_snippet import PythonReadmeHeaderSnippetClient
+
+class TestSimple(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def test_client(self):
+ pythonreadmeheadersnippetclient = PythonReadmeHeaderSnippetClient(
+
+ api_key = 'YOUR_API_KEY',
+ )
+ self.assertIsNotNone(pythonreadmeheadersnippetclient)
+
+ def tearDown(self):
+ pass
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/generator/konfig-integration-tests/tests/__snapshots__/python-readme-header-snippet.test.ts.snap b/generator/konfig-integration-tests/tests/__snapshots__/python-readme-header-snippet.test.ts.snap
new file mode 100644
index 000000000..f827d1ce3
--- /dev/null
+++ b/generator/konfig-integration-tests/tests/__snapshots__/python-readme-header-snippet.test.ts.snap
@@ -0,0 +1,170 @@
+// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
+
+exports[`python-readme-header-snippet 1`] = `
+"# python-readme-header-snippet
+
+A simple API based for testing python-readme-header-snippet.
+
+
+[![PyPI](https://img.shields.io/badge/PyPI-v1.0.0beta.1-blue)](https://pypi.org/project/python-readme-header-snippet/1.0.0-beta.1)
+[![README.md](https://img.shields.io/badge/README-Click%20Here-green)](https://github.com/konfig-dev/konfig/tree/main/python#readme)
+[![More Info](https://img.shields.io/badge/More%20Info-Click%20Here-orange)](http://example.com/support)
+
+> [!WARNING]
+> This SDK has breaking changes from \`< 1.0.0\` versions.
+> All methods now return Pydantic models.
+>
+> ### Before (\`< 1.0.0\`)
+>
+> Previously, you had to use the \`[]\` syntax to access response values. This
+> required a little more code for every property access.
+>
+> \`\`\`python
+> chat_response = humanloop.chat(
+> # parameters
+> )
+> print(chat_response.body[\\"project_id\\"])
+> \`\`\`
+>
+> ### After (\`>= 1.0.0\`)
+>
+> With Pydantic-based response values, you can use the \`.\` syntax to access. This
+> is slightly less verbose and looks more Pythonic.
+>
+> \`\`\`python
+> chat_response = humanloop.chat(
+> # parameters
+> )
+> print(chat_response.project_id)
+> \`\`\`
+>
+> See the [Raw HTTP Response](#raw-http-response) for accessing raw HTTP response values like headers and status codes.
+
+## Table of Contents
+
+
+
+- [Requirements](#requirements)
+- [Installing](#installing)
+- [Getting Started](#getting-started)
+- [Async](#async)
+- [Raw HTTP Response](#raw-http-response)
+- [Reference](#reference)
+ * [\`pythonreadmeheadersnippetclient.test.fetch\`](#pythonreadmeheadersnippetclienttestfetch)
+
+
+
+## Requirements
+
+Python >=3.7
+
+## Installing
+
+\`\`\`sh
+pip install python-readme-header-snippet==1.0.0-beta.1
+\`\`\`
+
+## Getting Started
+
+\`\`\`python
+from pprint import pprint
+from python_readme_header_snippet import PythonReadmeHeaderSnippetClient, ApiException
+
+pythonreadmeheadersnippetclient = PythonReadmeHeaderSnippetClient(
+ api_key=\\"YOUR_API_KEY\\",
+)
+
+try:
+ # Fetches a JSON value based on input parameter
+ fetch_response = pythonreadmeheadersnippetclient.test.fetch()
+except ApiException as e:
+ print(\\"Exception when calling TestApi.fetch: %s\\\\n\\" % e)
+ pprint(e.body)
+ pprint(e.headers)
+ pprint(e.status)
+ pprint(e.reason)
+ pprint(e.round_trip_time)
+\`\`\`
+
+## Async
+
+\`async\` support is available by prepending \`a\` to any method.
+
+\`\`\`python
+import asyncio
+from pprint import pprint
+from python_readme_header_snippet import PythonReadmeHeaderSnippetClient, ApiException
+
+pythonreadmeheadersnippetclient = PythonReadmeHeaderSnippetClient(
+ api_key=\\"YOUR_API_KEY\\",
+)
+
+
+async def main():
+ try:
+ # Fetches a JSON value based on input parameter
+ fetch_response = await pythonreadmeheadersnippetclient.test.afetch()
+ except ApiException as e:
+ print(\\"Exception when calling TestApi.fetch: %s\\\\n\\" % e)
+ pprint(e.body)
+ pprint(e.headers)
+ pprint(e.status)
+ pprint(e.reason)
+ pprint(e.round_trip_time)
+
+
+asyncio.run(main())
+\`\`\`
+
+## Raw HTTP Response
+
+To access raw HTTP response values, use the \`.raw\` namespace.
+
+\`\`\`python
+from pprint import pprint
+from python_readme_header_snippet import PythonReadmeHeaderSnippetClient, ApiException
+
+pythonreadmeheadersnippetclient = PythonReadmeHeaderSnippetClient(
+ api_key=\\"YOUR_API_KEY\\",
+)
+
+try:
+ # Fetches a JSON value based on input parameter
+ fetch_response = pythonreadmeheadersnippetclient.test.raw.fetch()
+ pprint(fetch_response.headers)
+ pprint(fetch_response.status)
+ pprint(fetch_response.round_trip_time)
+except ApiException as e:
+ print(\\"Exception when calling TestApi.fetch: %s\\\\n\\" % e)
+ pprint(e.body)
+ pprint(e.headers)
+ pprint(e.status)
+ pprint(e.reason)
+ pprint(e.round_trip_time)
+\`\`\`
+
+
+## Reference
+### \`pythonreadmeheadersnippetclient.test.fetch\`
+
+Provide an input parameter to receive a JSON value with properties.
+
+#### 🛠️ Usage
+
+\`\`\`python
+fetch_response = pythonreadmeheadersnippetclient.test.fetch()
+\`\`\`
+
+#### 🌐 Endpoint
+
+\`/simple-endpoint\` \`get\`
+
+[🔙 **Back to Table of Contents**](#table-of-contents)
+
+---
+
+
+## Author
+This Python package is automatically generated by [Konfig](https://konfigthis.com)
+"
+`;
diff --git a/generator/konfig-integration-tests/tests/python-readme-header-snippet.test.ts b/generator/konfig-integration-tests/tests/python-readme-header-snippet.test.ts
new file mode 100644
index 000000000..729b8e00e
--- /dev/null
+++ b/generator/konfig-integration-tests/tests/python-readme-header-snippet.test.ts
@@ -0,0 +1,6 @@
+import { e2e } from "../util";
+import { test } from "vitest";
+
+test("python-readme-header-snippet", async () => {
+ await e2e(4009);
+});
diff --git a/misc/openapi-generator-configs/openapi-generator-api/api.yaml b/misc/openapi-generator-configs/openapi-generator-api/api.yaml
index 34427f5f9..f77aea79e 100644
--- a/misc/openapi-generator-configs/openapi-generator-api/api.yaml
+++ b/misc/openapi-generator-configs/openapi-generator-api/api.yaml
@@ -215,6 +215,8 @@ components:
type: string
readmeSnippet:
type: string
+ readmeHeaderSnippet:
+ type: string
asyncReadmeSnippet:
type: string
readmeSupportingDescriptionSnippet: