diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index eb2bce690..834643955 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -11,13 +11,13 @@ jobs: test: strategy: matrix: - python: [ "3.8", "3.9", "3.10", "3.11", "3.12" ] + python: [ "3.10", "3.11", "3.12", "3.13", "3.14" ] os: [ ubuntu-latest, macos-latest, windows-latest ] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@v6.0.1 - name: Set up Python - uses: actions/setup-python@v5.0.0 + uses: actions/setup-python@v6.0.0 with: python-version: ${{ matrix.python }} @@ -27,7 +27,7 @@ jobs: shell: bash - name: Cache dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: .venv key: ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-dependencies-${{ hashFiles('**/pdm.lock') }} @@ -42,9 +42,6 @@ jobs: - name: Check formatting run: pdm run ruff format . --check - - name: Run safety - run: pdm safety_check - - name: Run mypy run: pdm mypy --show-error-codes @@ -54,37 +51,70 @@ jobs: - name: Run pytest without coverage if: matrix.os != 'ubuntu-latest' run: pdm test - env: - TASKIPY: true - name: Run pytest with coverage if: matrix.os == 'ubuntu-latest' run: pdm test_with_coverage - env: - TASKIPY: true - + - run: mv .coverage .coverage.${{ matrix.python }} if: matrix.os == 'ubuntu-latest' - name: Store coverage report - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v6.0.0 if: matrix.os == 'ubuntu-latest' with: name: coverage-${{ matrix.python }} path: .coverage.${{ matrix.python }} if-no-files-found: error + include-hidden-files: true + + test_min_deps: + strategy: + matrix: + os: [ ubuntu-latest, macos-latest, windows-latest ] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v6.0.1 + - name: Set up Python + uses: actions/setup-python@v6.0.0 + with: + python-version: "3.10" + + - name: Get Python Version + id: get_python_version + run: echo "python_version=$(python --version)" >> $GITHUB_OUTPUT + shell: bash + + - name: Cache dependencies + uses: actions/cache@v5 + with: + path: .venv + key: ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-min-dependencies-${{ hashFiles('**/pdm.lock') }} + restore-keys: | + ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-min-dependencies + - name: Install PDM + run: pip install pdm + + - name: Install minimum dependencies + run: pdm install -L pdm.minimal.lock + + - name: Run mypy + run: pdm mypy --show-error-codes + + - name: Run unit tests only # snapshots are expected to fail + run: pdm unit_test coverage: name: Combine & check coverage needs: test runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4.1.1 - - uses: actions/setup-python@v5 + - uses: actions/checkout@v6.0.1 + - uses: actions/setup-python@v6 with: python-version: "3.12" - name: Download coverage reports - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v7.0.0 with: merge-multiple: true @@ -98,18 +128,18 @@ jobs: # Find all of the downloaded coverage reports and combine them .venv/bin/python -m coverage combine - + # Create html report .venv/bin/python -m coverage html --skip-covered --skip-empty - + # Report in Markdown and write to summary. .venv/bin/python -m coverage report --format=markdown >> $GITHUB_STEP_SUMMARY - + # Report again and fail if under 100%. 
- .venv/bin/python -m coverage report --fail-under=100 + .venv/bin/python -m coverage report --fail-under=100 - name: Upload HTML report if check failed. - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v6.0.0 with: name: html-report path: htmlcov @@ -120,51 +150,35 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - httpx_version: - - "0.20.0" - - "" + lockfile: + - "pdm.lock" + - "pdm.minimal.lock" services: openapi-test-server: - image: ghcr.io/openapi-generators/openapi-test-server:0.0.1 + image: ghcr.io/openapi-generators/openapi-test-server:0.2.1 ports: - "3000:3000" steps: - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@v6.0.1 - name: Set up Python - uses: actions/setup-python@v5.0.0 + uses: actions/setup-python@v6.0.0 with: - python-version: "3.8" + python-version: "3.10" - name: Get Python Version id: get_python_version run: echo "python_version=$(python --version)" >> $GITHUB_OUTPUT - - name: Cache dependencies - uses: actions/cache@v4 - with: - path: .venv - key: ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-dependencies-${{ hashFiles('**/pdm.lock') }} - restore-keys: | - ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-dependencies - - name: Install dependencies - run: | - pip install pdm - python -m venv .venv - pdm install - name: Cache Generated Client Dependencies - uses: actions/cache@v4 + uses: actions/cache@v5 with: path: integration-tests/.venv - key: ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-integration-dependencies-${{ hashFiles('**/pdm.lock') }} + key: ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-integration-dependencies-${{ hashFiles('integration-tests/pdm*.lock') }} restore-keys: | ${{ runner.os }}-${{ steps.get_python_version.outputs.python_version }}-integration-dependencies - - name: Set httpx version - if: matrix.httpx_version != '' - run: | - cd integration-tests - pdm add httpx==${{ matrix.httpx_version }} - name: Install Integration Dependencies run: | cd integration-tests - pdm install + pip install pdm + pdm install -L ${{ matrix.lockfile }} - name: Run Tests run: | cd integration-tests diff --git a/.github/workflows/preview_release_pr.yml b/.github/workflows/preview_release_pr.yml deleted file mode 100644 index 42d428d1a..000000000 --- a/.github/workflows/preview_release_pr.yml +++ /dev/null @@ -1,24 +0,0 @@ -on: - push: - branches: [main] -name: Create Release PR -jobs: - prepare-release: - if: "!contains(github.event.head_commit.message, 'chore: prepare release')" # Skip merges from releases - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4.1.1 - with: - fetch-depth: 0 - token: ${{ secrets.PAT }} - - name: Configure Git - run: | - git config --global user.name GitHub Actions - git config user.email github-actions@github.com - - uses: knope-dev/action@v2.0.0 - with: - version: 0.14.1 - - run: knope prepare-release --verbose - env: - GITHUB_TOKEN: ${{ secrets.PAT }} - continue-on-error: true diff --git a/.github/workflows/release-dry-run.yml b/.github/workflows/release-dry-run.yml deleted file mode 100644 index 36054548c..000000000 --- a/.github/workflows/release-dry-run.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Release Dry Run - -on: - pull_request: -jobs: - release: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4.1.1 - with: - fetch-depth: 0 - token: ${{ secrets.GITHUB_TOKEN }} - - name: Install Knope - uses: knope-dev/action@v2.0.0 - with: - version: 0.14.1 - - run: knope 
prepare-release --dry-run diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e0ad85d0d..88fddd9ae 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,26 +7,17 @@ on: jobs: release: - if: github.head_ref == 'release' && github.event.pull_request.merged == true + if: github.head_ref == 'knope/release' && github.event.pull_request.merged == true runs-on: ubuntu-latest permissions: id-token: write steps: - - uses: actions/checkout@v4.1.1 - with: - fetch-depth: 0 - token: ${{ secrets.PAT }} - - name: Install Knope - uses: knope-dev/action@v2.0.0 - with: - version: 0.14.1 + - uses: actions/checkout@v6.0.1 - name: Install Hatchling run: pip install --upgrade hatchling - name: Build run: hatchling build - name: Push to PyPI - uses: pypa/gh-action-pypi-publish@v1.8.14 - - name: Create GitHub Release - run: knope release - env: - GITHUB_TOKEN: ${{ secrets.PAT }} + uses: pypa/gh-action-pypi-publish@v1.13.0 + with: + attestations: true diff --git a/.gitignore b/.gitignore index 5689da19b..1cd5f6801 100644 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,9 @@ dmypy.json # JetBrains .idea/ +# Visual Studio Code +.vscode/ + test-reports/ /coverage.xml @@ -33,4 +36,5 @@ htmlcov/ # Generated end to end test data my-test-api-client/ custom-e2e/ -3-1-features-client \ No newline at end of file +3-1-features-client +tests/tmp \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b87c180a..16cdae6b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,565 @@ Programmatic usage of this project (e.g., importing it as a Python module) and t The 0.x prefix used in versions for this project is to indicate that breaking changes are expected frequently (several times a year). Breaking changes will increment the minor number, all other changes will increment the patch number. You can track the progress toward 1.0 [here](https://github.com/openapi-generators/openapi-python-client/projects/2). +## 0.28.1 (2026-01-10) + +### Fixes + +- Apply required overrides from allOf schemas (#1384) +- Sort lazy imports to increase stability of generated code (#1378) + +## 0.28.0 (2025-12-03) + +### Breaking Changes + +- URL-encode path parameters in generated endpoints (#1349) + +### Fixes + +#### Fix bad code generation + +##1360 by @EricAtORS + +This fixes: +- missing parenthesis in to_multipart + #1338 #1318 +- missing imports in the lazy eval in to_multipart: +##931 and #1051 + +#### Fix optional bodies + +If a body is not required (the default), it will now: + +1. Have `Unset` as part of its type annotation. +2. Default to a value of `UNSET` +3. Not be included in the request if it is `UNSET` + +Thanks @orelmaliach for the report! Fixes #1354 + +## 0.27.1 (2025-11-03) + +### Fixes + +- Remove non-existent CHANGELOG.md references from UV and Poetry templates (#1344) +- Initialize optional lists as UNSET, not [] (#1346) +- Correct docstring typos in client template (#1347) +- Replace bare except blocks with specific exception types (#1348) + +#### Update `uv_build` to 0.9 + +##1352 by @johnthagen + +`uv` has been in the `0.9.x` release cycle for a while, so update templates to use the corresponding `uv_build` range. + +## 0.27.0 (2025-10-28) + +### Breaking Changes + +#### Drop support for Python 3.9 + +Both `openapi-python-client` itself and any generated clients no longer support Python 3.9. 
+ +#### Generated models now use `from __future__ import annotations` + +This simplifies using forward references with the newer union syntax. + +### Features + +#### Upgrade generated clients to 3.10 union syntax + +All generated types now use the `A | B` syntax instead of `Union[A, B]` or `Optional[A]`. + +### Fixes + +- Drop generated `requires-python` upper bounds for uv and PDM (#1329) + +#### Change default Ruff hook to `--fix-only` + +This should enable `openapi-python-client` to keep auto-fixing lints (like removing unused imports) but _not_ fail to +generate when unfixable lints are violated. + +Since it's now unlikely for breaking changes to affect our usage (and by popular request), the upper bound of `ruff` +has been lifted. Newer versions of `openapi-python-client` should no longer be required to support newer versions of `ruff`. + +### Notes + +- Minimum Typer version is now 0.16 + +## 0.26.2 (2025-10-06) + +### Fixes + +- ambigious tilde specifier requires-python with`--meta=uv` (#1321) + +## 0.26.1 (2025-09-13) + +### Features + +- Reference schema support (#800) (#1307) +- Support Ruff 0.13 + +## 0.26.0 (2025-08-26) + +### Breaking Changes + +#### Change some union variant names + +When creating a union with `oneOf`, `anyOf`, or a list of `type`, the name of each variant used to be `type_{index}` +where the index is based on the order of the types in the union. + +This made some modules difficult to understand, what is a `my_type_type_0` after all? +It also meant that reordering union members, while not a breaking change to the API, _would_ be a breaking change +for generated clients. + +Now, if an individual variant has a `title` attribute, that `title` will be used in the name instead. +This is only an enhancement for documents which use `title` in union variants, and only a breaking change for +_inline models_ (not `#/components/schemas` which should already have used more descriptive names). + +Thanks @wallagib for PR #962! + +### Features + +#### Support patterned and default HTTP statuses + +HTTP statuses like `2XX` and `default` are now supported! + +A big thank you to: +- @PSU3D0 for PR #973 (eons ago 😅) +- @obs-gh-peterkolloch for PR #1300 +- @goodsonjr for PR #1304 + +Closes #1271 and #832 + +> [!NOTE] +> Custom template users: the `endpoint.responses` type has changed quite a bit. Check out #1303 for the changes. + +## 0.25.3 (2025-07-21) + +### Features + +- Add --meta uv for generating astral-sh/uv compatible packages. (#1286) +- Switch to `uv_build` build backend. (#1290) + +## 0.25.2 (2025-07-03) + +### Fixes + +- Import error for `types.FileType` (#1274) (#1278) + +## 0.25.1 (2025-06-19) + +### Fixes + +- Support ruff 0.12 (#1270) + +## 0.25.0 (2025-06-06) + +### Breaking Changes + +- Raise minimum httpx version to 0.23 + +#### Removed ability to set an array as a multipart body + +Previously, when defining a request's body as `multipart/form-data`, the generator would attempt to generate code +for both `object` schemas and `array` schemas. However, most arrays could not generate valid multipart bodies, as +there would be no field names (required to set the `Content-Disposition` headers). + +The code to generate any body for `multipart/form-data` where the schema is `array` has been removed, and any such +bodies will be skipped. This is not _expected_ to be a breaking change in practice, since the code generated would +probably never work. 
+ +If you have a use-case for `multipart/form-data` with an `array` schema, please [open a new discussion](https://github.com/openapi-generators/openapi-python-client/discussions) with an example schema and the desired functional Python code. + +#### Change default multipart array serialization + +Previously, any arrays of values in a `multipart/form-data` body would be serialized as an `application/json` part. +This matches the default behavior specified by OpenAPI and supports arrays of files (`binary` format strings). +However, because this generator doesn't yet support specifying `encoding` per property, this may result in +now-incorrect code when the encoding _was_ explicitly set to `application/json` for arrays of scalar values. + +PR #938 fixes #692. Thanks @micha91 for the fix, @ratgen and @FabianSchurig for testing, and @davidlizeng for the original report... many years ago 😅. + +## 0.24.3 (2025-03-31) + +### Features + +#### Adding support for named integer enums + +##1214 by @barrybarrette + +Adding support for named integer enums via an optional extension, `x-enum-varnames`. + +This extension is added to the schema inline with the `enum` definition: +``` +"MyEnum": { + "enum": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 99 + ], + "type": "integer", + "format": "int32", + "x-enum-varnames": [ + "Deinstalled", + "Installed", + "Upcoming_Site", + "Lab_Site", + "Pending_Deinstall", + "Suspended", + "Install_In_Progress", + "Unknown" + ] +} +``` + +The result: +![image](https://github.com/user-attachments/assets/780880b3-2f1f-49be-823b-f9abb713a3e1) + +## 0.24.2 (2025-03-22) + +### Fixes + +#### Make lists of models and enums work correctly in custom templates + +Lists of model and enum classes should be available to custom templates via the Jinja +variables `openapi.models` and `openapi.enums`, but these were being passed in a way that made +them always appear empty. This has been fixed so a custom template can now iterate over them. + +Closes #1188. + +## 0.24.1 (2025-03-15) + +### Features + +- allow Ruff to 0.10 (#1220) +- allow Ruff 0.11 (#1222) +- Allow any `Mapping` in generated `from_dict` functions (#1211) + +### Fixes + +#### Always parse `$ref` as a reference + +If additional attributes were included with a `$ref` (for example `title` or `description`), the property could be +interpreted as a new type instead of a reference, usually resulting in `Any` in the generated code. +Now, any sibling properties to `$ref` will properly be ignored, as per the OpenAPI specification. + +Thanks @nkrishnaswami! + +## 0.24.0 (2025-03-03) + +### Breaking Changes + +#### Support `$ref` in responses + +Previously, using a `$ref` to define a response was ignored, the code to call the endpoint was still generated, but +the response would not be parsed. Now, responses defined with `$ref` will be used to generate the response model, which +will parse the response at runtime. + +If a `$ref` is incorrect or uses a feature that is not supported by the generator, these endpoints will start failing to +generate. + +### Features + +#### Make `config` available in custom templates + +The configuration options object is now exposed as a variable called `config` in Jinja2 templates. 
+ +#### Add `docstrings_on_attributes` config setting + +Setting this option to `true` changes the docstring behavior in model classes: for any attribute that have a non-empty `description`, instead of describing the attribute as part of the class's docstring, the description will appear in an individual docstring for that attribute. + +## 0.23.1 (2025-01-13) + +### Features + +- allow Ruff 0.9 (#1192) + +## 0.23.0 (2024-12-24) + +### Breaking Changes + +#### Delete fewer files with `--overwrite` + +`--overwrite` will no longer delete the entire output directory before regenerating. Instead, it will only delete +specific, known directories within that directory. Right now, that is only the generated `models` and `api` directories. + +Other generated files, like `README.md`, will be overwritten. Extra files and directories outside of those listed above +will be left untouched, so you can any extra modules or files around while still updating `pyproject.toml` automatically. + +Closes #1105. + +### Features + +- Support httpx 0.28 (#1172) + +#### Add `generate_all_tags` config option + +You can now, optionally, generate **duplicate** endpoint functions/modules using _every_ tag for an endpoint, +not just the first one, by setting `generate_all_tags: true` in your configuration file. + +### Fixes + +- Support Typer 0.14 and 0.15 (#1173) + +#### Fix minimum `attrs` version + +The minimum `attrs` dependency version was incorrectly set to 21.3.0. This has been corrected to 22.2.0, the minimum +supported version since `openapi-python-client` 0.19.1. + +Closes #1084, thanks @astralblue! + +#### Fix compatibility with Pydantic 2.10+ + +##1176 by @Viicos + +Set `defer_build` to models that we know will fail to build, and call `model_rebuild` +in the `__init__.py` file. + +## 0.22.0 (2024-11-23) + +### Breaking Changes + +#### Drop support for Python 3.8 + +Python 3.8 is no longer supported. "New" 3.9 syntax, like generics on builtin collections, is used both in the generator +and the generated code. + +#### `type` is now a reserved field name + +Because `type` is used in type annotations now, it is no longer a valid field name. Fields which were previously named +`type` will be renamed to `type_`. + +### Features + +- Support Ruff 0.8 (#1169) + +## 0.21.7 (2024-10-28) + +### Fixes + +- allow required fields list to be specified as empty (#651) (#1149) +- import cast for required const properties, since it's used in the template (#1153) + +## 0.21.6 (2024-10-20) + +### Features + +- update Ruff to >=0.2,<0.8 (#1137) +- Add UUID string format. Thanks @estyrke! (#1140) +- Support OpenAPI 3.1 prefixItems property for arrays. Thanks @estyrke! (#1141) + +#### Add `literal_enums` config setting + +Instead of the default `Enum` classes for enums, you can now generate `Literal` sets wherever `enum` appears in the OpenAPI spec by setting `literal_enums: true` in your config file. + +```yaml +literal_enums: true +``` + +Thanks to @emosenkis for PR #1114 closes #587, #725, #1076, and probably many more. +Thanks also to @eli-bl, @expobrain, @theorm, @chrisguillory, and anyone else who helped getting to this design! + +### Fixes + +- Typo in docstring (#1128) + +#### Use literal value instead of `HTTPStatus` enum when checking response statuses + +Python 3.13 renamed some of the `HTTPStatus` enum members, which means clients generated with Python 3.13 may not work +with older versions of Python. This change stops using the `HTTPStatus` enum directly when checking response statuses. 
+ +Statuses will still be checked for validity at generation time, and transformed into `HTTPStatus` _after_ being checked +at runtime. + +This may cause some linters to complain. + +## 0.21.5 (2024-09-07) + +### Features + +#### Improved property-merging behavior with `allOf` + +When using `allOf` to extend a base object type, `openapi-python-client` is now able to handle some kinds of modifications to an existing property that would have previously caused an error: + +- Overriding attributes that do not affect validation, such as `description`. +- Combining properties that this generator ignores, like `maxLength` or `pattern`. +- Combining a generic numeric type with `int` (resulting in `int`). +- Adding a `format` to a string. +- Combining `any` with a specific type (resulting in that specific type). +- Adding or overriding a `default` + +> [!NOTE] +> `pattern` and `max_length` are no longer fields on `StringProperty`, which may impact custom templates. + +This also fixes a bug where properties of inline objects (as opposed to references) were not using the +merge logic, but were simply overwriting previous definitions of the same property. + +### Fixes + +- Allow default values for properties of `Any` type + +#### Produce valid code for an object that has no properties at all + +Fixed by PR #1109. Thanks @eli-bl! + +## 0.21.4 (2024-08-25) + +### Fixes + +#### Allow OpenAPI 3.1-style `exclusiveMinimum` and `exclusiveMaximum` + +Fixed by PR #1092. Thanks @mikkelam! + +#### Add missing `cast` import when using `const` + +Fixed by PR #1072. Thanks @dorcohe! + +#### Correctly resolve references to a type that is itself just a single allOf reference + +PR #1103 fixed issue #1091. Thanks @eli-bl! + +#### Support `const` booleans and floats + +Fixed in PR #1086. Thanks @flxdot! + +## 0.21.3 (2024-08-18) + +### Features + +- update Ruff to >=0.2,<0.7 (#1097) + +## 0.21.2 (2024-07-20) + +### Features + +- Update to Ruff 0.5 + +## 0.21.1 (2024-06-15) + +### Features + +#### Support request body refs + +You can now define and reuse bodies via refs, with a document like this: + +```yaml +paths: + /something: + post: + requestBody: + "$ref": "#/components/requestBodies/SharedBody" +components: + requestBodies: + SharedBody: + content: + application/json: + schema: + type: string +``` + +Thanks to @kigawas and @supermihi for initial implementations and @RockyMM for the initial request. + +Closes #633, closes #664, resolves #595. + +### Fixes + +- Indent of generated code for non-required lists. Thanks @sfowl! (#1050) +- Parsing requestBody with $ref (#633) + +## 0.21.0 (2024-06-08) + +### Breaking Changes + +#### Removed the `update` command + +The `update` command is no more, you can (mostly) replace its usage with some new flags on the `generate` command. + +If you had a package named `my-api-client` in the current working directory, the `update` command previously would update the `my_api_client` module within it. You can now _almost_ perfectly replicate this behavior using `openapi-python-client generate --meta=none --output-path=my-api-client/my_api_client --overwrite`. + +The only difference is that `my-api-client` would have run `post_hooks` in the `my-api-client` directory, +but `generate` will run `post_hooks` in the `output-path` directory. + +Alternatively, you can now also run `openapi-python-client generate --meta= --overwrite` to regenerate +the entire client, if you don't care about keeping any changes you've made to the generated client. 
+ +Please comment on [discussion #824](https://github.com/openapi-generators/openapi-python-client/discussions/824) +(or a new discussion, as appropriate) to aid in designing future features that fill any gaps this leaves for you. + +### Features + +#### Added an `--output-path` option to `generate` + +Rather than changing directories before running `generate` you can now specify an output directory with `--output-path`. +Note that the project name will _not_ be appended to the `--output-path`, whatever path you specify is where the +generated code will be placed. + +#### Added an `--overwrite` flag to `generate` + +You can now tell `openapi-python-client` to overwrite an existing directory, rather than deleting it yourself before +running `generate`. + +## 0.20.0 (2024-05-18) + +### Breaking Changes + +#### `const` values in responses are now validated at runtime + +Prior to this version, `const` values returned from servers were assumed to always be correct. Now, if a server returns +an unexpected value, the client will raise a `ValueError`. This should enable better usage with `oneOf`. + +PR #1024. Thanks @peter-greenatlas! + +#### Switch YAML parsing to 1.2 + +This change switches the YAML parsing library to `ruamel.yaml` which follows the YAML 1.2 specification. +[There are breaking changes](https://yaml.readthedocs.io/en/latest/pyyaml/#defaulting-to-yaml-12-support) from YAML 1.1 to 1.2, +though they will not affect most use cases. + +PR #1042 fixes #1041. Thanks @rtaycher! + +### Features + +- allow Ruff 0.4 (#1031) + +### Fixes + +#### Fix nullable and required properties in multipart bodies + +Fixes #926. + +> [!WARNING] +> This change is likely to break custom templates. Multipart body handling has been completely split from JSON bodies. + +## 0.19.1 (2024-03-27) + +### Features + +#### Add config option to override content types + +You can now define a `content_type_overrides` field in your `config.yml`: + +```yaml +content_type_overrides: + application/zip: application/octet-stream +``` + +This allows `openapi-python-client` to generate code for content types it doesn't recognize. + +PR #1010 closes #810. Thanks @gaarutyunov! + +### Fixes + +#### Add aliases to `Client` for pyright + +This should resolve incompatibilities between the generated `Client` class and the pyright type checker. + +PR #1009 closes #909. Thanks @patrick91! + ## 0.19.0 (2024-03-06) ### Breaking Changes diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 52b67100a..e3d9c68ab 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -41,7 +41,7 @@ To request a feature: ### Setting up a Dev Environment 1. Make sure you have [PDM](https://pdm-project.org) installed and up to date. -2. Make sure you have a supported Python version (e.g. 3.8) installed. +2. Make sure you have a supported Python version (e.g. 3.13) installed. 3. Use `pdm install` in the project directory to create a virtual environment with the relevant dependencies. ### Writing tests @@ -50,26 +50,40 @@ All changes must be tested, I recommend writing the test first, then writing the If you think that some of the added code is not testable (or testing it would add little value), mention that in your PR and we can discuss it. -1. If you're adding support for a new OpenAPI feature or covering a new edge case, add an [end-to-end test](#end-to-end-tests) -2. 
If you're modifying the way an existing feature works, make sure an existing test generates the _old_ code in `end_to_end_tests/golden-record`. You'll use this to check for the new code once your changes are complete. -3. If you're improving an error or adding a new error, add a [unit test](#unit-tests) +1. If you're adding support for a new OpenAPI feature or covering a new edge case, add [functional tests](#functional-tests), and optionally an [end-to-end snapshot test](#end-to-end-snapshot-tests). +2. If you're modifying the way an existing feature works, make sure functional tests cover this case. Existing end-to-end snapshot tests might also be affected if you have changed what generated model/endpoint code looks like. +3. If you're improving error handling or adding a new error, add [functional tests](#functional-tests). +4. For tests of low-level pieces of code that are fairly self-contained, and not tightly coupled to other internal implementation details, you can use regular [unit tests](#unit-tests). -#### End-to-end tests +#### End-to-end snapshot tests -This project aims to have all "happy paths" (types of code which _can_ be generated) covered by end to end tests (snapshot tests). In order to check code changes against the previous set of snapshots (called a "golden record" here), you can run `pdm e2e`. To regenerate the snapshots, run `pdm regen`. +This project aims to have all "happy paths" (types of code which _can_ be generated) covered by end-to-end tests. There are two types of these: snapshot tests, and functional tests. -There are 4 types of snapshots generated right now, you may have to update only some or all of these depending on the changes you're making. Within the `end_to_end_tets` directory: +Snapshot tests verify that the generated code is identical to a previously-committed set of snapshots (called a "golden record" here). They are basically regression tests to catch any unintended changes in the generator output. + +In order to check code changes against the previous set of snapshots (called a "golden record" here), you can run `pdm e2e`. To regenerate the snapshots, run `pdm regen`. + +There are 4 types of snapshots generated right now, you may have to update only some or all of these depending on the changes you're making. Within the `end_to_end_tests` directory: 1. `baseline_openapi_3.0.json` creates `golden-record` for testing OpenAPI 3.0 features 2. `baseline_openapi_3.1.yaml` is checked against `golden-record` for testing OpenAPI 3.1 features (and ensuring consistency with 3.0) 3. `test_custom_templates` are used with `baseline_openapi_3.0.json` to generate `custom-templates-golden-record` for testing custom templates 4. `3.1_specific.openapi.yaml` is used to generate `test-3-1-golden-record` and test 3.1-specific features (things which do not have a 3.0 equivalent) +#### Functional tests + +These are black-box tests that verify the runtime behavior of generated code, as well as the generator's validation behavior. They are also end-to-end tests, since they run the generator as a shell command. + +This can sometimes identify issues with error handling, validation logic, module imports, etc., that might be harder to diagnose via the snapshot tests, especially during development of a new feature. For instance, they can verify that JSON data is correctly decoded into model class attributes, or that the generator will emit an appropriate warning or error for an invalid spec. + +See [`end_to_end_tests/functional_tests`](./end_to_end_tests/functional_tests). 
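
Purely as an illustration of the black-box style described above, a functional test might look roughly like the sketch below. This is not the project's actual harness — the real suite uses shared helpers in `end_to_end_tests/functional_tests` and its own in-repo documents — and it assumes only the documented CLI flags (`--path`, `--meta=none`, `--output-path`, `--overwrite`) plus the warning text that appears in the committed snapshots; the hypothetical spec and test names are made up for the example.

```python
# Hypothetical sketch of a black-box functional test; the real tests use the
# shared helpers in end_to_end_tests/functional_tests instead of subprocess.
import subprocess
from pathlib import Path

# Minimal OpenAPI document with an intentionally invalid optional path parameter.
MINIMAL_SPEC = """
openapi: "3.1.0"
info: {title: Functional Test API, version: "0.1.0"}
paths:
  "/{optional}":
    get:
      parameters:
        - name: optional
          in: path
          required: false  # invalid: path parameters must be required
          schema: {type: string}
      responses:
        "200": {description: OK}
"""


def test_generator_warns_about_optional_path_param(tmp_path: Path) -> None:
    """Run the generator as a shell command and check its validation output."""
    spec = tmp_path / "openapi.yaml"
    spec.write_text(MINIMAL_SPEC)

    result = subprocess.run(
        [
            "openapi-python-client", "generate",
            "--path", str(spec),
            "--meta=none",
            "--output-path", str(tmp_path / "client"),
            "--overwrite",
        ],
        capture_output=True,
        text=True,
        check=False,
    )

    # The client should still be generated, but with a warning about the bad endpoint.
    output = result.stdout + result.stderr
    assert "Warning(s) encountered while generating" in output
    assert "Path parameter must be required" in output
```

Driving the CLI through a subprocess keeps the test black-box: it exercises argument parsing, document loading, and warning output exactly as a user would see them, rather than poking at internal functions.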
+ #### Unit tests -> **NOTE**: Several older-style unit tests using mocks exist in this project. These should be phased out rather than updated, as the tests are brittle and difficult to maintain. Only error cases should be tests with unit tests going forward. +These include: -In some cases, we need to test things which cannot be generated—like validating that errors are caught and handled correctly. These should be tested via unit tests in the `tests` directory, using the `pytest` framework. +* Regular unit tests of basic pieces of fairly self-contained low-level functionality, such as helper functions. These are implemented in the `tests` directory, using the `pytest` framework. +* Older-style unit tests of low-level functions like `property_from_data` that have complex behavior. These are brittle and difficult to maintain, and should not be used going forward. Instead, they should be migrated to functional tests. ### Creating a Pull Request diff --git a/README.md b/README.md index 2c5d0b0b4..4a886d299 100644 --- a/README.md +++ b/README.md @@ -41,23 +41,18 @@ Then, if you want tab completion: `openapi-python-client --install-completion` `openapi-python-client generate --url https://my.api.com/openapi.json` This will generate a new client library named based on the title in your OpenAPI spec. For example, if the title -of your API is "My API", the expected output will be "my-api-client". If a folder already exists by that name, you'll -get an error. +of your API is "My API", the expected output will be "my-api-client". You can change that directory name with the config file (documented below) or with `--output-path`. -If you have an `openapi.json` file available on disk, in any CLI invocation you can build off that instead by replacing `--url` with a `--path`: +If the directory to generate already exists, you'll get an error unless you use `--overwrite`. -`openapi-python-client generate --path location/on/disk/openapi.json` - -### Update an existing client - -`openapi-python-client update --url https://my.api.com/openapi.json` +You can use an OpenAPI file instead of a URL like `openapi-python-client generate --path location/on/disk/openapi.json`. ### Using custom templates -This feature leverages Jinja2's [ChoiceLoader](https://jinja.palletsprojects.com/en/2.11.x/api/#jinja2.ChoiceLoader) and [FileSystemLoader](https://jinja.palletsprojects.com/en/2.11.x/api/#jinja2.FileSystemLoader). This means you do _not_ need to customize every template. Simply copy the template(s) you want to customize from [the default template directory](openapi_python_client/templates) to your own custom template directory (file names _must_ match exactly) and pass the template directory through the `custom-template-path` flag to the `generate` and `update` commands. For instance, +This feature leverages Jinja2's [ChoiceLoader](https://jinja.palletsprojects.com/en/2.11.x/api/#jinja2.ChoiceLoader) and [FileSystemLoader](https://jinja.palletsprojects.com/en/2.11.x/api/#jinja2.FileSystemLoader). This means you do _not_ need to customize every template. 
Simply copy the template(s) you want to customize from [the default template directory](openapi_python_client/templates) to your own custom template directory (file names _must_ match exactly) and pass the template directory through the `custom-template-path` flag to the `generate` command: ``` -openapi-python-client update \ +openapi-python-client generate \ --url https://my.api.com/openapi.json \ --custom-template-path=relative/path/to/mytemplates ``` @@ -102,6 +97,37 @@ class_overrides: The easiest way to find what needs to be overridden is probably to generate your client and go look at everything in the `models` folder. +### docstrings_on_attributes + +By default, when `openapi-python-client` generates a model class, it includes a list of attributes and their +descriptions in the docstring for the class. If you set this option to `true`, then the attribute descriptions +will be put in docstrings for the attributes themselves, and will not be in the class docstring. + +```yaml +docstrings_on_attributes: true +``` + +### literal_enums + +By default, `openapi-python-client` generates classes inheriting for `Enum` for enums. It can instead use `Literal` +values for enums by setting this to `true`: + +```yaml +literal_enums: true +``` + +This is especially useful if enum values, when transformed to their Python names, end up conflicting due to case sensitivity or special symbols. + +### generate_all_tags + +`openapi-python-client` generates module names within the `api` module based on the OpenAPI `tags` of each endpoint. +By default, only the _first_ tag is generated. If you want to generate **duplicate** endpoint functions using _every_ tag +listed, you can enable this option: + +```yaml +generate_all_tags: true +``` + ### project_name_override and package_name_override Used to change the name of generated client library project/package. If the project name is changed but an override for the package name @@ -140,7 +166,7 @@ In the config file, there's an easy way to tell `openapi-python-client` to run a ```yaml post_hooks: - - "ruff check . --fix" + - "ruff check . --fix-only" - "ruff format ." ``` @@ -156,6 +182,49 @@ If this option results in conflicts, you will need to manually override class na By default, the timeout for retrieving the schema file via HTTP is 5 seconds. In case there is an error when retrieving the schema, you might try and increase this setting to a higher value. +### content_type_overrides + +Normally, `openapi-python-client` will skip any bodies or responses that it doesn't recognize the content type for. +This config tells the generator to treat a given content type like another. + +```yaml +content_type_overrides: + application/zip: application/octet-stream +``` + +## Supported Extensions + +### x-enum-varnames + +This extension has been adopted by similar projects such as [OpenAPI Tools](https://github.com/OpenAPITools/openapi-generator/pull/917). +It is intended to provide user-friendly names for integer Enum members that get generated. +It is critical that the length of the array matches that of the enum values. 
+ +``` +"Colors": { + "type": "integer", + "format": "int32", + "enum": [ + 0, + 1, + 2 + ], + "x-enum-varnames": [ + "Red", + "Green", + "Blue" + ] +} +``` + +Results in: +``` +class Color(IntEnum): + RED = 0 + GREEN = 1 + BLUE = 2 +``` + [changelog.md]: CHANGELOG.md [poetry]: https://python-poetry.org/ [PDM]: https://pdm-project.org/latest/ diff --git a/end_to_end_tests/3.1_specific.openapi.yaml b/end_to_end_tests/3.1_specific.openapi.yaml index 3540d04ac..04d693449 100644 --- a/end_to_end_tests/3.1_specific.openapi.yaml +++ b/end_to_end_tests/3.1_specific.openapi.yaml @@ -47,3 +47,34 @@ paths: "application/json": schema: const: "Why have a fixed response? I dunno" + "/prefixItems": + post: + tags: [ "prefixItems" ] + requestBody: + required: true + content: + "application/json": + schema: + type: object + properties: + prefixItemsAndItems: + type: array + prefixItems: + - type: string + const: "prefix" + - type: string + items: + type: number + prefixItemsOnly: + type: array + prefixItems: + - type: string + - type: number + maxItems: 2 + responses: + "200": + description: "Successful Response" + content: + "application/json": + schema: + type: string diff --git a/end_to_end_tests/__init__.py b/end_to_end_tests/__init__.py index 1bf33f63f..3793e0395 100644 --- a/end_to_end_tests/__init__.py +++ b/end_to_end_tests/__init__.py @@ -1 +1,5 @@ """ Generate a complete client and verify that it is correct """ +import pytest + +pytest.register_assert_rewrite("end_to_end_tests.end_to_end_test_helpers") +pytest.register_assert_rewrite("end_to_end_tests.functional_tests.helpers") diff --git a/end_to_end_tests/__snapshots__/test_end_to_end.ambr b/end_to_end_tests/__snapshots__/test_end_to_end.ambr new file mode 100644 index 000000000..6f4a9f2d2 --- /dev/null +++ b/end_to_end_tests/__snapshots__/test_end_to_end.ambr @@ -0,0 +1,77 @@ +# serializer version: 1 +# name: test_documents_with_errors[bad-status-code] + ''' + Generating /test-documents-with-errors + + Warning(s) encountered while generating. Client was generated, but some pieces may be missing + WARNING parsing GET / within default. + + Invalid response status code pattern: abcdef, response will be omitted from generated client + + + If you believe this was a mistake or this tool is missing a feature you need, please open an issue at https://github.com/openapi-generators/openapi-python-client/issues/new/choose + + ''' +# --- +# name: test_documents_with_errors[circular-body-ref] + ''' + Generating /test-documents-with-errors + + Warning(s) encountered while generating. Client was generated, but some pieces may be missing + WARNING parsing POST / within default. Endpoint will not be generated. + + Circular $ref in request body + + + If you believe this was a mistake or this tool is missing a feature you need, please open an issue at https://github.com/openapi-generators/openapi-python-client/issues/new/choose + + ''' +# --- +# name: test_documents_with_errors[invalid-uuid-defaults] + ''' + Generating /test-documents-with-errors + + Warning(s) encountered while generating. Client was generated, but some pieces may be missing + WARNING parsing PUT / within default. Endpoint will not be generated. + + cannot parse parameter of endpoint put_: Invalid UUID value: 3 + + + WARNING parsing POST / within default. Endpoint will not be generated. 
+ + cannot parse parameter of endpoint post_: Invalid UUID value: notauuid + + + If you believe this was a mistake or this tool is missing a feature you need, please open an issue at https://github.com/openapi-generators/openapi-python-client/issues/new/choose + + ''' +# --- +# name: test_documents_with_errors[missing-body-ref] + ''' + Generating /test-documents-with-errors + + Warning(s) encountered while generating. Client was generated, but some pieces may be missing + WARNING parsing POST / within default. Endpoint will not be generated. + + Could not resolve $ref #/components/requestBodies/body in request body + + + If you believe this was a mistake or this tool is missing a feature you need, please open an issue at https://github.com/openapi-generators/openapi-python-client/issues/new/choose + + ''' +# --- +# name: test_documents_with_errors[optional-path-param] + ''' + Generating /test-documents-with-errors + + Warning(s) encountered while generating. Client was generated, but some pieces may be missing + WARNING parsing GET /{optional} within default. Endpoint will not be generated. + + Path parameter must be required + + Parameter(name='optional', param_in=, description=None, required=False, deprecated=False, allowEmptyValue=False, style=None, explode=False, allowReserved=False, param_schema=Schema(title=None, multipleOf=None, maximum=None, exclusiveMaximum=None, minimum=None, exclusiveMinimum=None, maxLength=None, minLength=None, pattern=None, maxItems=None, minItems=None, uniqueItems=None, maxProperties=None, minProperties=None, required=None, enum=None, const=None, type=, allOf=[], oneOf=[], anyOf=[], schema_not=None, items=None, prefixItems=[], properties=None, additionalProperties=None, description=None, schema_format=None, default=None, nullable=False, discriminator=None, readOnly=None, writeOnly=None, xml=None, externalDocs=None, example=None, deprecated=None), example=None, examples=None, content=None) + + If you believe this was a mistake or this tool is missing a feature you need, please open an issue at https://github.com/openapi-generators/openapi-python-client/issues/new/choose + + ''' +# --- diff --git a/end_to_end_tests/baseline_openapi_3.0.json b/end_to_end_tests/baseline_openapi_3.0.json index e70de4c99..c47048218 100644 --- a/end_to_end_tests/baseline_openapi_3.0.json +++ b/end_to_end_tests/baseline_openapi_3.0.json @@ -87,6 +87,47 @@ } } }, + "/bodies/refs": { + "post": { + "tags": [ + "bodies" + ], + "description": "Test request body defined via ref", + "operationId": "refs", + "requestBody": { + "$ref": "#/components/requestBodies/NestedRef" + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/bodies/optional": { + "post": { + "tags": [ + "bodies" + ], + "description": "Test optional request body", + "operationId": "optional-body", + "requestBody": { + "required": false, + "content": { + "application/json": { + "schema": { + "type": "object" + } + } + } + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, "/tests/": { "get": { "tags": [ @@ -377,52 +418,15 @@ "content": { "multipart/form-data": { "schema": { - "$ref": "#/components/schemas/Body_upload_file_tests_upload_post" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": {} - } - } - }, - "422": { - "description": "Validation Error", - 
"content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/tests/upload/multiple": { - "post": { - "tags": [ - "tests" - ], - "summary": "Upload multiple files", - "description": "Upload several files in the same request", - "operationId": "upload_multiple_files_tests_upload_post", - "parameters": [], - "requestBody": { - "content": { - "multipart/form-data": { - "schema": { - "type": "array", - "items": { - "type": "string", - "format": "binary" - } + "$ref": "#/components/schemas/Body_upload_file_tests_upload_post", + "title": "Body_upload_file_tests_upload_post", + "required": [ + "some_file", + "some_object", + "some_nullable_object", + "some_required_number" + ], + "properties": {} } } }, @@ -579,6 +583,16 @@ "name": "float_prop", "in": "query" }, + { + "required": true, + "schema": { + "title": "Float with int default", + "type": "number", + "default": 3 + }, + "name": "float_with_int", + "in": "query" + }, { "required": true, "schema": { @@ -740,7 +754,13 @@ "content": { "application/json": { "schema": { - "type": "string" + "type": "object", + "properties": { + "data": { + "type": "string", + "format": "binary" + } + } } } } @@ -964,6 +984,138 @@ } } }, + "/responses/reference": { + "get": { + "tags": [ + "responses" + ], + "summary": "Endpoint using predefined response", + "operationId": "reference_response", + "responses": { + "200": { + "$ref": "#/components/responses/AResponse" + } + } + } + }, + "/responses/status-codes/default": { + "get": { + "tags": [ + "responses" + ], + "summary": "Default Status Code Only", + "operationId": "default_status_code", + "responses": { + "default": { + "description": "Default response", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, + "/response/status-codes/patterns": { + "get": { + "tags": [ + "responses" + ], + "summary": "Status Code Patterns", + "operationId": "status_code_patterns", + "responses": { + "2XX": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "status": { + "type": "string", + "enum": [ + "success", + "failure" + ] + } + } + } + } + } + }, + "4XX": { + "description": "Bad Request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + } + } + } + } + } + } + } + } + }, + "/response/status-codes/precedence": { + "get": { + "operationId": "status_code_precedence", + "tags": [ + "responses" + ], + "summary": "Status Codes Precedence", + "description": "Verify that specific status codes are always checked first, then ranges, then default", + "responses": { + "default": { + "description": "Default Response Should Be Last", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "4XX": { + "description": "Pattern should be after specific codes", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "404": { + "description": "Not Found", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "200": { + "description": "OK", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, "/auth/token_with_cookie": { "get": { "tags": [ @@ -1123,7 +1275,8 @@ "/tag_with_number": { "get": { "tags": [ - "1" + "1", + "2" ], "responses": { "200": { @@ -1415,7 +1568,9 @@ }, "/naming/mixed-case": { "get": { - "tags": ["naming"], + "tags": [ + 
"naming" + ], "operationId": "mixed_case", "parameters": [ { @@ -1436,30 +1591,32 @@ } ], "responses": { - "200": { - "description": "Successful response", - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "mixed_case": { - "type": "string" - }, - "mixedCase": { - "type": "string" - } + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "mixed_case": { + "type": "string" + }, + "mixedCase": { + "type": "string" } } } } } + } } } }, "/naming/{hyphen-in-path}": { "get": { - "tags": ["naming"], + "tags": [ + "naming" + ], "operationId": "hyphen_in_path", "parameters": [ { @@ -1578,6 +1735,149 @@ } } } + }, + "/config/content-type-override": { + "post": { + "tags": [ + "config" + ], + "summary": "Content Type Override", + "operationId": "content_type_override", + "requestBody": { + "content": { + "openapi/python/client": { + "schema": { + "type": "string" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "openapi/python/client": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, + "/models/allof": { + "get": { + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "aliased": { + "$ref": "#/components/schemas/Aliased" + }, + "extended": { + "$ref": "#/components/schemas/Extended" + }, + "model": { + "$ref": "#/components/schemas/AModel" + } + } + } + } + } + } + } + } + }, + "/models/oneof-with-required-const": { + "get": { + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "oneOf": [ + { + "type": "object", + "properties": { + "type": { + "const": "alpha" + }, + "color": { + "type": "string" + } + }, + "required": [ + "type" + ] + }, + { + "type": "object", + "properties": { + "type": { + "const": "beta" + }, + "texture": { + "type": "string" + } + }, + "required": [ + "type" + ] + } + ] + } + } + } + } + } + } + }, + "/types/unions/duplicate-types": { + "post": { + "title": "duplicate union members", + "requestBody": { + "description": "The request body", + "content": { + "application/json": { + "schema": { + "oneOf": [ + { + "$ref": "#/components/schemas/AModel" + }, + { + "$ref": "#/components/schemas/AModel" + } + ] + } + } + } + }, + "responses": { + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "oneOf": [ + { + "$ref": "#/components/schemas/AModel" + }, + { + "$ref": "#/components/schemas/AModel" + } + ] + } + } + } + } + } + } } }, "components": { @@ -1596,6 +1896,52 @@ "an_required_field" ] }, + "Aliased": { + "allOf": [ + { + "$ref": "#/components/schemas/AModel" + } + ] + }, + "Extended": { + "allOf": [ + { + "$ref": "#/components/schemas/Aliased" + }, + { + "type": "object", + "properties": { + "fromExtended": { + "type": "string" + } + } + } + ] + }, + "AllOfRequiredBase": { + "type": "object", + "properties": { + "bar": { + "type": "string", + "description": "The bar property" + }, + "baz": { + "type": "string", + "description": "The baz property" + } + } + }, + "AllOfRequiredDerived": { + "allOf": [ + { + "$ref": "#/components/schemas/AllOfRequiredBase" + }, + { + "type": "object", + "required": ["bar"] + } + ] + }, "AModel": { "title": "AModel", "required": [ @@ -1604,6 +1950,8 @@ "aCamelDateTime", "a_date", "a_nullable_date", + "a_uuid", + "a_nullable_uuid", "required_nullable", 
"required_not_nullable", "model", @@ -1613,7 +1961,9 @@ ], "type": "object", "properties": { - "any_value": {}, + "any_value": { + "default": "default" + }, "an_enum_value": { "$ref": "#/components/schemas/AnEnum" }, @@ -1672,6 +2022,23 @@ "type": "string", "format": "date" }, + "a_uuid": { + "title": "A Uuid", + "type": "string", + "format": "uuid" + }, + "a_nullable_uuid": { + "title": "A Nullable Uuid", + "type": "string", + "format": "uuid", + "nullable": true, + "default": "07EF8B4D-AA09-4FFA-898D-C710796AFF41" + }, + "a_not_required_uuid": { + "title": "A Not Required Uuid", + "type": "string", + "format": "uuid" + }, "1_leading_digit": { "title": "Leading Digit", "type": "string" @@ -1833,7 +2200,8 @@ "required": [ "some_file", "some_object", - "some_nullable_object" + "some_nullable_object", + "some_required_number" ], "type": "object", "properties": { @@ -1866,6 +2234,23 @@ "title": "Some Number", "type": "number" }, + "some_nullable_number": { + "title": "Some Nullable Number", + "type": "number", + "nullable": true + }, + "some_required_number": { + "title": "Some Required Number", + "type": "number" + }, + "some_int_array": { + "title": "Some Integer Array", + "type": "array", + "items": { + "type": "integer", + "nullable": true + } + }, "some_array": { "title": "Some Array", "nullable": true, @@ -1999,6 +2384,7 @@ "oneOf": [ { "type": "object", + "title": "Apples", "properties": { "apples": { "type": "string" @@ -2007,6 +2393,7 @@ }, { "type": "object", + "title": "Bananas", "properties": { "bananas": { "type": "string" @@ -2039,6 +2426,59 @@ "additionalProperties": {} } }, + "ModelWithMergedProperties": { + "title": "ModelWithMergedProperties", + "allOf": [ + { + "type": "object", + "properties": { + "simpleString": { + "type": "string", + "description": "base simpleString description" + }, + "stringToEnum": { + "type": "string", + "default": "a" + }, + "stringToDate": { + "type": "string" + }, + "numberToInt": { + "type": "number" + }, + "anyToString": {} + } + }, + { + "type": "object", + "properties": { + "simpleString": { + "type": "string", + "description": "extended simpleString description", + "default": "new default" + }, + "stringToEnum": { + "type": "string", + "enum": [ + "a", + "b" + ] + }, + "stringToDate": { + "type": "string", + "format": "date" + }, + "numberToInt": { + "type": "integer" + }, + "anyToString": { + "type": "string", + "default": "x" + } + } + } + ] + }, "ModelWithPrimitiveAdditionalProperties": { "title": "ModelWithPrimitiveAdditionalProperties", "type": "object", @@ -2106,6 +2546,11 @@ } ] }, + "ModelWithNoProperties": { + "type": "object", + "properties": {}, + "additionalProperties": false + }, "AllOfSubModel": { "title": "AllOfSubModel", "type": "object", @@ -2709,6 +3154,32 @@ "type": "string" } } + }, + "requestBodies": { + "NestedRef": { + "$ref": "#/components/requestBodies/ARequestBody" + }, + "ARequestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AModel" + } + } + } + } + }, + "responses": { + "AResponse": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AModel" + } + } + } + } } } } diff --git a/end_to_end_tests/baseline_openapi_3.1.yaml b/end_to_end_tests/baseline_openapi_3.1.yaml index 1b5664e77..295e6818a 100644 --- a/end_to_end_tests/baseline_openapi_3.1.yaml +++ b/end_to_end_tests/baseline_openapi_3.1.yaml @@ -83,6 +83,47 @@ info: } } }, + "/bodies/refs": { + "post": { + "tags": [ + "bodies" + ], + "description": "Test 
request body defined via ref", + "operationId": "refs", + "requestBody": { + "$ref": "#/components/requestBodies/NestedRef" + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, + "/bodies/optional": { + "post": { + "tags": [ + "bodies" + ], + "description": "Test optional request body", + "operationId": "optional-body", + "requestBody": { + "required": false, + "content": { + "application/json": { + "schema": { + "type": "object" + } + } + } + }, + "responses": { + "200": { + "description": "OK" + } + } + } + }, "/tests/": { "get": { "tags": [ @@ -401,51 +442,6 @@ info: } } }, - "/tests/upload/multiple": { - "post": { - "tags": [ - "tests" - ], - "summary": "Upload multiple files", - "description": "Upload several files in the same request", - "operationId": "upload_multiple_files_tests_upload_post", - "parameters": [ ], - "requestBody": { - "content": { - "multipart/form-data": { - "schema": { - "type": "array", - "items": { - "type": "string", - "format": "binary" - } - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, "/tests/json_body": { "post": { "tags": [ @@ -575,6 +571,16 @@ info: "name": "float_prop", "in": "query" }, + { + "required": true, + "schema": { + "title": "Float with int default", + "type": "number", + "default": 3 + }, + "name": "float_with_int", + "in": "query" + }, { "required": true, "schema": { @@ -732,7 +738,13 @@ info: "content": { "application/json": { "schema": { - "type": "string" + "type": "object", + "properties": { + "data": { + "type": "string", + "format": "binary" + } + } } } } @@ -956,6 +968,129 @@ info: } } }, + "/responses/reference": { + "get": { + "tags": [ + "responses" + ], + "summary": "Endpoint using predefined response", + "operationId": "reference_response", + "responses": { + "200": { + "$ref": "#/components/responses/AResponse" + } + } + } + }, + "/responses/status-codes/default": { + "get": { + "tags": ["responses"], + "summary": "Default Status Code Only", + "operationId": "default_status_code", + "responses": { + "default": { + "description": "Default response", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, + "/response/status-codes/patterns": { + "get": { + "tags": ["responses"], + "summary": "Status Code Patterns", + "operationId": "status_code_patterns", + "responses": { + "2XX": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "status": { + "type": "string", + "enum": ["success", "failure"] + } + } + } + } + } + }, + "4XX": { + "description": "Bad Request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + } + } + } + } + } + } + } + } + }, + "/response/status-codes/precedence": { + "get": { + "operationId": "status_code_precedence", + "tags": ["responses"], + "summary": "Status Codes Precedence", + "description": "Verify that specific status codes are always checked first, then ranges, then default", + "responses": { + "default": { + "description": "Default Response Should Be Last", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "4XX": { + "description": "Pattern should be 
after specific codes", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "404": { + "description": "Not Found", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + }, + "200": { + "description": "OK", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, "/auth/token_with_cookie": { "get": { "tags": [ @@ -1114,9 +1249,7 @@ info: }, "/tag_with_number": { "get": { - "tags": [ - "1" - ], + "tags": [ "1", "2" ], "responses": { "200": { "description": "Success" @@ -1572,9 +1705,140 @@ info: } } } + }, + "/config/content-type-override": { + "post": { + "tags": [ + "config" + ], + "summary": "Content Type Override", + "operationId": "content_type_override", + "requestBody": { + "content": { + "openapi/python/client": { + "schema": { + "type": "string" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "openapi/python/client": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, + "/models/allof": { + "get": { + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "aliased": { + "$ref": "#/components/schemas/Aliased" + }, + "extended": { + "$ref": "#/components/schemas/Extended" + }, + "model": { + "$ref": "#/components/schemas/AModel" + } + } + } + } + } + } + } + } + }, + "/models/oneof-with-required-const": { + "get": { + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "oneOf": [ + { + "type": "object", + "properties": { + "type": { + "const": "alpha" + }, + "color": { + "type": "string" + } + }, + "required": [ "type" ] + }, + { + "type": "object", + "properties": { + "type": { + "const": "beta" + }, + "texture": { + "type": "string" + } + }, + "required": [ "type" ] + } + ] + } + } + } + } + } + } + }, + "/types/unions/duplicate-types": { + "post": { + "title": "duplicate union members", + "requestBody": { + "description": "The request body", + "content": { + "application/json": { + "schema": { + "oneOf": [ + {"$ref": "#/components/schemas/AModel"}, + {"$ref": "#/components/schemas/AModel"} + ] + } + } + } + }, + "responses": { + "200": { + "description": "Successful response", + "content": { + "application/json": { + "schema": { + "oneOf": [ + {"$ref": "#/components/schemas/AModel"}, + {"$ref": "#/components/schemas/AModel"} + ] + } + } + } + } + } + } } } -"components": { +"components": "schemas": { "AFormData": { "type": "object", @@ -1590,6 +1854,44 @@ info: "an_required_field" ] }, + "Aliased": { + "allOf": [ + { "$ref": "#/components/schemas/AModel" } + ] + }, + "Extended": { + "allOf": [ + { "$ref": "#/components/schemas/Aliased" }, + { "type": "object", + "properties": { + "fromExtended": { + "type": "string" + } + } + } + ] + }, + "AllOfRequiredBase": { + "type": "object", + "properties": { + "bar": { + "type": "string", + "description": "The bar property" + }, + "baz": { + "type": "string", + "description": "The baz property" + } + } + }, + "AllOfRequiredDerived": { + "allOf": [ + { "$ref": "#/components/schemas/AllOfRequiredBase" }, + { "type": "object", + "required": ["bar"] + } + ] + }, "AModel": { "title": "AModel", "required": [ @@ -1598,6 +1900,8 @@ info: "aCamelDateTime", "a_date", "a_nullable_date", + "a_uuid", + "a_nullable_uuid", "required_nullable", "required_not_nullable", "model", @@ -1607,7 +1911,9 @@ info: ], "type": "object", "properties": { - "any_value": { }, 
+ "any_value": { + "default": "default", + }, "an_enum_value": { "$ref": "#/components/schemas/AnEnum" }, @@ -1620,11 +1926,7 @@ info: "default": "overridden_default" }, "an_optional_allof_enum": { - "allOf": [ - { - "$ref": "#/components/schemas/AnAllOfEnum" - } - ] + "$ref": "#/components/schemas/AnAllOfEnum", }, "nested_list_of_enums": { "title": "Nested List Of Enums", @@ -1672,6 +1974,29 @@ info: "type": "string", "format": "date" }, + "a_uuid": { + "title": "A Uuid", + "type": "string", + "format": "uuid" + }, + "a_nullable_uuid": { + "title": "A Nullable Uuid", + "anyOf": [ + { + "type": "string", + "format": "uuid", + }, + { + "type": "null" + } + ], + "default": "07EF8B4D-AA09-4FFA-898D-C710796AFF41" + }, + "a_not_required_uuid": { + "title": "A Not Required Uuid", + "type": "string", + "format": "uuid" + }, "1_leading_digit": { "title": "Leading Digit", "type": "string" @@ -1761,11 +2086,7 @@ info: ] }, "model": { - "allOf": [ - { - "$ref": "#/components/schemas/ModelWithUnionProperty" - } - ] + "$ref": "#/components/schemas/ModelWithUnionProperty" }, "nullable_model": { "oneOf": [ @@ -1778,11 +2099,7 @@ info: ] }, "not_required_model": { - "allOf": [ - { - "$ref": "#/components/schemas/ModelWithUnionProperty" - } - ] + "$ref": "#/components/schemas/ModelWithUnionProperty" }, "not_required_nullable_model": { "oneOf": [ @@ -1847,7 +2164,8 @@ info: "required": [ "some_file", "some_object", - "some_nullable_object" + "some_nullable_object", + "some_required_number" ], "type": "object", "properties": { @@ -1880,6 +2198,21 @@ info: "title": "Some Number", "type": "number" }, + "some_nullable_number": { + "title": "Some Nullable Number", + "type": [ "number", "null" ] + }, + "some_required_number": { + "title": "Some Number", + "type": "number" + }, + "some_int_array": { + "title": "Some Integer Array", + "type": "array", + "items": { + "type": [ "integer", "null" ] + } + }, "some_array": { "title": "Some Array", "type": [ "array", "null" ], @@ -2011,6 +2344,7 @@ info: "oneOf": [ { "type": "object", + "title": "Apples", "properties": { "apples": { "type": "string" @@ -2019,6 +2353,7 @@ info: }, { "type": "object", + "title": "Bananas", "properties": { "bananas": { "type": "string" @@ -2051,6 +2386,56 @@ info: "additionalProperties": { } } }, + "ModelWithMergedProperties": { + "title": "ModelWithMergedProperties", + "allOf": [ + { + "type": "object", + "properties": { + "simpleString": { + "type": "string", + "description": "base simpleString description" + }, + "stringToEnum": { + "type": "string", + "default": "a" + }, + "stringToDate": { + "type": "string" + }, + "numberToInt": { + "type": "number" + }, + "anyToString": { } + } + }, + { + "type": "object", + "properties": { + "simpleString": { + "type": "string", + "description": "extended simpleString description", + "default": "new default" + }, + "stringToEnum": { + "type": "string", + "enum": [ "a", "b" ] + }, + "stringToDate": { + "type": "string", + "format": "date" + }, + "numberToInt": { + "type": "integer" + }, + "anyToString": { + "type": "string", + "default": "x" + } + } + } + ] + }, "ModelWithPrimitiveAdditionalProperties": { "title": "ModelWithPrimitiveAdditionalProperties", "type": "object", @@ -2118,6 +2503,11 @@ info: } ] }, + "ModelWithNoProperties": { + "type": "object", + "properties": { }, + "additionalProperties": false + }, "AllOfSubModel": { "title": "AllOfSubModel", "type": "object", @@ -2658,7 +3048,7 @@ info: } } } - }, + } "parameters": { "integer-param": { "name": "integer param", @@ -2726,5 +3116,18 @@ 
info: } } } -} - + requestBodies: + NestedRef: + "$ref": "#/components/requestBodies/ARequestBody" + ARequestBody: + content: + "application/json": + "schema": + "$ref": "#/components/schemas/AModel" + responses: + AResponse: + description: OK + content: + "application/json": + "schema": + "$ref": "#/components/schemas/AModel" diff --git a/end_to_end_tests/config.yml b/end_to_end_tests/config.yml index 05ac674fc..a813deddd 100644 --- a/end_to_end_tests/config.yml +++ b/end_to_end_tests/config.yml @@ -9,3 +9,6 @@ class_overrides: class_name: AnEnumValue module_name: an_enum_value field_prefix: attr_ +content_type_overrides: + openapi/python/client: application/json +generate_all_tags: true diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/__init__.py b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/__init__.py index f03fd5cfa..d1102fa1a 100644 --- a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/__init__.py +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/__init__.py @@ -1,8 +1,7 @@ """Contains methods for accessing the API""" -from typing import Type - from .bodies import BodiesEndpoints +from .config import ConfigEndpoints from .default import DefaultEndpoints from .defaults import DefaultsEndpoints from .enums import EnumsEndpoints @@ -12,55 +11,64 @@ from .parameters import ParametersEndpoints from .responses import ResponsesEndpoints from .tag1 import Tag1Endpoints +from .tag2 import Tag2Endpoints from .tests import TestsEndpoints from .true_ import True_Endpoints class MyTestApiClientApi: @classmethod - def bodies(cls) -> Type[BodiesEndpoints]: + def bodies(cls) -> type[BodiesEndpoints]: return BodiesEndpoints @classmethod - def tests(cls) -> Type[TestsEndpoints]: + def tests(cls) -> type[TestsEndpoints]: return TestsEndpoints @classmethod - def defaults(cls) -> Type[DefaultsEndpoints]: + def defaults(cls) -> type[DefaultsEndpoints]: return DefaultsEndpoints @classmethod - def enums(cls) -> Type[EnumsEndpoints]: + def enums(cls) -> type[EnumsEndpoints]: return EnumsEndpoints @classmethod - def responses(cls) -> Type[ResponsesEndpoints]: + def responses(cls) -> type[ResponsesEndpoints]: return ResponsesEndpoints @classmethod - def default(cls) -> Type[DefaultEndpoints]: + def default(cls) -> type[DefaultEndpoints]: return DefaultEndpoints @classmethod - def parameters(cls) -> Type[ParametersEndpoints]: + def parameters(cls) -> type[ParametersEndpoints]: return ParametersEndpoints @classmethod - def tag1(cls) -> Type[Tag1Endpoints]: + def tag1(cls) -> type[Tag1Endpoints]: return Tag1Endpoints @classmethod - def location(cls) -> Type[LocationEndpoints]: + def tag2(cls) -> type[Tag2Endpoints]: + return Tag2Endpoints + + @classmethod + def location(cls) -> type[LocationEndpoints]: return LocationEndpoints @classmethod - def true_(cls) -> Type[True_Endpoints]: + def true_(cls) -> type[True_Endpoints]: return True_Endpoints @classmethod - def naming(cls) -> Type[NamingEndpoints]: + def naming(cls) -> type[NamingEndpoints]: return NamingEndpoints @classmethod - def parameter_references(cls) -> Type[ParameterReferencesEndpoints]: + def parameter_references(cls) -> type[ParameterReferencesEndpoints]: return ParameterReferencesEndpoints + + @classmethod + def config(cls) -> type[ConfigEndpoints]: + return ConfigEndpoints diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/bodies/__init__.py 
b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/bodies/__init__.py index 92367f620..5ff7fceb8 100644 --- a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/bodies/__init__.py +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/bodies/__init__.py @@ -2,7 +2,7 @@ import types -from . import json_like, post_bodies_multiple +from . import json_like, optional_body, post_bodies_multiple, refs class BodiesEndpoints: @@ -19,3 +19,17 @@ def json_like(cls) -> types.ModuleType: A content type that works like json but isn't application/json """ return json_like + + @classmethod + def refs(cls) -> types.ModuleType: + """ + Test request body defined via ref + """ + return refs + + @classmethod + def optional_body(cls) -> types.ModuleType: + """ + Test optional request body + """ + return optional_body diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/config/__init__.py b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/config/__init__.py new file mode 100644 index 000000000..3e07e8d69 --- /dev/null +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/config/__init__.py @@ -0,0 +1,14 @@ +"""Contains methods for accessing the API Endpoints""" + +import types + +from . import content_type_override + + +class ConfigEndpoints: + @classmethod + def content_type_override(cls) -> types.ModuleType: + """ + Content Type Override + """ + return content_type_override diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/default/__init__.py b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/default/__init__.py index ab2d97db8..14b47083b 100644 --- a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/default/__init__.py +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/default/__init__.py @@ -2,7 +2,14 @@ import types -from . import get_common_parameters, post_common_parameters, reserved_parameters +from . import ( + get_common_parameters, + get_models_allof, + get_models_oneof_with_required_const, + post_common_parameters, + post_types_unions_duplicate_types, + reserved_parameters, +) class DefaultEndpoints: @@ -17,3 +24,15 @@ def post_common_parameters(cls) -> types.ModuleType: @classmethod def reserved_parameters(cls) -> types.ModuleType: return reserved_parameters + + @classmethod + def get_models_allof(cls) -> types.ModuleType: + return get_models_allof + + @classmethod + def get_models_oneof_with_required_const(cls) -> types.ModuleType: + return get_models_oneof_with_required_const + + @classmethod + def post_types_unions_duplicate_types(cls) -> types.ModuleType: + return post_types_unions_duplicate_types diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/responses/__init__.py b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/responses/__init__.py index 6000bd0e7..d1361e311 100644 --- a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/responses/__init__.py +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/responses/__init__.py @@ -2,7 +2,14 @@ import types -from . import post_responses_unions_simple_before_complex, text_response +from . 
import ( + default_status_code, + post_responses_unions_simple_before_complex, + reference_response, + status_code_patterns, + status_code_precedence, + text_response, +) class ResponsesEndpoints: @@ -19,3 +26,31 @@ def text_response(cls) -> types.ModuleType: Text Response """ return text_response + + @classmethod + def reference_response(cls) -> types.ModuleType: + """ + Endpoint using predefined response + """ + return reference_response + + @classmethod + def default_status_code(cls) -> types.ModuleType: + """ + Default Status Code Only + """ + return default_status_code + + @classmethod + def status_code_patterns(cls) -> types.ModuleType: + """ + Status Code Patterns + """ + return status_code_patterns + + @classmethod + def status_code_precedence(cls) -> types.ModuleType: + """ + Verify that specific status codes are always checked first, then ranges, then default + """ + return status_code_precedence diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/tag2/__init__.py b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/tag2/__init__.py new file mode 100644 index 000000000..65edddf25 --- /dev/null +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/tag2/__init__.py @@ -0,0 +1,11 @@ +"""Contains methods for accessing the API Endpoints""" + +import types + +from . import get_tag_with_number + + +class Tag2Endpoints: + @classmethod + def get_tag_with_number(cls) -> types.ModuleType: + return get_tag_with_number diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/tests/__init__.py b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/tests/__init__.py index 1b91acc98..d7ef5cd7c 100644 --- a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/tests/__init__.py +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/api/tests/__init__.py @@ -21,7 +21,6 @@ token_with_cookie_auth_token_with_cookie_get, unsupported_content_tests_unsupported_content_get, upload_file_tests_upload_post, - upload_multiple_files_tests_upload_post, ) @@ -82,13 +81,6 @@ def upload_file_tests_upload_post(cls) -> types.ModuleType: """ return upload_file_tests_upload_post - @classmethod - def upload_multiple_files_tests_upload_post(cls) -> types.ModuleType: - """ - Upload several files in the same request - """ - return upload_multiple_files_tests_upload_post - @classmethod def json_body_tests_json_body_post(cls) -> types.ModuleType: """ diff --git a/end_to_end_tests/custom-templates-golden-record/my_test_api_client/models/__init__.py b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/models/__init__.py new file mode 100644 index 000000000..611ed15f6 --- /dev/null +++ b/end_to_end_tests/custom-templates-golden-record/my_test_api_client/models/__init__.py @@ -0,0 +1,16 @@ +# Testing that we can access model-related information via Jinja variables. + +# To avoid having to update this file in the golden record every time the test specs are changed, +# we won't include all the classes in this output - we'll just look for one of them. 
+ +# Using "alls" +# AModel + +# Using "imports" +# from .a_model import AModel + +# Using "openapi.models" +# AModel (a_model) + +# Using "openapi.enums" +# AnEnum (an_enum) diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/.gitignore b/end_to_end_tests/docstrings-on-attributes-golden-record/.gitignore new file mode 100644 index 000000000..79a2c3d73 --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/.gitignore @@ -0,0 +1,23 @@ +__pycache__/ +build/ +dist/ +*.egg-info/ +.pytest_cache/ + +# pyenv +.python-version + +# Environments +.env +.venv + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# JetBrains +.idea/ + +/coverage.xml +/.coverage diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/README.md b/end_to_end_tests/docstrings-on-attributes-golden-record/README.md new file mode 100644 index 000000000..79b20f411 --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/README.md @@ -0,0 +1,124 @@ +# my-test-api-client +A client library for accessing My Test API + +## Usage +First, create a client: + +```python +from my_test_api_client import Client + +client = Client(base_url="https://api.example.com") +``` + +If the endpoints you're going to hit require authentication, use `AuthenticatedClient` instead: + +```python +from my_test_api_client import AuthenticatedClient + +client = AuthenticatedClient(base_url="https://api.example.com", token="SuperSecretToken") +``` + +Now call your endpoint and use your models: + +```python +from my_test_api_client.models import MyDataModel +from my_test_api_client.api.my_tag import get_my_data_model +from my_test_api_client.types import Response + +with client as client: + my_data: MyDataModel = get_my_data_model.sync(client=client) + # or if you need more info (e.g. status_code) + response: Response[MyDataModel] = get_my_data_model.sync_detailed(client=client) +``` + +Or do the same thing with an async version: + +```python +from my_test_api_client.models import MyDataModel +from my_test_api_client.api.my_tag import get_my_data_model +from my_test_api_client.types import Response + +async with client as client: + my_data: MyDataModel = await get_my_data_model.asyncio(client=client) + response: Response[MyDataModel] = await get_my_data_model.asyncio_detailed(client=client) +``` + +By default, when you're calling an HTTPS API it will attempt to verify that SSL is working correctly. Using certificate verification is highly recommended most of the time, but sometimes you may need to authenticate to a server (especially an internal server) using a custom certificate bundle. + +```python +client = AuthenticatedClient( + base_url="https://internal_api.example.com", + token="SuperSecretToken", + verify_ssl="/path/to/certificate_bundle.pem", +) +``` + +You can also disable certificate validation altogether, but beware that **this is a security risk**. + +```python +client = AuthenticatedClient( + base_url="https://internal_api.example.com", + token="SuperSecretToken", + verify_ssl=False +) +``` + +Things to know: +1. Every path/method combo becomes a Python module with four functions: + 1. `sync`: Blocking request that returns parsed data (if successful) or `None` + 1. `sync_detailed`: Blocking request that always returns a `Request`, optionally with `parsed` set if the request was successful. + 1. 
`asyncio`: Like `sync` but async instead of blocking + 1. `asyncio_detailed`: Like `sync_detailed` but async instead of blocking + +1. All path/query params, and bodies become method arguments. +1. If your endpoint had any tags on it, the first tag will be used as a module name for the function (my_tag above) +1. Any endpoint which did not have a tag will be in `my_test_api_client.api.default` + +## Advanced customizations + +There are more settings on the generated `Client` class which let you control more runtime behavior, check out the docstring on that class for more info. You can also customize the underlying `httpx.Client` or `httpx.AsyncClient` (depending on your use-case): + +```python +from my_test_api_client import Client + +def log_request(request): + print(f"Request event hook: {request.method} {request.url} - Waiting for response") + +def log_response(response): + request = response.request + print(f"Response event hook: {request.method} {request.url} - Status {response.status_code}") + +client = Client( + base_url="https://api.example.com", + httpx_args={"event_hooks": {"request": [log_request], "response": [log_response]}}, +) + +# Or get the underlying httpx client to modify directly with client.get_httpx_client() or client.get_async_httpx_client() +``` + +You can even set the httpx client directly, but beware that this will override any existing settings (e.g., base_url): + +```python +import httpx +from my_test_api_client import Client + +client = Client( + base_url="https://api.example.com", +) +# Note that base_url needs to be re-set, as would any shared cookies, headers, etc. +client.set_httpx_client(httpx.Client(base_url="https://api.example.com", proxies="http://localhost:8030")) +``` + +## Building / publishing this package +This project uses [Poetry](https://python-poetry.org/) to manage dependencies and packaging. Here are the basics: +1. Update the metadata in pyproject.toml (e.g. authors, version) +1. If you're using a private repository, configure it with Poetry + 1. `poetry config repositories. ` + 1. `poetry config http-basic. ` +1. Publish the client with `poetry publish --build -r ` or, if for public PyPI, just `poetry publish --build` + +If you want to install this client into another project without publishing it (e.g. for development) then: +1. If that project **is using Poetry**, you can simply do `poetry add ` from that project +1. If that project is not using Poetry: + 1. Build a wheel with `poetry build -f wheel` + 1. 
Install that wheel from the other project `pip install ` diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/__init__.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/__init__.py new file mode 100644 index 000000000..3747245da --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/__init__.py @@ -0,0 +1,8 @@ +"""A client library for accessing My Test API""" + +from .client import AuthenticatedClient, Client + +__all__ = ( + "AuthenticatedClient", + "Client", +) diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/api/__init__.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/api/__init__.py new file mode 100644 index 000000000..81f9fa241 --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/api/__init__.py @@ -0,0 +1 @@ +"""Contains methods for accessing the API""" diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/client.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/client.py new file mode 100644 index 000000000..190510bac --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/client.py @@ -0,0 +1,260 @@ +import ssl +from typing import Any + +import httpx +from attrs import define, evolve, field + + +@define +class Client: + """A class for keeping track of data related to the API + + The following are accepted as keyword arguments and will be used to construct httpx Clients internally: + + ``base_url``: The base URL for the API, all requests are made to a relative path to this URL + + ``cookies``: A dictionary of cookies to be sent with every request + + ``headers``: A dictionary of headers to be sent with every request + + ``timeout``: The maximum amount of a time a request can take. API functions will raise + httpx.TimeoutException if this is exceeded. + + ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, + but can be set to False for testing purposes. + + ``follow_redirects``: Whether or not to follow redirects. Default value is False. + + ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. + + """ + + raise_on_unexpected_status: bool = field(default=False, kw_only=True) + """Whether or not to raise an errors.UnexpectedStatus if the API returns a status code that was not documented in the source OpenAPI document. 
Can also be provided as a keyword argument to the constructor.""" + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) + + def with_headers(self, headers: dict[str, str]) -> "Client": + """Get a new client matching this one with additional headers""" + if self._client is not None: + self._client.headers.update(headers) + if self._async_client is not None: + self._async_client.headers.update(headers) + return evolve(self, headers={**self._headers, **headers}) + + def with_cookies(self, cookies: dict[str, str]) -> "Client": + """Get a new client matching this one with additional cookies""" + if self._client is not None: + self._client.cookies.update(cookies) + if self._async_client is not None: + self._async_client.cookies.update(cookies) + return evolve(self, cookies={**self._cookies, **cookies}) + + def with_timeout(self, timeout: httpx.Timeout) -> "Client": + """Get a new client matching this one with a new timeout configuration""" + if self._client is not None: + self._client.timeout = timeout + if self._async_client is not None: + self._async_client.timeout = timeout + return evolve(self, timeout=timeout) + + def set_httpx_client(self, client: httpx.Client) -> "Client": + """Manually set the underlying httpx.Client + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. + """ + self._client = client + return self + + def get_httpx_client(self) -> httpx.Client: + """Get the underlying httpx.Client, constructing a new one if not previously set""" + if self._client is None: + self._client = httpx.Client( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._client + + def __enter__(self) -> "Client": + """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" + self.get_httpx_client().__enter__() + return self + + def __exit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for internal httpx.Client (see httpx docs)""" + self.get_httpx_client().__exit__(*args, **kwargs) + + def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": + """Manually set the underlying httpx.AsyncClient + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
+ """ + self._async_client = async_client + return self + + def get_async_httpx_client(self) -> httpx.AsyncClient: + """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" + if self._async_client is None: + self._async_client = httpx.AsyncClient( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._async_client + + async def __aenter__(self) -> "Client": + """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" + await self.get_async_httpx_client().__aenter__() + return self + + async def __aexit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" + await self.get_async_httpx_client().__aexit__(*args, **kwargs) + + +@define +class AuthenticatedClient: + """A Client which has been authenticated for use on secured endpoints + + The following are accepted as keyword arguments and will be used to construct httpx Clients internally: + + ``base_url``: The base URL for the API, all requests are made to a relative path to this URL + + ``cookies``: A dictionary of cookies to be sent with every request + + ``headers``: A dictionary of headers to be sent with every request + + ``timeout``: The maximum amount of a time a request can take. API functions will raise + httpx.TimeoutException if this is exceeded. + + ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, + but can be set to False for testing purposes. + + ``follow_redirects``: Whether or not to follow redirects. Default value is False. + + ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. + + """ + + raise_on_unexpected_status: bool = field(default=False, kw_only=True) + """Whether or not to raise an errors.UnexpectedStatus if the API returns a status code that was not documented in the source OpenAPI document. 
Can also be provided as a keyword argument to the constructor.""" + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) + + token: str + """The token to use for authentication""" + prefix: str = "Bearer" + """The prefix to use for the Authorization header""" + auth_header_name: str = "Authorization" + """The name of the Authorization header""" + + def with_headers(self, headers: dict[str, str]) -> "AuthenticatedClient": + """Get a new client matching this one with additional headers""" + if self._client is not None: + self._client.headers.update(headers) + if self._async_client is not None: + self._async_client.headers.update(headers) + return evolve(self, headers={**self._headers, **headers}) + + def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": + """Get a new client matching this one with additional cookies""" + if self._client is not None: + self._client.cookies.update(cookies) + if self._async_client is not None: + self._async_client.cookies.update(cookies) + return evolve(self, cookies={**self._cookies, **cookies}) + + def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": + """Get a new client matching this one with a new timeout configuration""" + if self._client is not None: + self._client.timeout = timeout + if self._async_client is not None: + self._async_client.timeout = timeout + return evolve(self, timeout=timeout) + + def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient": + """Manually set the underlying httpx.Client + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. + """ + self._client = client + return self + + def get_httpx_client(self) -> httpx.Client: + """Get the underlying httpx.Client, constructing a new one if not previously set""" + if self._client is None: + self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token + self._client = httpx.Client( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._client + + def __enter__(self) -> "AuthenticatedClient": + """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" + self.get_httpx_client().__enter__() + return self + + def __exit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for internal httpx.Client (see httpx docs)""" + self.get_httpx_client().__exit__(*args, **kwargs) + + def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient": + """Manually set the underlying httpx.AsyncClient + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
+ """ + self._async_client = async_client + return self + + def get_async_httpx_client(self) -> httpx.AsyncClient: + """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" + if self._async_client is None: + self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token + self._async_client = httpx.AsyncClient( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._async_client + + async def __aenter__(self) -> "AuthenticatedClient": + """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" + await self.get_async_httpx_client().__aenter__() + return self + + async def __aexit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" + await self.get_async_httpx_client().__aexit__(*args, **kwargs) diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/errors.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/errors.py new file mode 100644 index 000000000..5f92e76ac --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/errors.py @@ -0,0 +1,16 @@ +"""Contains shared errors types that can be raised from API functions""" + + +class UnexpectedStatus(Exception): + """Raised by api functions when the response status an undocumented status and Client.raise_on_unexpected_status is True""" + + def __init__(self, status_code: int, content: bytes): + self.status_code = status_code + self.content = content + + super().__init__( + f"Unexpected status code: {status_code}\n\nResponse content:\n{content.decode(errors='ignore')}" + ) + + +__all__ = ["UnexpectedStatus"] diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/__init__.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/__init__.py new file mode 100644 index 000000000..3f5aca91f --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/__init__.py @@ -0,0 +1,9 @@ +"""Contains all the data models used in inputs/outputs""" + +from .model_with_description import ModelWithDescription +from .model_with_no_description import ModelWithNoDescription + +__all__ = ( + "ModelWithDescription", + "ModelWithNoDescription", +) diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/model_with_description.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/model_with_description.py new file mode 100644 index 000000000..b0be6a72c --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/model_with_description.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="ModelWithDescription") + + +@_attrs_define +class ModelWithDescription: + """This is a nice model.""" + + prop_with_no_desc: str | Unset = UNSET + prop_with_desc: str | Unset = UNSET + """ This is a nice property. 
""" + prop_with_long_desc: str | Unset = UNSET + """ It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of + foolishness, + it was the epoch of belief, it was the epoch of incredulity, it was the season of light, it was the season of + darkness, it was the spring of hope, it was the winter of despair. + """ + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + prop_with_no_desc = self.prop_with_no_desc + + prop_with_desc = self.prop_with_desc + + prop_with_long_desc = self.prop_with_long_desc + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if prop_with_no_desc is not UNSET: + field_dict["propWithNoDesc"] = prop_with_no_desc + if prop_with_desc is not UNSET: + field_dict["propWithDesc"] = prop_with_desc + if prop_with_long_desc is not UNSET: + field_dict["propWithLongDesc"] = prop_with_long_desc + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + prop_with_no_desc = d.pop("propWithNoDesc", UNSET) + + prop_with_desc = d.pop("propWithDesc", UNSET) + + prop_with_long_desc = d.pop("propWithLongDesc", UNSET) + + model_with_description = cls( + prop_with_no_desc=prop_with_no_desc, + prop_with_desc=prop_with_desc, + prop_with_long_desc=prop_with_long_desc, + ) + + model_with_description.additional_properties = d + return model_with_description + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/model_with_no_description.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/model_with_no_description.py new file mode 100644 index 000000000..0a020f657 --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/models/model_with_no_description.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="ModelWithNoDescription") + + +@_attrs_define +class ModelWithNoDescription: + prop_with_no_desc: str | Unset = UNSET + prop_with_desc: str | Unset = UNSET + """ This is a nice property. 
""" + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + prop_with_no_desc = self.prop_with_no_desc + + prop_with_desc = self.prop_with_desc + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if prop_with_no_desc is not UNSET: + field_dict["propWithNoDesc"] = prop_with_no_desc + if prop_with_desc is not UNSET: + field_dict["propWithDesc"] = prop_with_desc + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + prop_with_no_desc = d.pop("propWithNoDesc", UNSET) + + prop_with_desc = d.pop("propWithDesc", UNSET) + + model_with_no_description = cls( + prop_with_no_desc=prop_with_no_desc, + prop_with_desc=prop_with_desc, + ) + + model_with_no_description.additional_properties = d + return model_with_no_description + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/py.typed b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/py.typed new file mode 100644 index 000000000..1aad32711 --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 \ No newline at end of file diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/types.py b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/types.py new file mode 100644 index 000000000..b64af0952 --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/my_test_api_client/types.py @@ -0,0 +1,54 @@ +"""Contains some shared types for properties""" + +from collections.abc import Mapping, MutableMapping +from http import HTTPStatus +from typing import IO, BinaryIO, Generic, Literal, TypeVar + +from attrs import define + + +class Unset: + def __bool__(self) -> Literal[False]: + return False + + +UNSET: Unset = Unset() + +# The types that `httpx.Client(files=)` can accept, copied from that library. 
+FileContent = IO[bytes] | bytes | str +FileTypes = ( + # (filename, file (or bytes), content_type) + tuple[str | None, FileContent, str | None] + # (filename, file (or bytes), content_type, headers) + | tuple[str | None, FileContent, str | None, Mapping[str, str]] +) +RequestFiles = list[tuple[str, FileTypes]] + + +@define +class File: + """Contains information for file uploads""" + + payload: BinaryIO + file_name: str | None = None + mime_type: str | None = None + + def to_tuple(self) -> FileTypes: + """Return a tuple representation that httpx will accept for multipart/form-data""" + return self.file_name, self.payload, self.mime_type + + +T = TypeVar("T") + + +@define +class Response(Generic[T]): + """A response from an endpoint""" + + status_code: HTTPStatus + content: bytes + headers: MutableMapping[str, str] + parsed: T | None + + +__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/end_to_end_tests/docstrings-on-attributes-golden-record/pyproject.toml b/end_to_end_tests/docstrings-on-attributes-golden-record/pyproject.toml new file mode 100644 index 000000000..d3fd8e4d7 --- /dev/null +++ b/end_to_end_tests/docstrings-on-attributes-golden-record/pyproject.toml @@ -0,0 +1,26 @@ +[tool.poetry] +name = "my-test-api-client" +version = "0.1.0" +description = "A client library for accessing My Test API" +authors = [] +readme = "README.md" +packages = [ + { include = "my_test_api_client" }, +] +include = ["my_test_api_client/py.typed"] + +[tool.poetry.dependencies] +python = "^3.10" +httpx = ">=0.23.0,<0.29.0" +attrs = ">=22.2.0" +python-dateutil = "^2.8.0" + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +select = ["F", "I", "UP"] diff --git a/end_to_end_tests/docstrings_on_attributes.config.yml b/end_to_end_tests/docstrings_on_attributes.config.yml new file mode 100644 index 000000000..0b21ad0b5 --- /dev/null +++ b/end_to_end_tests/docstrings_on_attributes.config.yml @@ -0,0 +1 @@ +docstrings_on_attributes: true diff --git a/end_to_end_tests/docstrings_on_attributes.yml b/end_to_end_tests/docstrings_on_attributes.yml new file mode 100644 index 000000000..22e6e227d --- /dev/null +++ b/end_to_end_tests/docstrings_on_attributes.yml @@ -0,0 +1,32 @@ +openapi: 3.1.0 +info: + title: My Test API + description: An API for testing docstrings_on_attributes behavior + version: 0.1.0 +paths: + {} +components: + schemas: + ModelWithDescription: + type: object + description: This is a nice model. + properties: + propWithNoDesc: + type: string + propWithDesc: + type: string + description: This is a nice property. + propWithLongDesc: + type: string + description: | + It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, + it was the epoch of belief, it was the epoch of incredulity, it was the season of light, it was the season of + darkness, it was the spring of hope, it was the winter of despair. + ModelWithNoDescription: + type: object + properties: + propWithNoDesc: + type: string + propWithDesc: + type: string + description: This is a nice property. 
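For reference, the `docstrings_on_attributes` golden record added above can be exercised directly. The following is a minimal round-trip sketch, assuming the generated `my_test_api_client` package from that golden record is importable (e.g. installed into a local virtualenv); the model class, field names, and `to_dict`/`from_dict` helpers are exactly those shown in the golden record.

```python
# Minimal sketch: round-tripping the docstrings-on-attributes golden record model.
# Assumes the generated my_test_api_client package shown above is on the import path.
from my_test_api_client.models import ModelWithDescription

payload = {"propWithNoDesc": "x", "propWithDesc": "y"}

model = ModelWithDescription.from_dict(payload)
assert model.prop_with_no_desc == "x"
assert model.prop_with_desc == "y"

# propWithLongDesc was never provided, so it remains UNSET and is omitted again
# when the model is serialized back to a dict.
assert model.to_dict() == payload
```

The config flag only changes where the property descriptions land (attribute docstrings instead of an `Attributes:` section in the class docstring); the generated serialization helpers shown above behave the same as in the default templates.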
diff --git a/end_to_end_tests/documents_with_errors/bad-status-code.yaml b/end_to_end_tests/documents_with_errors/bad-status-code.yaml new file mode 100644 index 000000000..17c3ab2cf --- /dev/null +++ b/end_to_end_tests/documents_with_errors/bad-status-code.yaml @@ -0,0 +1,14 @@ +openapi: "3.1.0" +info: + title: "There's something wrong with me" + version: "0.1.0" +paths: + "/": + get: + responses: + "abcdef": + description: "Successful Response" + content: + "application/json": + schema: + const: "Why have a fixed response? I dunno" diff --git a/end_to_end_tests/documents_with_errors/circular-body-ref.yaml b/end_to_end_tests/documents_with_errors/circular-body-ref.yaml new file mode 100644 index 000000000..98761a35d --- /dev/null +++ b/end_to_end_tests/documents_with_errors/circular-body-ref.yaml @@ -0,0 +1,20 @@ +openapi: "3.1.0" +info: + title: "Circular Body Ref" + version: "0.1.0" +paths: + /: + post: + requestBody: + $ref: "#/components/requestBodies/body" + responses: + "200": + description: "Successful Response" + content: + "application/json": + schema: + const: "Why have a fixed response? I dunno" +components: + requestBodies: + body: + $ref: "#/components/requestBodies/body" \ No newline at end of file diff --git a/end_to_end_tests/documents_with_errors/invalid-uuid-defaults.yaml b/end_to_end_tests/documents_with_errors/invalid-uuid-defaults.yaml new file mode 100644 index 000000000..dd768de4f --- /dev/null +++ b/end_to_end_tests/documents_with_errors/invalid-uuid-defaults.yaml @@ -0,0 +1,30 @@ +openapi: "3.1.0" +info: + title: "Circular Body Ref" + version: "0.1.0" +paths: + /: + post: + parameters: + - name: id + in: query + required: false + schema: + type: string + format: uuid + default: "notauuid" + responses: + "200": + description: "Successful Response" + put: + parameters: + - name: another_id + in: query + required: false + schema: + type: string + format: uuid + default: 3 + responses: + "200": + description: "Successful Response" \ No newline at end of file diff --git a/end_to_end_tests/documents_with_errors/missing-body-ref.yaml b/end_to_end_tests/documents_with_errors/missing-body-ref.yaml new file mode 100644 index 000000000..bf02ba6b1 --- /dev/null +++ b/end_to_end_tests/documents_with_errors/missing-body-ref.yaml @@ -0,0 +1,16 @@ +openapi: "3.1.0" +info: + title: "Trying to use a request body ref that does not exist" + version: "0.1.0" +paths: + /: + post: + requestBody: + $ref: "#/components/requestBodies/body" + responses: + "200": + description: "Successful Response" + content: + "application/json": + schema: + const: "Why have a fixed response? I dunno" \ No newline at end of file diff --git a/end_to_end_tests/documents_with_errors/optional-path-param.yaml b/end_to_end_tests/documents_with_errors/optional-path-param.yaml new file mode 100644 index 000000000..ccd0237db --- /dev/null +++ b/end_to_end_tests/documents_with_errors/optional-path-param.yaml @@ -0,0 +1,19 @@ +openapi: "3.1.0" +info: + title: "There's something wrong with me" + version: "0.1.0" +paths: + "/{optional}": + get: + parameters: + - in: "path" + name: "optional" + schema: + type: "string" + responses: + "200": + description: "Successful Response" + content: + "application/json": + schema: + const: "Why have a fixed response? 
I dunno" diff --git a/end_to_end_tests/functional_tests/README.md b/end_to_end_tests/functional_tests/README.md new file mode 100644 index 000000000..1008527c5 --- /dev/null +++ b/end_to_end_tests/functional_tests/README.md @@ -0,0 +1,75 @@ +## The `functional_tests` module + +These are end-to-end tests which run the client generator against many small API documents that are specific to various test cases. + +Rather than testing low-level implementation details (like the unit tests in `tests`), or making assertions about the exact content of the generated code (like the "golden record"-based end-to-end tests), these treat both the generator and the generated code as black boxes and make assertions about their behavior. + +The tests are in two submodules: + +# `generated_code_execution` + +These tests use valid API specs, and after running the generator, they _import and execute_ pieces of the generated code to verify that it actually works at runtime. + +Each test class follows this pattern: + +- Use the decorator `@with_generated_client_fixture`, providing an inline API spec (JSON or YAML) that contains whatever schemas/paths/etc. are relevant to this test class. + - The spec can omit the `openapi:`, `info:`, and `paths:`, blocks, unless those are relevant to the test. + - The decorator creates a temporary file for the inline spec and a temporary directory for the generated code, and runs the client generator. + - It creates a `GeneratedClientContext` object (defined in `end_to_end_test_helpers.py`) to keep track of things like the location of the generated code and the output of the generator command. + - This object is injected into the test class as a fixture called `generated_client`, although most tests will not need to reference the fixture directly. + - `sys.path` is temporarily changed, for the scope of this test class, to allow imports from the generated code. +- Use the decorator `@with_generated_code_imports` or `@with_generated_code_import` to make classes or functions from the generated code available to the tests. + - `@with_generated_code_imports(".models.MyModel1", ".models.MyModel2)` would execute `from [package name].models import MyModel1, MyModel2` and inject the imported classes into the test class as fixtures called `MyModel1` and `MyModel2`. + - `@with_generated_code_import(".api.my_operation.sync", alias="endpoint_method")` would execute `from [package name].api.my_operation import sync`, but the fixture would be named `endpoint_method`. + - After the test class finishes, these imports are discarded. + +Example: + +```python +@with_generated_client_fixture( +""" +components: + schemas: + MyModel: + type: object + properties: + stringProp: {"type": "string"} +""") +@with_generated_code_import(".models.MyModel") +class TestSimpleJsonObject: + def test_encoding(self, MyModel): + instance = MyModel(string_prop="abc") + assert instance.to_dict() == {"stringProp": "abc"} +``` + +# `generator_failure_cases` + +These run the generator with an invalid API spec and make assertions about the warning/error output. Some of these invalid conditions are expected to only produce warnings about the affected schemas, while others are expected to produce fatal errors that terminate the generator. + +For warning conditions, each test class uses `@with_generated_client_fixture` as above, then uses `assert_bad_schema` to parse the output and check for a specific warning message for a specific schema name. 
+ +```python +@with_generated_client_fixture( +""" +components: + schemas: + MyModel: + # some kind of invalid schema +""") +class TestBadSchema: + def test_encoding(self, generated_client): + assert_bad_schema(generated_client, "MyModel", "some expected warning text") +``` + +Or, for fatal error conditions: + +- Call `inline_spec_should_fail`, providing an inline API spec (JSON or YAML). + +```python +class TestBadSpec: + def test_some_spec_error(self): + result = inline_spec_should_fail(""" +# some kind of invalid spec +""") + assert "some expected error text" in result.output +``` diff --git a/end_to_end_tests/functional_tests/generated_code_execution/test_arrays.py b/end_to_end_tests/functional_tests/generated_code_execution/test_arrays.py new file mode 100644 index 000000000..aaf33d906 --- /dev/null +++ b/end_to_end_tests/functional_tests/generated_code_execution/test_arrays.py @@ -0,0 +1,143 @@ +from end_to_end_tests.functional_tests.helpers import ( + assert_model_decode_encode, + assert_model_property_type_hint, + with_generated_client_fixture, + with_generated_code_imports, +) + + +@with_generated_client_fixture( +""" +components: + schemas: + SimpleObject: + type: object + properties: + name: {"type": "string"} + ModelWithArrayOfAny: + properties: + arrayProp: + type: array + items: {} + ModelWithArrayOfInts: + properties: + arrayProp: + type: array + items: {"type": "integer"} + ModelWithArrayOfObjects: + properties: + arrayProp: + type: array + items: {"$ref": "#/components/schemas/SimpleObject"} +""") +@with_generated_code_imports( + ".models.ModelWithArrayOfAny", + ".models.ModelWithArrayOfInts", + ".models.ModelWithArrayOfObjects", + ".models.SimpleObject", +) +class TestArraySchemas: + def test_array_of_any(self, ModelWithArrayOfAny): + assert_model_decode_encode( + ModelWithArrayOfAny, + {"arrayProp": ["a", 1]}, + ModelWithArrayOfAny(array_prop=["a", 1]), + ) + + def test_array_of_int(self, ModelWithArrayOfInts): + assert_model_decode_encode( + ModelWithArrayOfInts, + {"arrayProp": [1, 2]}, + ModelWithArrayOfInts(array_prop=[1, 2]), + ) + + def test_array_of_object(self, ModelWithArrayOfObjects, SimpleObject): + assert_model_decode_encode( + ModelWithArrayOfObjects, + {"arrayProp": [{"name": "a"}, {"name": "b"}]}, + ModelWithArrayOfObjects(array_prop=[SimpleObject(name="a"), SimpleObject(name="b")]), + ) + + def test_type_hints(self, ModelWithArrayOfAny, ModelWithArrayOfInts, ModelWithArrayOfObjects): + assert_model_property_type_hint(ModelWithArrayOfAny, "array_prop", "list[Any] | Unset") + assert_model_property_type_hint(ModelWithArrayOfInts, "array_prop", "list[int] | Unset") + assert_model_property_type_hint(ModelWithArrayOfObjects, "array_prop", "list[SimpleObject] | Unset") + + +@with_generated_client_fixture( +""" +components: + schemas: + SimpleObject: + type: object + properties: + name: {"type": "string"} + ModelWithSinglePrefixItem: + type: object + properties: + arrayProp: + type: array + prefixItems: + - type: string + ModelWithPrefixItems: + type: object + properties: + arrayProp: + type: array + prefixItems: + - $ref: "#/components/schemas/SimpleObject" + - type: string + ModelWithMixedItems: + type: object + properties: + arrayProp: + type: array + prefixItems: + - $ref: "#/components/schemas/SimpleObject" + items: + type: string +""") +@with_generated_code_imports( + ".models.ModelWithSinglePrefixItem", + ".models.ModelWithPrefixItems", + ".models.ModelWithMixedItems", + ".models.SimpleObject", +) +class TestArraysWithPrefixItems: + def 
test_single_prefix_item(self, ModelWithSinglePrefixItem): + assert_model_decode_encode( + ModelWithSinglePrefixItem, + {"arrayProp": ["a"]}, + ModelWithSinglePrefixItem(array_prop=["a"]), + ) + + def test_prefix_items(self, ModelWithPrefixItems, SimpleObject): + assert_model_decode_encode( + ModelWithPrefixItems, + {"arrayProp": [{"name": "a"}, "b"]}, + ModelWithPrefixItems(array_prop=[SimpleObject(name="a"), "b"]), + ) + + def test_prefix_items_and_regular_items(self, ModelWithMixedItems, SimpleObject): + assert_model_decode_encode( + ModelWithMixedItems, + {"arrayProp": [{"name": "a"}, "b"]}, + ModelWithMixedItems(array_prop=[SimpleObject(name="a"), "b"]), + ) + + def test_type_hints(self, ModelWithSinglePrefixItem, ModelWithPrefixItems, ModelWithMixedItems): + assert_model_property_type_hint(ModelWithSinglePrefixItem, "array_prop", "list[str] | Unset") + assert_model_property_type_hint( + ModelWithPrefixItems, + "array_prop", + "list[SimpleObject | str] | Unset", + ) + assert_model_property_type_hint( + ModelWithMixedItems, + "array_prop", + "list[SimpleObject | str] | Unset", + ) + # Note, this test is asserting the current behavior which, due to limitations of the implementation + # (see: https://github.com/openapi-generators/openapi-python-client/pull/1130), is not really doing + # tuple type validation-- the ordering of prefixItems is ignored, and instead all of the types are + # simply treated as a union. diff --git a/end_to_end_tests/functional_tests/generated_code_execution/test_defaults.py b/end_to_end_tests/functional_tests/generated_code_execution/test_defaults.py new file mode 100644 index 000000000..5f8affb25 --- /dev/null +++ b/end_to_end_tests/functional_tests/generated_code_execution/test_defaults.py @@ -0,0 +1,114 @@ +import datetime +import uuid + +from end_to_end_tests.functional_tests.helpers import ( + with_generated_client_fixture, + with_generated_code_imports, +) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyModel: + type: object + properties: + booleanProp: {"type": "boolean", "default": true} + stringProp: {"type": "string", "default": "a"} + numberProp: {"type": "number", "default": 1.5} + intProp: {"type": "integer", "default": 2} + dateProp: {"type": "string", "format": "date", "default": "2024-01-02"} + dateTimeProp: {"type": "string", "format": "date-time", "default": "2024-01-02T03:04:05Z"} + uuidProp: {"type": "string", "format": "uuid", "default": "07EF8B4D-AA09-4FFA-898D-C710796AFF41"} + anyPropWithString: {"default": "b"} + anyPropWithInt: {"default": 3} + booleanWithStringTrue1: {"type": "boolean", "default": "True"} + booleanWithStringTrue2: {"type": "boolean", "default": "true"} + booleanWithStringFalse1: {"type": "boolean", "default": "False"} + booleanWithStringFalse2: {"type": "boolean", "default": "false"} + intWithStringValue: {"type": "integer", "default": "4"} + numberWithIntValue: {"type": "number", "default": 5} + numberWithStringValue: {"type": "number", "default": "5.5"} + stringWithNumberValue: {"type": "string", "default": 6} + stringConst: {"type": "string", "const": "always", "default": "always"} + unionWithValidDefaultForType1: + anyOf: [{"type": "boolean"}, {"type": "integer"}] + default: true + unionWithValidDefaultForType2: + anyOf: [{"type": "boolean"}, {"type": "integer"}] + default: 3 +""") +@with_generated_code_imports(".models.MyModel") +class TestSimpleDefaults: + # Note, the null/None type is not covered here due to a known bug: + # 
https://github.com/openapi-generators/openapi-python-client/issues/1162 + def test_defaults_in_initializer(self, MyModel): + instance = MyModel() + assert instance == MyModel( + boolean_prop=True, + string_prop="a", + number_prop=1.5, + int_prop=2, + date_prop=datetime.date(2024, 1, 2), + date_time_prop=datetime.datetime(2024, 1, 2, 3, 4, 5, tzinfo=datetime.timezone.utc), + uuid_prop=uuid.UUID("07EF8B4D-AA09-4FFA-898D-C710796AFF41"), + any_prop_with_string="b", + any_prop_with_int=3, + boolean_with_string_true_1=True, + boolean_with_string_true_2=True, + boolean_with_string_false_1=False, + boolean_with_string_false_2=False, + int_with_string_value=4, + number_with_int_value=5, + number_with_string_value=5.5, + string_with_number_value="6", + string_const="always", + union_with_valid_default_for_type_1=True, + union_with_valid_default_for_type_2=3, + ) + + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: string + enum: ["a", "b"] + MyModel: + type: object + properties: + enumProp: + allOf: + - $ref: "#/components/schemas/MyEnum" + default: "a" + +""") +@with_generated_code_imports(".models.MyEnum", ".models.MyModel") +class TestEnumDefaults: + def test_enum_default(self, MyEnum, MyModel): + assert MyModel().enum_prop == MyEnum.A + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: string + enum: ["a", "A"] + MyModel: + properties: + enumProp: + allOf: + - $ref: "#/components/schemas/MyEnum" + default: A +""", + config="literal_enums: true", +) +@with_generated_code_imports(".models.MyModel") +class TestLiteralEnumDefaults: + def test_default_value(self, MyModel): + assert MyModel().enum_prop == "A" diff --git a/end_to_end_tests/functional_tests/generated_code_execution/test_docstrings.py b/end_to_end_tests/functional_tests/generated_code_execution/test_docstrings.py new file mode 100644 index 000000000..4ed00ac4a --- /dev/null +++ b/end_to_end_tests/functional_tests/generated_code_execution/test_docstrings.py @@ -0,0 +1,163 @@ +from typing import Any + +from end_to_end_tests.functional_tests.helpers import ( + with_generated_code_import, + with_generated_client_fixture, +) + + +class DocstringParser: + lines: list[str] + + def __init__(self, item: Any): + self.lines = [line.lstrip() for line in item.__doc__.split("\n")] + + def get_section(self, header_line: str) -> list[str]: + lines = self.lines[self.lines.index(header_line)+1:] + return lines[0:lines.index("")] + + +@with_generated_client_fixture( +""" +components: + schemas: + MyModel: + description: I like this type. + type: object + properties: + reqStr: + type: string + description: This is necessary. + optStr: + type: string + description: This isn't necessary. + undescribedProp: + type: string + required: ["reqStr", "undescribedProp"] +""") +@with_generated_code_import(".models.MyModel") +class TestSchemaDocstrings: + def test_model_description(self, MyModel): + assert DocstringParser(MyModel).lines[0] == "I like this type." + + def test_model_properties(self, MyModel): + assert set(DocstringParser(MyModel).get_section("Attributes:")) == { + "req_str (str): This is necessary.", + "opt_str (str | Unset): This isn't necessary.", + "undescribed_prop (str):", + } + + +@with_generated_client_fixture( +""" +tags: + - name: service1 +paths: + "/simple": + get: + operationId: getSimpleThing + description: Get a simple thing. + responses: + "200": + description: Success! 
+ content: + application/json: + schema: + $ref: "#/components/schemas/GoodResponse" + tags: + - service1 + post: + operationId: postSimpleThing + description: Post a simple thing. + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Thing" + responses: + "200": + description: Success! + content: + application/json: + schema: + $ref: "#/components/schemas/GoodResponse" + "400": + description: Failure!! + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + tags: + - service1 + "/simple/{id}/{index}": + get: + operationId: getAttributeByIndex + description: Get a simple thing's attribute. + parameters: + - name: id + in: path + required: true + schema: + type: string + description: Which one. + - name: index + in: path + required: true + schema: + type: integer + - name: fries + in: query + required: false + schema: + type: boolean + description: Do you want fries with that? + responses: + "200": + description: Success! + content: + application/json: + schema: + $ref: "#/components/schemas/GoodResponse" + tags: + - service1 + +components: + schemas: + GoodResponse: + type: object + ErrorResponse: + type: object + Thing: + type: object + description: The thing. +""") +@with_generated_code_import(".api.service1.get_simple_thing.sync", alias="get_simple_thing_sync") +@with_generated_code_import(".api.service1.post_simple_thing.sync", alias="post_simple_thing_sync") +@with_generated_code_import(".api.service1.get_attribute_by_index.sync", alias="get_attribute_by_index_sync") +class TestEndpointDocstrings: + def test_description(self, get_simple_thing_sync): + assert DocstringParser(get_simple_thing_sync).lines[0] == "Get a simple thing." + + def test_response_single_type(self, get_simple_thing_sync): + assert DocstringParser(get_simple_thing_sync).get_section("Returns:") == [ + "GoodResponse", + ] + + def test_response_union_type(self, post_simple_thing_sync): + returns_line = DocstringParser(post_simple_thing_sync).get_section("Returns:")[0] + assert returns_line in ( + "GoodResponse | ErrorResponse", + "ErrorResponse | GoodResponse", + ) + + def test_request_body(self, post_simple_thing_sync): + assert DocstringParser(post_simple_thing_sync).get_section("Args:") == [ + "body (Thing | Unset): The thing." 
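+ # "Unset" appears in the expected hint because the requestBody in the spec above is not marked "required: true", so the generated method is expected to treat the body as optional.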
+ ] + + def test_params(self, get_attribute_by_index_sync): + assert DocstringParser(get_attribute_by_index_sync).get_section("Args:") == [ + "id (str): Which one.", + "index (int):", + "fries (bool | Unset): Do you want fries with that?", + ] diff --git a/end_to_end_tests/functional_tests/generated_code_execution/test_enums_and_consts.py b/end_to_end_tests/functional_tests/generated_code_execution/test_enums_and_consts.py new file mode 100644 index 000000000..4bd0b2c81 --- /dev/null +++ b/end_to_end_tests/functional_tests/generated_code_execution/test_enums_and_consts.py @@ -0,0 +1,363 @@ +from typing import Literal +import pytest + +from end_to_end_tests.functional_tests.helpers import ( + assert_model_decode_encode, + assert_model_property_type_hint, + with_generated_client_fixture, + with_generated_code_imports, +) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: string + enum: ["a", "B", "a23", "123", "1bc", "a Thing WIth spaces", ""] + MyModel: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + inlineEnumProp: + type: string + enum: ["a", "b"] + MyModelWithRequired: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + required: ["enumProp"] +""") +@with_generated_code_imports( + ".models.MyEnum", + ".models.MyModel", + ".models.MyModelInlineEnumProp", + ".models.MyModelWithRequired", +) +class TestStringEnumClass: + @pytest.mark.parametrize( + "expected_name,expected_value", + [ + ("A", "a"), + ("B", "B"), + ("A23", "a23"), + ("VALUE_3", "123"), + ("VALUE_4", "1bc"), + ("A_THING_WITH_SPACES", "a Thing WIth spaces"), + ("VALUE_6", ""), + ], + ) + def test_enum_values(self, MyEnum, expected_name, expected_value): + assert getattr(MyEnum, expected_name) == MyEnum(expected_value) + + def test_enum_prop_in_object(self, MyEnum, MyModel, MyModelInlineEnumProp): + assert_model_decode_encode(MyModel, {"enumProp": "B"}, MyModel(enum_prop=MyEnum.B)) + assert_model_decode_encode( + MyModel, + {"inlineEnumProp": "a"}, + MyModel(inline_enum_prop=MyModelInlineEnumProp.A), + ) + + def test_type_hints(self, MyModel, MyModelWithRequired): + optional_type = "MyEnum | Unset" + assert_model_property_type_hint(MyModel,"enum_prop", optional_type) + assert_model_property_type_hint(MyModelWithRequired, "enum_prop", "MyEnum") + + def test_invalid_values(self, MyModel): + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": "c"}) + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": "A"}) + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": 2}) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: integer + enum: [2, 3, -4] + MyModel: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + inlineEnumProp: + type: string + enum: [2, 3] + MyModelWithRequired: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + required: ["enumProp"] +""") +@with_generated_code_imports( + ".models.MyEnum", + ".models.MyModel", + ".models.MyModelInlineEnumProp", + ".models.MyModelWithRequired", +) +class TestIntEnumClass: + @pytest.mark.parametrize( + "expected_name,expected_value", + [ + ("VALUE_2", 2), + ("VALUE_3", 3), + ("VALUE_NEGATIVE_4", -4), + ], + ) + def test_enum_values(self, MyEnum, expected_name, expected_value): + assert getattr(MyEnum, expected_name) == MyEnum(expected_value) + + def test_enum_prop_in_object(self, MyEnum, MyModel, MyModelInlineEnumProp): + assert_model_decode_encode(MyModel, {"enumProp": 2}, MyModel(enum_prop=MyEnum.VALUE_2)) + 
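+ # The inline enum declared under MyModel.inlineEnumProp is generated as its own class, imported above as MyModelInlineEnumProp.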
assert_model_decode_encode( + MyModel, + {"inlineEnumProp": 2}, + MyModel(inline_enum_prop=MyModelInlineEnumProp.VALUE_2), + ) + + def test_type_hints(self, MyModel, MyModelWithRequired): + optional_type = "MyEnum | Unset" + assert_model_property_type_hint(MyModel,"enum_prop", optional_type) + assert_model_property_type_hint(MyModelWithRequired, "enum_prop", "MyEnum") + + def test_invalid_values(self, MyModel): + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": 5}) + with pytest.raises(ValueError): + MyModel.from_dict({"enumProp": "a"}) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: integer + enum: [2, 3, -4] + x-enum-varnames: [ + "Two", + "Three", + "Negative Four" + ] +""") +@with_generated_code_imports( + ".models.MyEnum", +) +class TestIntEnumVarNameExtensions: + @pytest.mark.parametrize( + "expected_name,expected_value", + [ + ("TWO", 2), + ("THREE", 3), + ("NEGATIVE_FOUR", -4), + ], + ) + def test_enum_values(self, MyEnum, expected_name, expected_value): + assert getattr(MyEnum, expected_name) == MyEnum(expected_value) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: string + enum: ["a", "b"] + MyEnumIncludingNull: + type: ["string", "null"] + enum: ["a", "b", null] + MyNullOnlyEnum: + enum: [null] + MyModel: + properties: + nullableEnumProp: + oneOf: + - {"$ref": "#/components/schemas/MyEnum"} + - type: "null" + enumIncludingNullProp: {"$ref": "#/components/schemas/MyEnumIncludingNull"} + nullOnlyEnumProp: {"$ref": "#/components/schemas/MyNullOnlyEnum"} +""") +@with_generated_code_imports( + ".models.MyEnum", + ".models.MyEnumIncludingNullType1", # see comment in test_nullable_enum_prop + ".models.MyModel", +) +class TestNullableEnums: + def test_nullable_enum_prop(self, MyModel, MyEnum, MyEnumIncludingNullType1): + # Note, MyEnumIncludingNullType1 should be named just MyEnumIncludingNull - + # known bug: https://github.com/openapi-generators/openapi-python-client/issues/1120 + assert_model_decode_encode(MyModel, {"nullableEnumProp": "b"}, MyModel(nullable_enum_prop=MyEnum.B)) + assert_model_decode_encode(MyModel, {"nullableEnumProp": None}, MyModel(nullable_enum_prop=None)) + assert_model_decode_encode( + MyModel, + {"enumIncludingNullProp": "a"}, + MyModel(enum_including_null_prop=MyEnumIncludingNullType1.A), + ) + assert_model_decode_encode( MyModel, {"enumIncludingNullProp": None}, MyModel(enum_including_null_prop=None)) + assert_model_decode_encode(MyModel, {"nullOnlyEnumProp": None}, MyModel(null_only_enum_prop=None)) + + def test_type_hints(self, MyModel): + expected_type = "MyEnum | None | Unset" + assert_model_property_type_hint(MyModel, "nullable_enum_prop", expected_type) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyModel: + properties: + mustBeErnest: + const: Ernest + mustBeThirty: + const: 30 +""", +) +@with_generated_code_imports(".models.MyModel") +class TestConst: + def test_valid_string(self, MyModel): + assert_model_decode_encode( + MyModel, + {"mustBeErnest": "Ernest"}, + MyModel(must_be_ernest="Ernest"), + ) + + def test_valid_int(self, MyModel): + assert_model_decode_encode( + MyModel, + {"mustBeThirty": 30}, + MyModel(must_be_thirty=30), + ) + + def test_invalid_string(self, MyModel): + with pytest.raises(ValueError): + MyModel.from_dict({"mustBeErnest": "Jack"}) + + def test_invalid_int(self, MyModel): + with pytest.raises(ValueError): + MyModel.from_dict({"mustBeThirty": 29}) + + +# The following tests 
of literal enums use basically the same specs as the tests above, but +# the "literal_enums" option is enabled in the test configuration. + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: string + enum: ["a", "A", "b"] + MyModel: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + inlineEnumProp: + type: string + enum: ["a", "b"] + MyModelWithRequired: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + required: ["enumProp"] +""", + config="literal_enums: true", +) +@with_generated_code_imports( + ".models.MyModel", + ".models.MyModelWithRequired", + ".models.MyEnum", +) +class TestStringLiteralEnum: + def test_enum_prop(self, MyModel): + assert_model_decode_encode(MyModel, {"enumProp": "a"}, MyModel(enum_prop="a")) + assert_model_decode_encode(MyModel, {"enumProp": "A"}, MyModel(enum_prop="A")) + assert_model_decode_encode(MyModel, {"inlineEnumProp": "a"}, MyModel(inline_enum_prop="a")) + + def test_type_hints(self, MyModel, MyModelWithRequired, MyEnum): + optional_type = "MyEnum | Unset" + assert_model_property_type_hint(MyModel, "enum_prop", optional_type) + assert_model_property_type_hint(MyModelWithRequired, "enum_prop", "MyEnum") + assert MyEnum == Literal["a", "A", "b"] + + def test_invalid_values(self, MyModel): + with pytest.raises(TypeError): + MyModel.from_dict({"enumProp": "c"}) + with pytest.raises(TypeError): + MyModel.from_dict({"enumProp": 2}) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: integer + enum: [2, 3, -4] + MyModel: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + inlineEnumProp: + type: string + enum: [2, 3] + MyModelWithRequired: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + required: ["enumProp"] +""", + config="literal_enums: true", +) +@with_generated_code_imports( + ".models.MyModel", + ".models.MyModelWithRequired", + ".models.MyEnum", +) +class TestIntLiteralEnum: + def test_enum_prop(self, MyModel): + assert_model_decode_encode(MyModel, {"enumProp": 2}, MyModel(enum_prop=2)) + assert_model_decode_encode(MyModel, {"enumProp": -4}, MyModel(enum_prop=-4)) + assert_model_decode_encode(MyModel, {"inlineEnumProp": 2}, MyModel(inline_enum_prop=2)) + + def test_type_hints(self, MyModel, MyModelWithRequired, MyEnum): + optional_type = "MyEnum | Unset" + assert_model_property_type_hint(MyModel, "enum_prop", optional_type) + assert_model_property_type_hint(MyModelWithRequired, "enum_prop", "MyEnum") + assert MyEnum == Literal[2, 3, -4] + + def test_invalid_values(self, MyModel): + with pytest.raises(TypeError): + MyModel.from_dict({"enumProp": 4}) + with pytest.raises(TypeError): + MyModel.from_dict({"enumProp": "a"}) + + +@with_generated_client_fixture( +""" +components: + schemas: + MyEnum: + type: string + enum: ["a", "A"] + MyEnumIncludingNull: + type: ["string", "null"] + enum: ["a", "b", null] + MyNullOnlyEnum: + enum: [null] + MyModel: + properties: + enumProp: {"$ref": "#/components/schemas/MyEnum"} + nullableEnumProp: + oneOf: + - {"$ref": "#/components/schemas/MyEnum"} + - type: "null" + enumIncludingNullProp: {"$ref": "#/components/schemas/MyEnumIncludingNull"} + nullOnlyEnumProp: {"$ref": "#/components/schemas/MyNullOnlyEnum"} +""", + config="literal_enums: true", +) +@with_generated_code_imports(".models.MyModel") +class TestNullableLiteralEnum: + def test_nullable_enum_prop(self, MyModel): + assert_model_decode_encode(MyModel, {"nullableEnumProp": "B"}, MyModel(nullable_enum_prop="B")) + 
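+ # With literal_enums enabled, values round-trip as plain strings (or None) rather than Enum members, which is why no generated enum class is imported for this test.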
assert_model_decode_encode(MyModel, {"nullableEnumProp": None}, MyModel(nullable_enum_prop=None)) + assert_model_decode_encode(MyModel, {"enumIncludingNullProp": "a"}, MyModel(enum_including_null_prop="a")) + assert_model_decode_encode(MyModel, {"enumIncludingNullProp": None}, MyModel(enum_including_null_prop=None)) + assert_model_decode_encode(MyModel, {"nullOnlyEnumProp": None}, MyModel(null_only_enum_prop=None)) diff --git a/end_to_end_tests/functional_tests/generated_code_execution/test_path_parameters.py b/end_to_end_tests/functional_tests/generated_code_execution/test_path_parameters.py new file mode 100644 index 000000000..694788d96 --- /dev/null +++ b/end_to_end_tests/functional_tests/generated_code_execution/test_path_parameters.py @@ -0,0 +1,151 @@ +from unittest.mock import MagicMock + +import httpx +import pytest + +from end_to_end_tests.functional_tests.helpers import ( + with_generated_client_fixture, + with_generated_code_import, +) + + +@with_generated_client_fixture( +""" +paths: + "/items/{item_id}/details/{detail_id}": + get: + operationId: getItemDetail + parameters: + - name: item_id + in: path + required: true + schema: + type: string + - name: detail_id + in: path + required: true + schema: + type: string + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + id: + type: string +""") +@with_generated_code_import(".api.default.get_item_detail.sync_detailed") +@with_generated_code_import(".client.Client") +class TestPathParameterEncoding: + """Test that path parameters are properly URL-encoded""" + + def test_path_params_with_normal_chars_work(self, sync_detailed, Client): + """Test that normal alphanumeric path parameters still work correctly""" + mock_httpx_client = MagicMock(spec=httpx.Client) + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = {"id": "test"} + mock_response.content = b'{"id": "test"}' + mock_response.headers = {} + mock_httpx_client.request.return_value = mock_response + + client = Client(base_url="https://api.example.com") + client.set_httpx_client(mock_httpx_client) + + sync_detailed( + item_id="item123", + detail_id="detail456", + client=client, + ) + + mock_httpx_client.request.assert_called_once() + call_kwargs = mock_httpx_client.request.call_args[1] + + # Normal characters should remain unchanged + expected_url = "/items/item123/details/detail456" + assert call_kwargs["url"] == expected_url + + def test_path_params_with_reserved_chars_are_encoded(self, sync_detailed, Client): + """Test that path parameters with reserved characters are properly URL-encoded""" + # Create a mock httpx client + mock_httpx_client = MagicMock(spec=httpx.Client) + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = {"id": "test"} + mock_response.content = b'{"id": "test"}' + mock_response.headers = {} + mock_httpx_client.request.return_value = mock_response + + # Create a client with the mock httpx client + client = Client(base_url="https://api.example.com") + client.set_httpx_client(mock_httpx_client) + + # Call the endpoint with path parameters containing reserved characters + sync_detailed( + item_id="item/with/slashes", + detail_id="detail?with=query&chars", + client=client, + ) + + # Verify the request was made with properly encoded URL + mock_httpx_client.request.assert_called_once() 
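+ # call_args[1] is the keyword-argument dict passed to httpx.Client.request; the generated endpoint code is expected to supply the already-encoded relative URL as the "url" kwarg.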
+ call_kwargs = mock_httpx_client.request.call_args[1] + + # The URL should have encoded slashes and query characters + expected_url = "/items/item%2Fwith%2Fslashes/details/detail%3Fwith%3Dquery%26chars" + assert call_kwargs["url"] == expected_url + + def test_path_params_with_spaces_are_encoded(self, sync_detailed, Client): + """Test that path parameters with spaces are properly URL-encoded""" + mock_httpx_client = MagicMock(spec=httpx.Client) + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = {"id": "test"} + mock_response.content = b'{"id": "test"}' + mock_response.headers = {} + mock_httpx_client.request.return_value = mock_response + + client = Client(base_url="https://api.example.com") + client.set_httpx_client(mock_httpx_client) + + sync_detailed( + item_id="item with spaces", + detail_id="detail with spaces", + client=client, + ) + + mock_httpx_client.request.assert_called_once() + call_kwargs = mock_httpx_client.request.call_args[1] + + # Spaces should be encoded as %20 + expected_url = "/items/item%20with%20spaces/details/detail%20with%20spaces" + assert call_kwargs["url"] == expected_url + + def test_path_params_with_hash_are_encoded(self, sync_detailed, Client): + """Test that path parameters with hash/fragment characters are properly URL-encoded""" + mock_httpx_client = MagicMock(spec=httpx.Client) + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = {"id": "test"} + mock_response.content = b'{"id": "test"}' + mock_response.headers = {} + mock_httpx_client.request.return_value = mock_response + + client = Client(base_url="https://api.example.com") + client.set_httpx_client(mock_httpx_client) + + sync_detailed( + item_id="item#1", + detail_id="detail#id", + client=client, + ) + + mock_httpx_client.request.assert_called_once() + call_kwargs = mock_httpx_client.request.call_args[1] + + # Hash should be encoded as %23 + expected_url = "/items/item%231/details/detail%23id" + assert call_kwargs["url"] == expected_url diff --git a/end_to_end_tests/functional_tests/generated_code_execution/test_properties.py b/end_to_end_tests/functional_tests/generated_code_execution/test_properties.py new file mode 100644 index 000000000..948293bba --- /dev/null +++ b/end_to_end_tests/functional_tests/generated_code_execution/test_properties.py @@ -0,0 +1,242 @@ +import datetime +import uuid +import pytest + +from end_to_end_tests.functional_tests.helpers import ( + assert_model_decode_encode, + assert_model_property_type_hint, + with_generated_client_fixture, + with_generated_code_imports, +) + + +@with_generated_client_fixture( + """ +components: + schemas: + MyModel: + type: object + properties: + req1: {"type": "string"} + req2: {"type": "string"} + opt: {"type": "string"} + required: ["req1", "req2"] + DerivedModel: + allOf: + - $ref: "#/components/schemas/MyModel" + - type: object + properties: + req3: {"type": "string"} + required: ["req3"] +""" +) +@with_generated_code_imports( + ".models.MyModel", + ".models.DerivedModel", + ".types.Unset", +) +class TestRequiredAndOptionalProperties: + def test_required_ok(self, MyModel, DerivedModel): + assert_model_decode_encode( + MyModel, + {"req1": "a", "req2": "b"}, + MyModel(req1="a", req2="b"), + ) + assert_model_decode_encode( + DerivedModel, + {"req1": "a", "req2": "b", "req3": "c"}, + DerivedModel(req1="a", req2="b", req3="c"), + 
) + + def test_required_and_optional(self, MyModel, DerivedModel): + assert_model_decode_encode( + MyModel, + {"req1": "a", "req2": "b", "opt": "c"}, + MyModel(req1="a", req2="b", opt="c"), + ) + assert_model_decode_encode( + DerivedModel, + {"req1": "a", "req2": "b", "req3": "c", "opt": "d"}, + DerivedModel(req1="a", req2="b", req3="c", opt="d"), + ) + + def test_required_missing(self, MyModel, DerivedModel): + with pytest.raises(KeyError): + MyModel.from_dict({"req1": "a"}) + with pytest.raises(KeyError): + MyModel.from_dict({"req2": "b"}) + with pytest.raises(KeyError): + DerivedModel.from_dict({"req1": "a", "req2": "b"}) + + def test_type_hints(self, MyModel): + assert_model_property_type_hint(MyModel, "req1", "str") + assert_model_property_type_hint(MyModel, "opt", "str | Unset") + + +@with_generated_client_fixture( + """ +components: + schemas: + MyModel: + type: object + properties: + booleanProp: {"type": "boolean"} + stringProp: {"type": "string"} + numberProp: {"type": "number"} + intProp: {"type": "integer"} + anyObjectProp: {"$ref": "#/components/schemas/AnyObject"} + nullProp: {"type": "null"} + anyProp: {} + AnyObject: + type: object +""" +) +@with_generated_code_imports( + ".models.MyModel", + ".models.AnyObject", +) +class TestBasicModelProperties: + def test_decode_encode(self, MyModel, AnyObject): + json_data = { + "booleanProp": True, + "stringProp": "a", + "numberProp": 1.5, + "intProp": 2, + "anyObjectProp": {"d": 3}, + "nullProp": None, + "anyProp": "e", + } + expected_any_object = AnyObject() + expected_any_object.additional_properties = {"d": 3} + assert_model_decode_encode( + MyModel, + json_data, + MyModel( + boolean_prop=True, + string_prop="a", + number_prop=1.5, + int_prop=2, + any_object_prop=expected_any_object, + null_prop=None, + any_prop="e", + ), + ) + + @pytest.mark.parametrize( + "bad_data", + ["a", True, 2, None], + ) + def test_decode_error_not_object(self, bad_data, MyModel): + with pytest.raises(Exception): + # Exception is overly broad, but unfortunately in the current implementation, the error + # being raised is AttributeError (because it tries to call bad_data.copy()) which isn't + # very meaningful + MyModel.from_dict(bad_data) + + def test_type_hints(self, MyModel): + assert_model_property_type_hint(MyModel, "boolean_prop", "bool | Unset") + assert_model_property_type_hint(MyModel, "string_prop", "str | Unset") + assert_model_property_type_hint(MyModel, "number_prop", "float | Unset") + assert_model_property_type_hint(MyModel, "int_prop", "int | Unset") + assert_model_property_type_hint(MyModel, "any_object_prop", "AnyObject | Unset") + assert_model_property_type_hint(MyModel, "null_prop", "None | Unset") + assert_model_property_type_hint(MyModel, "any_prop", "Any | Unset") + + +@with_generated_client_fixture( + """ +components: + schemas: + MyModel: + type: object + properties: + dateProp: {"type": "string", "format": "date"} + dateTimeProp: {"type": "string", "format": "date-time"} + uuidProp: {"type": "string", "format": "uuid"} + unknownFormatProp: {"type": "string", "format": "weird"} +""" +) +@with_generated_code_imports( + ".models.MyModel", +) +class TestSpecialStringFormats: + def test_date(self, MyModel): + date_value = datetime.date.today() + json_data = {"dateProp": date_value.isoformat()} + assert_model_decode_encode(MyModel, json_data, MyModel(date_prop=date_value)) + + def test_date_time(self, MyModel): + date_time_value = datetime.datetime.now(datetime.timezone.utc) + json_data = {"dateTimeProp": date_time_value.isoformat()} + 
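+ # The generated to_dict() is expected to emit the same ISO 8601 string produced by isoformat() above, so the round trip in assert_model_decode_encode compares equal.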
assert_model_decode_encode(MyModel, json_data, MyModel(date_time_prop=date_time_value)) + + def test_uuid(self, MyModel): + uuid_value = uuid.uuid1() + json_data = {"uuidProp": str(uuid_value)} + assert_model_decode_encode(MyModel, json_data, MyModel(uuid_prop=uuid_value)) + + def test_unknown_format(self, MyModel): + json_data = {"unknownFormatProp": "whatever"} + assert_model_decode_encode(MyModel, json_data, MyModel(unknown_format_prop="whatever")) + + def test_type_hints(self, MyModel): + assert_model_property_type_hint(MyModel, "date_prop", "datetime.date | Unset") + assert_model_property_type_hint(MyModel, "date_time_prop", "datetime.datetime | Unset") + assert_model_property_type_hint(MyModel, "uuid_prop", "UUID | Unset") + assert_model_property_type_hint(MyModel, "unknown_format_prop", "str | Unset") + + +@with_generated_client_fixture( + """ +components: + schemas: + MyModel: + type: object + properties: + booleanProp: {"type": "boolean"} + stringProp: {"type": "string"} + numberProp: {"type": "number"} + intProp: {"type": "integer"} + anyObjectProp: {"$ref": "#/components/schemas/AnyObject"} + nullProp: {"type": "null"} + anyProp: {} + AnyObject: + $ref: "#/components/schemas/OtherObject" + OtherObject: + $ref: "#/components/schemas/AnotherObject" + AnotherObject: + type: object + properties: + booleanProp: {"type": "boolean"} + +""" +) +@with_generated_code_imports( + ".models.MyModel", + ".models.AnyObject", + ".types.Unset", +) +class TestReferenceSchemaProperties: + def test_decode_encode(self, MyModel, AnyObject): + json_data = { + "booleanProp": True, + "stringProp": "a", + "numberProp": 1.5, + "intProp": 2, + "anyObjectProp": {"booleanProp": False}, + "nullProp": None, + "anyProp": "e", + } + assert_model_decode_encode( + MyModel, + json_data, + MyModel( + boolean_prop=True, + string_prop="a", + number_prop=1.5, + int_prop=2, + any_object_prop=AnyObject(boolean_prop=False), + null_prop=None, + any_prop="e", + ), + ) diff --git a/end_to_end_tests/functional_tests/generated_code_execution/test_unions.py b/end_to_end_tests/functional_tests/generated_code_execution/test_unions.py new file mode 100644 index 000000000..27dc7430e --- /dev/null +++ b/end_to_end_tests/functional_tests/generated_code_execution/test_unions.py @@ -0,0 +1,146 @@ +from end_to_end_tests.functional_tests.helpers import ( + assert_model_decode_encode, + assert_model_property_type_hint, + with_generated_client_fixture, + with_generated_code_imports, +) + + +@with_generated_client_fixture( +""" +components: + schemas: + StringOrInt: + type: ["string", "integer"] + MyModel: + type: object + properties: + stringOrIntProp: + type: ["string", "integer"] +""" +) +@with_generated_code_imports( + ".models.MyModel", +) +class TestSimpleTypeList: + def test_decode_encode(self, MyModel): + assert_model_decode_encode(MyModel, {"stringOrIntProp": "a"}, MyModel(string_or_int_prop="a")) + assert_model_decode_encode(MyModel, {"stringOrIntProp": 1}, MyModel(string_or_int_prop=1)) + + def test_type_hints(self, MyModel): + assert_model_property_type_hint(MyModel, "string_or_int_prop", "int | str | Unset") + + +@with_generated_client_fixture( +""" +components: + schemas: + ThingA: + type: object + properties: + propA: { type: "string" } + required: ["propA"] + ThingB: + type: object + properties: + propB: { type: "string" } + required: ["propB"] + ThingAOrB: + oneOf: + - $ref: "#/components/schemas/ThingA" + - $ref: "#/components/schemas/ThingB" + ModelWithUnion: + type: object + properties: + thing: {"$ref": 
"#/components/schemas/ThingAOrB"} + thingOrString: + oneOf: + - $ref: "#/components/schemas/ThingA" + - type: string + ModelWithRequiredUnion: + type: object + properties: + thing: {"$ref": "#/components/schemas/ThingAOrB"} + required: ["thing"] + ModelWithNestedUnion: + type: object + properties: + thingOrValue: + oneOf: + - "$ref": "#/components/schemas/ThingAOrB" + - oneOf: + - type: string + - type: number + ModelWithUnionOfOne: + type: object + properties: + thing: + oneOf: + - $ref: "#/components/schemas/ThingA" + requiredThing: + oneOf: + - $ref: "#/components/schemas/ThingA" + required: ["requiredThing"] +""") +@with_generated_code_imports( + ".models.ThingA", + ".models.ThingB", + ".models.ModelWithUnion", + ".models.ModelWithRequiredUnion", + ".models.ModelWithNestedUnion", + ".models.ModelWithUnionOfOne", +) +class TestOneOf: + def test_disambiguate_objects_via_required_properties(self, ThingA, ThingB, ModelWithUnion): + assert_model_decode_encode( + ModelWithUnion, + {"thing": {"propA": "x"}}, + ModelWithUnion(thing=ThingA(prop_a="x")), + ) + assert_model_decode_encode( + ModelWithUnion, + {"thing": {"propB": "x"}}, + ModelWithUnion(thing=ThingB(prop_b="x")), + ) + + def test_disambiguate_object_and_non_object(self, ThingA, ModelWithUnion): + assert_model_decode_encode( + ModelWithUnion, + {"thingOrString": {"propA": "x"}}, + ModelWithUnion(thing_or_string=ThingA(prop_a="x")), + ) + assert_model_decode_encode( + ModelWithUnion, + {"thingOrString": "x"}, + ModelWithUnion(thing_or_string="x"), + ) + + def test_disambiguate_nested_union(self, ThingA, ThingB, ModelWithNestedUnion): + assert_model_decode_encode( + ModelWithNestedUnion, + {"thingOrValue": {"propA": "x"}}, + ModelWithNestedUnion(thing_or_value=ThingA(prop_a="x")), + ) + assert_model_decode_encode( + ModelWithNestedUnion, + {"thingOrValue": 3}, + ModelWithNestedUnion(thing_or_value=3), + ) + + def test_type_hints(self, ModelWithUnion, ModelWithRequiredUnion, ModelWithUnionOfOne, ThingA): + assert_model_property_type_hint( + ModelWithUnion, + "thing", + "ThingA | ThingB | Unset", + ) + assert_model_property_type_hint( + ModelWithRequiredUnion, + "thing", + "ThingA | ThingB", + ) + assert_model_property_type_hint( + ModelWithUnionOfOne, "thing", "ThingA | Unset" + ) + assert_model_property_type_hint( + ModelWithUnionOfOne, "required_thing", "ThingA" + ) diff --git a/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_arrays.py b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_arrays.py new file mode 100644 index 000000000..e4ef0cffd --- /dev/null +++ b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_arrays.py @@ -0,0 +1,23 @@ +import pytest + +from end_to_end_tests.functional_tests.helpers import assert_bad_schema, with_generated_client_fixture + + +@with_generated_client_fixture( +""" +components: + schemas: + ArrayWithNoItems: + type: array + ArrayWithInvalidItemsRef: + type: array + items: + $ref: "#/components/schemas/DoesntExist" +""" +) +class TestArrayInvalidSchemas: + def test_no_items(self, generated_client): + assert_bad_schema(generated_client, "ArrayWithNoItems", "must have items or prefixItems defined") + + def test_invalid_items_ref(self, generated_client): + assert_bad_schema(generated_client, "ArrayWithInvalidItemsRef", "invalid data in items of array") diff --git a/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_defaults.py b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_defaults.py new file 
mode 100644 index 000000000..93f5e11d4 --- /dev/null +++ b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_defaults.py @@ -0,0 +1,88 @@ +import pytest + +from end_to_end_tests.functional_tests.helpers import assert_bad_schema, with_generated_client_fixture + + +@with_generated_client_fixture( +""" +components: + schemas: + WithBadBoolean: + properties: + badBoolean: {"type": "boolean", "default": "not a boolean"} + WithBadIntAsString: + properties: + badInt: {"type": "integer", "default": "not an int"} + WithBadIntAsOther: + properties: + badInt: {"type": "integer", "default": true} + WithBadFloatAsString: + properties: + badInt: {"type": "number", "default": "not a number"} + WithBadFloatAsOther: + properties: + badInt: {"type": "number", "default": true} + WithBadDateAsString: + properties: + badDate: {"type": "string", "format": "date", "default": "xxx"} + WithBadDateAsOther: + properties: + badDate: {"type": "string", "format": "date", "default": 3} + WithBadDateTimeAsString: + properties: + badDate: {"type": "string", "format": "date-time", "default": "xxx"} + WithBadDateTimeAsOther: + properties: + badDate: {"type": "string", "format": "date-time", "default": 3} + WithBadUuidAsString: + properties: + badUuid: {"type": "string", "format": "uuid", "default": "xxx"} + WithBadUuidAsOther: + properties: + badUuid: {"type": "string", "format": "uuid", "default": 3} + WithBadEnum: + properties: + badEnum: {"type": "string", "enum": ["a", "b"], "default": "x"} + GoodEnum: + type: string + enum: ["a", "b"] + OverriddenEnumWithBadDefault: + properties: + badEnum: + allOf: + - $ref: "#/components/schemas/GoodEnum" + default: "x" + UnionWithNoValidDefault: + properties: + badBoolOrInt: + anyOf: + - type: boolean + - type: integer + default: "xxx" +""" +) +class TestInvalidDefaultValues: + # Note, the null/None type, and binary strings (files), are not covered here due to a known bug: + # https://github.com/openapi-generators/openapi-python-client/issues/1162 + + @pytest.mark.parametrize( + ("model_name", "message"), + [ + ("WithBadBoolean", "Invalid boolean value"), + ("WithBadIntAsString", "Invalid int value"), + ("WithBadIntAsOther", "Invalid int value"), + ("WithBadFloatAsString", "Invalid float value"), + ("WithBadFloatAsOther", "Cannot convert True to a float"), + ("WithBadDateAsString", "Invalid date"), + ("WithBadDateAsOther", "Cannot convert 3 to a date"), + ("WithBadDateTimeAsString", "Invalid datetime"), + ("WithBadDateTimeAsOther", "Cannot convert 3 to a datetime"), + ("WithBadUuidAsString", "Invalid UUID value"), + ("WithBadUuidAsOther", "Invalid UUID value"), + ("WithBadEnum", "Value x is not valid for enum"), + ("OverriddenEnumWithBadDefault", "Value x is not valid for enum"), + ("UnionWithNoValidDefault", "Invalid int value"), + ] + ) + def test_bad_default_warning(self, model_name, message, generated_client): + assert_bad_schema(generated_client, model_name, message) diff --git a/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_enums_and_consts.py b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_enums_and_consts.py new file mode 100644 index 000000000..7f1586f29 --- /dev/null +++ b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_enums_and_consts.py @@ -0,0 +1,128 @@ +from end_to_end_tests.functional_tests.helpers import ( + assert_bad_schema, + inline_spec_should_fail, + with_generated_client_fixture, +) + + +@with_generated_client_fixture( +""" 
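+# Every schema below is expected to trigger a generator warning; the tests assert the message produced for each.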
+components: + schemas: + WithBadDefaultValue: + enum: ["A"] + default: "B" + WithBadDefaultType: + enum: ["A"] + default: 123 + WithMixedTypes: + enum: ["A", 1] + WithUnsupportedType: + enum: [1.4, 1.5] + DefaultNotMatchingConst: + const: "aaa" + default: "bbb" + WithConflictingInlineNames: + type: object + properties: + "12": + enum: ["a", "b"] + WithConflictingInlineNames1: + type: object + properties: + "2": + enum: ["c", "d"] +""" +) +class TestEnumAndConstInvalidSchemas: + def test_enum_bad_default_value(self, generated_client): + assert_bad_schema(generated_client, "WithBadDefaultValue", "Value B is not valid") + + def test_enum_bad_default_type(self, generated_client): + assert_bad_schema(generated_client, "WithBadDefaultType", "Cannot convert 123 to enum") + + def test_enum_mixed_types(self, generated_client): + assert_bad_schema(generated_client, "WithMixedTypes", "Enum values must all be the same type") + + def test_enum_unsupported_type(self, generated_client): + assert_bad_schema(generated_client, "WithUnsupportedType", "Unsupported enum type") + + def test_const_default_not_matching(self, generated_client): + assert_bad_schema(generated_client, "DefaultNotMatchingConst", "Invalid value for const") + + def test_conflicting_inline_class_names(self, generated_client): + assert "Found conflicting enums named WithConflictingInlineNames12 with incompatible values" in generated_client.generator_result.output + + def test_enum_duplicate_values(self): + # This one currently causes a full generator failure rather than a warning + result = inline_spec_should_fail( +""" +components: + schemas: + WithDuplicateValues: + enum: ["x", "x"] +""" + ) + assert "Duplicate key X in enum" in str(result.exception) + + +@with_generated_client_fixture( +""" +components: + schemas: + WithBadDefaultValue: + enum: ["A"] + default: "B" + WithBadDefaultType: + enum: ["A"] + default: 123 + WithMixedTypes: + enum: ["A", 1] + WithUnsupportedType: + enum: [1.4, 1.5] + DefaultNotMatchingConst: + const: "aaa" + default: "bbb" + WithConflictingInlineNames: + type: object + properties: + "12": + enum: ["a", "b"] + WithConflictingInlineNames1: + type: object + properties: + "2": + enum: ["c", "d"] +""", + config="literal_enums: true", +) +class TestLiteralEnumInvalidSchemas: + def test_literal_enum_bad_default_value(self, generated_client): + assert_bad_schema(generated_client, "WithBadDefaultValue", "Value B is not valid") + + def test_literal_enum_bad_default_type(self, generated_client): + assert_bad_schema(generated_client, "WithBadDefaultType", "Cannot convert 123 to enum") + + def test_literal_enum_mixed_types(self, generated_client): + assert_bad_schema(generated_client, "WithMixedTypes", "Enum values must all be the same type") + + def test_literal_enum_unsupported_type(self, generated_client): + assert_bad_schema(generated_client, "WithUnsupportedType", "Unsupported enum type") + + def test_const_default_not_matching(self, generated_client): + assert_bad_schema(generated_client, "DefaultNotMatchingConst", "Invalid value for const") + + def test_conflicting_inline_literal_enum_names(self, generated_client): + assert "Found conflicting enums named WithConflictingInlineNames12 with incompatible values" in generated_client.generator_result.output + + def test_literal_enum_duplicate_values(self): + # This one currently causes a full generator failure rather than a warning + result = inline_spec_should_fail( +""" +components: + schemas: + WithDuplicateValues: + enum: ["x", "x"] +""" + ) + assert "Duplicate key X 
in enum" in str(result.exception) diff --git a/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_references.py b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_references.py new file mode 100644 index 000000000..9f609eef2 --- /dev/null +++ b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_references.py @@ -0,0 +1,29 @@ +import pytest + +from end_to_end_tests.functional_tests.helpers import assert_bad_schema, with_generated_client_fixture + + +@with_generated_client_fixture( + """ +components: + schemas: + MyModel: + type: object + properties: + booleanProp: {"type": "boolean"} + stringProp: {"type": "string"} + numberProp: {"type": "number"} + intProp: {"type": "integer"} + anyObjectProp: {"$ref": "#/components/schemas/AnyObject"} + nullProp: {"type": "null"} + anyProp: {} + AnyObject: + $ref: "#/components/schemas/OtherObject" + OtherObject: + $ref: "#/components/schemas/AnyObject" + +""" +) +class TestReferenceSchemaProperties: + def test_decode_encode(self, generated_client): + assert "Circular schema references found" in generated_client.generator_result.stderr diff --git a/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_spec_format.py b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_spec_format.py new file mode 100644 index 000000000..2b0dfdda9 --- /dev/null +++ b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_spec_format.py @@ -0,0 +1,86 @@ +import pytest +from end_to_end_tests.functional_tests.helpers import ( + inline_spec_should_fail, +) + + +class TestInvalidSpecFormats: + @pytest.mark.parametrize( + ("filename_suffix", "content", "expected_error"), + ( + (".yaml", "not a valid openapi document", "Failed to parse OpenAPI document"), + (".json", "Invalid JSON", "Invalid JSON"), + (".yaml", "{", "Invalid YAML"), + ), + ids=("invalid_openapi", "invalid_json", "invalid_yaml"), + ) + def test_unparseable_file(self, filename_suffix, content, expected_error): + result = inline_spec_should_fail(content, filename_suffix=filename_suffix, add_missing_sections=False) + assert expected_error in result.output + + def test_missing_openapi_version(self): + result = inline_spec_should_fail( +""" +info: + title: My API + version: "1.0" +paths: {} +""", + add_missing_sections=False, + ) + for text in ["Failed to parse OpenAPI document", "1 validation error", "openapi"]: + assert text in result.output + + def test_missing_title(self): + result = inline_spec_should_fail( +""" +info: + version: "1.0" +openapi: "3.1.0" +paths: {} +""", + add_missing_sections=False, + ) + for text in ["Failed to parse OpenAPI document", "1 validation error", "title"]: + assert text in result.output + + def test_missing_version(self): + result = inline_spec_should_fail( +""" +info: + title: My API +openapi: "3.1.0" +paths: {} +""", + add_missing_sections=False, + ) + for text in ["Failed to parse OpenAPI document", "1 validation error", "version"]: + assert text in result.output + + def test_missing_paths(self): + result = inline_spec_should_fail( +""" +info: + title: My API + version: "1.0" +openapi: "3.1.0" +""", + add_missing_sections=False, + ) + for text in ["Failed to parse OpenAPI document", "1 validation error", "paths"]: + assert text in result.output + + def test_swagger_unsupported(self): + result = inline_spec_should_fail( +""" +swagger: "2.0" +info: + title: My API + version: "1.0" +openapi: "3.1" +paths: {} +components: {} +""", + add_missing_sections=False, + ) + assert 
"You may be trying to use a Swagger document; this is not supported by this project." in result.output diff --git a/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_unions.py b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_unions.py new file mode 100644 index 000000000..75621a094 --- /dev/null +++ b/end_to_end_tests/functional_tests/generator_failure_cases/test_invalid_unions.py @@ -0,0 +1,28 @@ +from end_to_end_tests.functional_tests.helpers import assert_bad_schema, with_generated_client_fixture + + +@with_generated_client_fixture( +""" +components: + schemas: + UnionWithInvalidReference: + anyOf: + - $ref: "#/components/schemas/DoesntExist" + UnionWithInvalidDefault: + type: ["number", "integer"] + default: aaa + UnionWithMalformedVariant: + anyOf: + - type: string + - type: array # invalid because no items +""" +) +class TestUnionInvalidSchemas: + def test_invalid_reference(self, generated_client): + assert_bad_schema(generated_client, "UnionWithInvalidReference", "Could not find reference") + + def test_invalid_default(self, generated_client): + assert_bad_schema(generated_client, "UnionWithInvalidDefault", "Invalid int value: aaa") + + def test_invalid_property(self, generated_client): + assert_bad_schema(generated_client, "UnionWithMalformedVariant", "Invalid property in union") diff --git a/end_to_end_tests/functional_tests/helpers.py b/end_to_end_tests/functional_tests/helpers.py new file mode 100644 index 000000000..54aefffe1 --- /dev/null +++ b/end_to_end_tests/functional_tests/helpers.py @@ -0,0 +1,134 @@ +from typing import Any, Dict +import re + +from click.testing import Result +import pytest + +from end_to_end_tests.generated_client import generate_client_from_inline_spec, GeneratedClientContext + + +def with_generated_client_fixture( + openapi_spec: str, + name: str="generated_client", + config: str="", + extra_args: list[str] = [], +): + """Decorator to apply to a test class to create a fixture inside it called 'generated_client'. + + The fixture value will be a GeneratedClientContext created by calling + generate_client_from_inline_spec(). + """ + def _decorator(cls): + def generated_client(self): + with generate_client_from_inline_spec(openapi_spec, extra_args=extra_args, config=config) as g: + print(g.generator_result.stdout) # so we'll see the output if a test failed + yield g + + setattr(cls, name, pytest.fixture(scope="class")(generated_client)) + return cls + + return _decorator + + +def with_generated_code_import(import_path: str, alias: str | None = None): + """Decorator to apply to a test class to create a fixture from a generated code import. + + The 'generated_client' fixture must also be present. + + If import_path is "a.b.c", then the fixture's value is equal to "from a.b import c", and + its name is "c" unless you specify a different name with the alias parameter. 
+ """ + parts = import_path.split(".") + module_name = ".".join(parts[0:-1]) + import_name = parts[-1] + + def _decorator(cls): + nonlocal alias + + def _func(self, generated_client): + return generated_client.import_symbol(module_name, import_name) + + alias = alias or import_name + _func.__name__ = alias + setattr(cls, alias, pytest.fixture(scope="class")(_func)) + return cls + + return _decorator + + +def with_generated_code_imports(*import_paths: str): + def _decorator(cls): + decorated = cls + for import_path in import_paths: + decorated = with_generated_code_import(import_path)(decorated) + return decorated + + return _decorator + + +def assert_model_decode_encode(model_class: Any, json_data: dict, expected_instance: Any) -> None: + instance = model_class.from_dict(json_data) + assert instance == expected_instance + assert instance.to_dict() == json_data + + +def assert_model_property_type_hint(model_class: Any, name: str, expected_type_hint: Any) -> None: + assert model_class.__annotations__[name] == expected_type_hint + + +def inline_spec_should_fail( + openapi_spec: str, + extra_args: list[str] = [], + config: str = "", + filename_suffix: str = "", + add_missing_sections = True, +) -> Result: + """Asserts that the generator could not process the spec. + + Returns the command result, which could include stdout data or an exception. + """ + with generate_client_from_inline_spec( + openapi_spec, + extra_args, + config, + filename_suffix=filename_suffix, + add_missing_sections=add_missing_sections, + raise_on_error=False, + ) as generated_client: + assert generated_client.generator_result.exit_code != 0 + return generated_client.generator_result + + +def assert_bad_schema( + generated_client: GeneratedClientContext, + schema_name: str, + expected_message_str: str, +) -> None: + warnings = _GeneratorWarningsParser(generated_client) + assert schema_name in warnings.by_schema, f"Did not find warning for schema {schema_name} in output: {warnings.output}" + assert expected_message_str in warnings.by_schema[schema_name] + + +class _GeneratorWarningsParser: + output: str + by_schema: Dict[str, str] + + def __init__(self, generated_client: GeneratedClientContext) -> None: + """Runs the generator, asserts that it printed warnings, and parses the warnings.""" + + assert generated_client.generator_result.exit_code == 0 + output = generated_client.generator_result.stdout + generated_client.generator_result.stderr + assert "Warning(s) encountered while generating" in output + self.by_schema = {} + self.output = output + bad_schema_regex = "Unable to (parse|process) schema /components/schemas/(\\w*)" + last_name = "" + while True: + if not (match := re.search(bad_schema_regex, output)): + break + if last_name: + self.by_schema[last_name] = output[0:match.start()] + output = output[match.end():] + last_name = match.group(2) + if last_name: + self.by_schema[last_name] = output diff --git a/end_to_end_tests/generated_client.py b/end_to_end_tests/generated_client.py new file mode 100644 index 000000000..6aa81975d --- /dev/null +++ b/end_to_end_tests/generated_client.py @@ -0,0 +1,156 @@ +import importlib +import os +import re +import shutil +from pathlib import Path +import sys +import tempfile +from typing import Any + +from attrs import define +import pytest +from click.testing import Result +from typer.testing import CliRunner + +from openapi_python_client.cli import app + + +@define +class GeneratedClientContext: + """A context manager with helpers for tests that run against generated client code. 
+
+    On entering this context, sys.path is changed to include the root directory of the
+    generated code, so its modules can be imported. On exit, the original sys.path is
+    restored, and any modules that were loaded within the context are removed.
+    """
+
+    output_path: Path
+    generator_result: Result
+    base_module: str
+    monkeypatch: pytest.MonkeyPatch
+    old_modules: set[str] | None = None
+
+    def __enter__(self) -> "GeneratedClientContext":
+        self.monkeypatch.syspath_prepend(self.output_path)
+        self.old_modules = set(sys.modules.keys())
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.monkeypatch.undo()
+        for module_name in set(sys.modules.keys()) - self.old_modules:
+            del sys.modules[module_name]
+        shutil.rmtree(self.output_path, ignore_errors=True)
+
+    def import_module(self, module_path: str) -> Any:
+        """Attempt to import a module from the generated code."""
+        return importlib.import_module(f"{self.base_module}{module_path}")
+
+    def import_symbol(self, module_path: str, name: str) -> Any:
+        module = self.import_module(module_path)
+        try:
+            return getattr(module, name)
+        except AttributeError:
+            existing = ", ".join(name for name in dir(module) if not name.startswith("_"))
+            assert False, (
+                f"Couldn't find import \"{name}\" in \"{self.base_module}{module_path}\".\n"
+                f"Available imports in that module are: {existing}\n"
+                f"Output from generator was: {self.generator_result.stdout}"
+            )
+
+def _run_command(
+    command: str,
+    extra_args: list[str] | None = None,
+    openapi_document: str | None = None,
+    url: str | None = None,
+    config_path: Path | None = None,
+    raise_on_error: bool = True,
+) -> Result:
+    """Generate a client from an OpenAPI document and return the result of the command."""
+    runner = CliRunner()
+    if openapi_document is not None:
+        openapi_path = Path(__file__).parent / openapi_document
+        source_arg = f"--path={openapi_path}"
+    else:
+        source_arg = f"--url={url}"
+    config_path = config_path or (Path(__file__).parent / "config.yml")
+    args = [command, f"--config={config_path}", source_arg]
+    if extra_args:
+        args.extend(extra_args)
+    result = runner.invoke(app, args)
+    if result.exit_code != 0 and raise_on_error:
+        message = f"{result.stdout}\n{result.exception}" if result.exception else result.stdout
+        raise Exception(message)
+    return result
+
+
+def generate_client(
+    openapi_document: str,
+    extra_args: list[str] = [],
+    output_path: str = "my-test-api-client",
+    base_module: str = "my_test_api_client",
+    specify_output_path_explicitly: bool = True,
+    overwrite: bool = True,
+    raise_on_error: bool = True,
+) -> GeneratedClientContext:
+    """Run the generator and return a GeneratedClientContext for accessing the generated code."""
+    full_output_path = Path.cwd() / output_path
+    if not overwrite:
+        shutil.rmtree(full_output_path, ignore_errors=True)
+    args = extra_args
+    if specify_output_path_explicitly:
+        args = [*args, "--output-path", str(full_output_path)]
+    if overwrite:
+        args = [*args, "--overwrite"]
+    generator_result = _run_command("generate", args, openapi_document, raise_on_error=raise_on_error)
+    return GeneratedClientContext(
+        full_output_path,
+        generator_result,
+        base_module,
+        pytest.MonkeyPatch(),
+    )
+
+
+def generate_client_from_inline_spec(
+    openapi_spec: str,
+    extra_args: list[str] = [],
+    config: str = "",
+    filename_suffix: str | None = None,
+    base_module: str = "testapi_client",
+    add_missing_sections = True,
+    raise_on_error: bool = True,
+) -> GeneratedClientContext:
+    """Run the generator on a temporary file created with the specified contents.
+
+    You can also optionally tell it to create a temporary config file.
+    """
+    if add_missing_sections:
+        if not re.search("^openapi:", openapi_spec, re.MULTILINE):
+            openapi_spec += "\nopenapi: '3.1.0'\n"
+        if not re.search("^info:", openapi_spec, re.MULTILINE):
+            openapi_spec += "\ninfo: {'title': 'testapi', 'description': 'my test api', 'version': '0.0.1'}\n"
+        if not re.search("^paths:", openapi_spec, re.MULTILINE):
+            openapi_spec += "\npaths: {}\n"
+
+    output_path = tempfile.mkdtemp()
+    file = tempfile.NamedTemporaryFile(suffix=filename_suffix, delete=False)
+    file.write(openapi_spec.encode('utf-8'))
+    file.close()
+
+    if config:
+        config_file = tempfile.NamedTemporaryFile(delete=False)
+        config_file.write(config.encode('utf-8'))
+        config_file.close()
+        extra_args = [*extra_args, "--config", config_file.name]
+
+    generated_client = generate_client(
+        file.name,
+        extra_args,
+        output_path,
+        base_module,
+        raise_on_error=raise_on_error,
+    )
+    os.unlink(file.name)
+    if config:
+        os.unlink(config_file.name)
+
+    return generated_client
diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/__init__.py
index e69de29bb..2d7c0b23d 100644
--- a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/__init__.py
+++ b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/__init__.py
@@ -0,0 +1 @@
+"""Contains endpoint functions for accessing the API"""
diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/json_like.py b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/json_like.py
index 8eb5c516a..1a4fc2fd9 100644
--- a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/json_like.py
+++ b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/json_like.py
@@ -1,44 +1,45 @@
 from http import HTTPStatus
-from typing import Any, Dict, Optional, Union
+from typing import Any
 
 import httpx
 
 from ...
import errors from ...client import AuthenticatedClient, Client from ...models.json_like_body import JsonLikeBody -from ...types import Response +from ...types import UNSET, Response, Unset def _get_kwargs( *, - body: JsonLikeBody, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + body: JsonLikeBody | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/bodies/json-like", } - _body = body.to_dict() + if not isinstance(body, Unset): + _kwargs["json"] = body.to_dict() - _kwargs["json"] = _body headers["Content-Type"] = "application/vnd+json" _kwargs["headers"] = headers return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -49,13 +50,13 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - body: JsonLikeBody, + client: AuthenticatedClient | Client, + body: JsonLikeBody | Unset = UNSET, ) -> Response[Any]: """A content type that works like json but isn't application/json Args: - body (JsonLikeBody): + body (JsonLikeBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -78,13 +79,13 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - body: JsonLikeBody, + client: AuthenticatedClient | Client, + body: JsonLikeBody | Unset = UNSET, ) -> Response[Any]: """A content type that works like json but isn't application/json Args: - body (JsonLikeBody): + body (JsonLikeBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/optional_body.py b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/optional_body.py new file mode 100644 index 000000000..8402cf086 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/optional_body.py @@ -0,0 +1,104 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.optional_body_body import OptionalBodyBody +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + body: OptionalBodyBody | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/https/github.com/bodies/optional", + } + + if not isinstance(body, Unset): + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: + return None + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + body: OptionalBodyBody | Unset = UNSET, +) -> Response[Any]: + """Test optional request body + + Args: + body (OptionalBodyBody | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + body: OptionalBodyBody | Unset = UNSET, +) -> Response[Any]: + """Test optional request body + + Args: + body (OptionalBodyBody | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/post_bodies_multiple.py b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/post_bodies_multiple.py index f71b1ef25..ca740ee45 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/post_bodies_multiple.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/post_bodies_multiple.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -8,60 +8,56 @@ from ...models.post_bodies_multiple_data_body import PostBodiesMultipleDataBody from ...models.post_bodies_multiple_files_body import PostBodiesMultipleFilesBody from ...models.post_bodies_multiple_json_body import PostBodiesMultipleJsonBody -from ...types import File, Response +from ...types import UNSET, File, Response, Unset def _get_kwargs( *, - body: Union[ - PostBodiesMultipleJsonBody, - File, - PostBodiesMultipleDataBody, - PostBodiesMultipleFilesBody, - ], -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} - - _kwargs: Dict[str, Any] = { + body: PostBodiesMultipleJsonBody | File | PostBodiesMultipleDataBody | PostBodiesMultipleFilesBody | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/bodies/multiple", } if isinstance(body, PostBodiesMultipleJsonBody): - _json_body = body.to_dict() + if not isinstance(body, Unset): + _kwargs["json"] = body.to_dict() - _kwargs["json"] = _json_body headers["Content-Type"] = "application/json" if isinstance(body, File): - _content_body = body.payload + if not isinstance(body, Unset): + _kwargs["content"] = body.payload - _kwargs["content"] = _content_body headers["Content-Type"] = "application/octet-stream" if isinstance(body, PostBodiesMultipleDataBody): - _data_body = body.to_dict() + if not isinstance(body, Unset): + _kwargs["data"] = body.to_dict() - _kwargs["data"] = _data_body headers["Content-Type"] = "application/x-www-form-urlencoded" if isinstance(body, PostBodiesMultipleFilesBody): - _files_body = body.to_multipart() + if not isinstance(body, Unset): + _kwargs["files"] = body.to_multipart() - _kwargs["files"] = _files_body headers["Content-Type"] = "multipart/form-data" _kwargs["headers"] = headers return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -72,21 +68,16 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - body: Union[ - PostBodiesMultipleJsonBody, - File, - PostBodiesMultipleDataBody, - 
PostBodiesMultipleFilesBody, - ], + client: AuthenticatedClient | Client, + body: PostBodiesMultipleJsonBody | File | PostBodiesMultipleDataBody | PostBodiesMultipleFilesBody | Unset = UNSET, ) -> Response[Any]: """Test multiple bodies Args: - body (PostBodiesMultipleJsonBody): - body (File): - body (PostBodiesMultipleDataBody): - body (PostBodiesMultipleFilesBody): + body (PostBodiesMultipleJsonBody | Unset): + body (File | Unset): + body (PostBodiesMultipleDataBody | Unset): + body (PostBodiesMultipleFilesBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -109,21 +100,16 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - body: Union[ - PostBodiesMultipleJsonBody, - File, - PostBodiesMultipleDataBody, - PostBodiesMultipleFilesBody, - ], + client: AuthenticatedClient | Client, + body: PostBodiesMultipleJsonBody | File | PostBodiesMultipleDataBody | PostBodiesMultipleFilesBody | Unset = UNSET, ) -> Response[Any]: """Test multiple bodies Args: - body (PostBodiesMultipleJsonBody): - body (File): - body (PostBodiesMultipleDataBody): - body (PostBodiesMultipleFilesBody): + body (PostBodiesMultipleJsonBody | Unset): + body (File | Unset): + body (PostBodiesMultipleDataBody | Unset): + body (PostBodiesMultipleFilesBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/bodies/refs.py b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/refs.py new file mode 100644 index 000000000..2e224bc8c --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/bodies/refs.py @@ -0,0 +1,104 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.a_model import AModel +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + body: AModel | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/https/github.com/bodies/refs", + } + + if not isinstance(body, Unset): + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: + return None + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + body: AModel | Unset = UNSET, +) -> Response[Any]: + """Test request body defined via ref + + Args: + body (AModel | Unset): A Model for testing all the ways custom objects can be used + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + body: AModel | Unset = UNSET, +) -> Response[Any]: + """Test request body defined via ref + + Args: + body (AModel | Unset): A Model for testing all the ways custom objects can be used + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/config/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/config/__init__.py new file mode 100644 index 000000000..2d7c0b23d --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/config/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/config/content_type_override.py b/end_to_end_tests/golden-record/my_test_api_client/api/config/content_type_override.py new file mode 100644 index 000000000..be06459be --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/config/content_type_override.py @@ -0,0 +1,154 @@ +from http import HTTPStatus +from typing import Any, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + body: str | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/https/github.com/config/content-type-override", + } + + if not isinstance(body, Unset): + _kwargs["json"] = body + + headers["Content-Type"] = "openapi/python/client" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> str | None: + if response.status_code == 200: + response_200 = cast(str, response.json()) + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[str]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + body: str | Unset = UNSET, +) -> Response[str]: + """Content Type Override + + Args: + body (str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[str] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, + body: str | Unset = UNSET, +) -> str | None: + """Content Type Override + + Args: + body (str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + str + """ + + return sync_detailed( + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + body: str | Unset = UNSET, +) -> Response[str]: + """Content Type Override + + Args: + body (str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[str] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, + body: str | Unset = UNSET, +) -> str | None: + """Content Type Override + + Args: + body (str | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + str + """ + + return ( + await asyncio_detailed( + client=client, + body=body, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/default/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/default/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/default/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/default/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/default/get_common_parameters.py b/end_to_end_tests/golden-record/my_test_api_client/api/default/get_common_parameters.py index f782162b1..1fe9e78a0 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/default/get_common_parameters.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/default/get_common_parameters.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -10,15 +10,15 @@ def _get_kwargs( *, - common: Union[Unset, str] = UNSET, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} + common: str | Unset = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} params["common"] = common params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/common_parameters", "params": params, @@ -27,16 +27,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return 
None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -47,12 +48,12 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - common: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + common: str | Unset = UNSET, ) -> Response[Any]: """ Args: - common (Union[Unset, str]): + common (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -75,12 +76,12 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - common: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + common: str | Unset = UNSET, ) -> Response[Any]: """ Args: - common (Union[Unset, str]): + common (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/default/get_models_allof.py b/end_to_end_tests/golden-record/my_test_api_client/api/default/get_models_allof.py new file mode 100644 index 000000000..073ce1e6b --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/default/get_models_allof.py @@ -0,0 +1,123 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.get_models_allof_response_200 import GetModelsAllofResponse200 +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/https/github.com/models/allof", + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> GetModelsAllofResponse200 | None: + if response.status_code == 200: + response_200 = GetModelsAllofResponse200.from_dict(response.json()) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[GetModelsAllofResponse200]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[GetModelsAllofResponse200]: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[GetModelsAllofResponse200] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, +) -> GetModelsAllofResponse200 | None: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetModelsAllofResponse200 + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[GetModelsAllofResponse200]: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[GetModelsAllofResponse200] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, +) -> GetModelsAllofResponse200 | None: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetModelsAllofResponse200 + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/default/get_models_oneof_with_required_const.py b/end_to_end_tests/golden-record/my_test_api_client/api/default/get_models_oneof_with_required_const.py new file mode 100644 index 000000000..3401cf323 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/default/get_models_oneof_with_required_const.py @@ -0,0 +1,146 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.get_models_oneof_with_required_const_response_200_type_0 import ( + GetModelsOneofWithRequiredConstResponse200Type0, +) +from ...models.get_models_oneof_with_required_const_response_200_type_1 import ( + GetModelsOneofWithRequiredConstResponse200Type1, +) +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/https/github.com/models/oneof-with-required-const", + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1 | None: + if response.status_code == 200: + + def _parse_response_200( + data: object, + ) -> GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1: + try: + if not isinstance(data, dict): + raise TypeError() + response_200_type_0 = GetModelsOneofWithRequiredConstResponse200Type0.from_dict(data) + + return response_200_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + if not isinstance(data, dict): + raise TypeError() + response_200_type_1 = GetModelsOneofWithRequiredConstResponse200Type1.from_dict(data) + + return response_200_type_1 + + response_200 = _parse_response_200(response.json()) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1]: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, +) -> GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1 | None: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1 + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1]: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, +) -> GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1 | None: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetModelsOneofWithRequiredConstResponse200Type0 | GetModelsOneofWithRequiredConstResponse200Type1 + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/default/post_common_parameters.py b/end_to_end_tests/golden-record/my_test_api_client/api/default/post_common_parameters.py index 6cfc13bd4..14bab0778 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/default/post_common_parameters.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/default/post_common_parameters.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -10,15 +10,15 @@ def _get_kwargs( *, - common: Union[Unset, str] = UNSET, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} + common: str | Unset = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} params["common"] = common params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/common_parameters", "params": params, @@ -27,16 +27,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -47,12 +48,12 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - common: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + common: str | Unset = UNSET, ) -> Response[Any]: """ Args: - common (Union[Unset, str]): + common (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
@@ -75,12 +76,12 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - common: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + common: str | Unset = UNSET, ) -> Response[Any]: """ Args: - common (Union[Unset, str]): + common (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/default/post_types_unions_duplicate_types.py b/end_to_end_tests/golden-record/my_test_api_client/api/default/post_types_unions_duplicate_types.py new file mode 100644 index 000000000..f6eabc4a7 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/default/post_types_unions_duplicate_types.py @@ -0,0 +1,160 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.a_model import AModel +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + body: AModel | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/https/github.com/types/unions/duplicate-types", + } + + if isinstance(body, AModel): + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> AModel | None: + if response.status_code == 200: + + def _parse_response_200(data: object) -> AModel: + if not isinstance(data, dict): + raise TypeError() + response_200_type_0 = AModel.from_dict(data) + + return response_200_type_0 + + response_200 = _parse_response_200(response.json()) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[AModel]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + body: AModel | Unset = UNSET, +) -> Response[AModel]: + """ + Args: + body (AModel | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AModel] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, + body: AModel | Unset = UNSET, +) -> AModel | None: + """ + Args: + body (AModel | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + AModel + """ + + return sync_detailed( + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + body: AModel | Unset = UNSET, +) -> Response[AModel]: + """ + Args: + body (AModel | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AModel] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, + body: AModel | Unset = UNSET, +) -> AModel | None: + """ + Args: + body (AModel | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AModel + """ + + return ( + await asyncio_detailed( + client=client, + body=body, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/default/reserved_parameters.py b/end_to_end_tests/golden-record/my_test_api_client/api/default/reserved_parameters.py index 9033321c7..990c945b0 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/default/reserved_parameters.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/default/reserved_parameters.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -12,8 +12,8 @@ def _get_kwargs( *, client_query: str, url_query: str, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} +) -> dict[str, Any]: + params: dict[str, Any] = {} params["client"] = client_query @@ -21,7 +21,7 @@ def _get_kwargs( params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/naming/reserved-parameters", "params": params, @@ -30,16 +30,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -50,7 +51,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, client_query: str, url_query: str, ) -> Response[Any]: @@ -81,7 +82,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, client_query: str, url_query: str, ) -> Response[Any]: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/defaults/__init__.py 
b/end_to_end_tests/golden-record/my_test_api_client/api/defaults/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/defaults/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/defaults/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/defaults/defaults_tests_defaults_post.py b/end_to_end_tests/golden-record/my_test_api_client/api/defaults/defaults_tests_defaults_post.py index a14cb2670..6cadc465a 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/defaults/defaults_tests_defaults_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/defaults/defaults_tests_defaults_post.py @@ -1,6 +1,6 @@ import datetime from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union +from typing import Any import httpx from dateutil.parser import isoparse @@ -19,16 +19,17 @@ def _get_kwargs( string_with_num: str = "1", date_prop: datetime.date = isoparse("1010-10-10").date(), float_prop: float = 3.14, + float_with_int: float = 3.0, int_prop: int = 7, boolean_prop: bool = False, - list_prop: List[AnEnum], - union_prop: Union[float, str] = "not a float", - union_prop_with_ref: Union[AnEnum, Unset, float] = 0.6, + list_prop: list[AnEnum], + union_prop: float | str = "not a float", + union_prop_with_ref: AnEnum | float | Unset = 0.6, enum_prop: AnEnum, - model_prop: "ModelWithUnionProperty", - required_model_prop: "ModelWithUnionProperty", -) -> Dict[str, Any]: - params: Dict[str, Any] = {} + model_prop: ModelWithUnionProperty, + required_model_prop: ModelWithUnionProperty, +) -> dict[str, Any]: + params: dict[str, Any] = {} params["string_prop"] = string_prop @@ -39,6 +40,8 @@ def _get_kwargs( params["float_prop"] = float_prop + params["float_with_int"] = float_with_int + params["int_prop"] = int_prop params["boolean_prop"] = boolean_prop @@ -50,11 +53,11 @@ def _get_kwargs( params["list_prop"] = json_list_prop - json_union_prop: Union[float, str] + json_union_prop: float | str json_union_prop = union_prop params["union_prop"] = json_union_prop - json_union_prop_with_ref: Union[Unset, float, str] + json_union_prop_with_ref: float | str | Unset if isinstance(union_prop_with_ref, Unset): json_union_prop_with_ref = UNSET elif isinstance(union_prop_with_ref, AnEnum): @@ -74,7 +77,7 @@ def _get_kwargs( params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/defaults", "params": params, @@ -84,15 +87,17 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: + if response.status_code == 200: response_200 = response.json() return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -100,8 +105,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: + 
*, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -112,20 +117,21 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, string_prop: str = "the default string", string_with_num: str = "1", date_prop: datetime.date = isoparse("1010-10-10").date(), float_prop: float = 3.14, + float_with_int: float = 3.0, int_prop: int = 7, boolean_prop: bool = False, - list_prop: List[AnEnum], - union_prop: Union[float, str] = "not a float", - union_prop_with_ref: Union[AnEnum, Unset, float] = 0.6, + list_prop: list[AnEnum], + union_prop: float | str = "not a float", + union_prop_with_ref: AnEnum | float | Unset = 0.6, enum_prop: AnEnum, - model_prop: "ModelWithUnionProperty", - required_model_prop: "ModelWithUnionProperty", -) -> Response[Union[Any, HTTPValidationError]]: + model_prop: ModelWithUnionProperty, + required_model_prop: ModelWithUnionProperty, +) -> Response[Any | HTTPValidationError]: """Defaults Args: @@ -133,11 +139,12 @@ def sync_detailed( string_with_num (str): Default: '1'. date_prop (datetime.date): Default: isoparse('1010-10-10').date(). float_prop (float): Default: 3.14. + float_with_int (float): Default: 3.0. int_prop (int): Default: 7. boolean_prop (bool): Default: False. - list_prop (List[AnEnum]): - union_prop (Union[float, str]): Default: 'not a float'. - union_prop_with_ref (Union[AnEnum, Unset, float]): Default: 0.6. + list_prop (list[AnEnum]): + union_prop (float | str): Default: 'not a float'. + union_prop_with_ref (AnEnum | float | Unset): Default: 0.6. enum_prop (AnEnum): For testing Enums in all the ways they can be used model_prop (ModelWithUnionProperty): required_model_prop (ModelWithUnionProperty): @@ -147,7 +154,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -155,6 +162,7 @@ def sync_detailed( string_with_num=string_with_num, date_prop=date_prop, float_prop=float_prop, + float_with_int=float_with_int, int_prop=int_prop, boolean_prop=boolean_prop, list_prop=list_prop, @@ -174,20 +182,21 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, string_prop: str = "the default string", string_with_num: str = "1", date_prop: datetime.date = isoparse("1010-10-10").date(), float_prop: float = 3.14, + float_with_int: float = 3.0, int_prop: int = 7, boolean_prop: bool = False, - list_prop: List[AnEnum], - union_prop: Union[float, str] = "not a float", - union_prop_with_ref: Union[AnEnum, Unset, float] = 0.6, + list_prop: list[AnEnum], + union_prop: float | str = "not a float", + union_prop_with_ref: AnEnum | float | Unset = 0.6, enum_prop: AnEnum, - model_prop: "ModelWithUnionProperty", - required_model_prop: "ModelWithUnionProperty", -) -> Optional[Union[Any, HTTPValidationError]]: + model_prop: ModelWithUnionProperty, + required_model_prop: ModelWithUnionProperty, +) -> Any | HTTPValidationError | None: """Defaults Args: @@ -195,11 +204,12 @@ def sync( string_with_num (str): Default: '1'. date_prop (datetime.date): Default: isoparse('1010-10-10').date(). float_prop (float): Default: 3.14. + float_with_int (float): Default: 3.0. int_prop (int): Default: 7. boolean_prop (bool): Default: False. 
- list_prop (List[AnEnum]): - union_prop (Union[float, str]): Default: 'not a float'. - union_prop_with_ref (Union[AnEnum, Unset, float]): Default: 0.6. + list_prop (list[AnEnum]): + union_prop (float | str): Default: 'not a float'. + union_prop_with_ref (AnEnum | float | Unset): Default: 0.6. enum_prop (AnEnum): For testing Enums in all the ways they can be used model_prop (ModelWithUnionProperty): required_model_prop (ModelWithUnionProperty): @@ -209,7 +219,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return sync_detailed( @@ -218,6 +228,7 @@ def sync( string_with_num=string_with_num, date_prop=date_prop, float_prop=float_prop, + float_with_int=float_with_int, int_prop=int_prop, boolean_prop=boolean_prop, list_prop=list_prop, @@ -231,20 +242,21 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, string_prop: str = "the default string", string_with_num: str = "1", date_prop: datetime.date = isoparse("1010-10-10").date(), float_prop: float = 3.14, + float_with_int: float = 3.0, int_prop: int = 7, boolean_prop: bool = False, - list_prop: List[AnEnum], - union_prop: Union[float, str] = "not a float", - union_prop_with_ref: Union[AnEnum, Unset, float] = 0.6, + list_prop: list[AnEnum], + union_prop: float | str = "not a float", + union_prop_with_ref: AnEnum | float | Unset = 0.6, enum_prop: AnEnum, - model_prop: "ModelWithUnionProperty", - required_model_prop: "ModelWithUnionProperty", -) -> Response[Union[Any, HTTPValidationError]]: + model_prop: ModelWithUnionProperty, + required_model_prop: ModelWithUnionProperty, +) -> Response[Any | HTTPValidationError]: """Defaults Args: @@ -252,11 +264,12 @@ async def asyncio_detailed( string_with_num (str): Default: '1'. date_prop (datetime.date): Default: isoparse('1010-10-10').date(). float_prop (float): Default: 3.14. + float_with_int (float): Default: 3.0. int_prop (int): Default: 7. boolean_prop (bool): Default: False. - list_prop (List[AnEnum]): - union_prop (Union[float, str]): Default: 'not a float'. - union_prop_with_ref (Union[AnEnum, Unset, float]): Default: 0.6. + list_prop (list[AnEnum]): + union_prop (float | str): Default: 'not a float'. + union_prop_with_ref (AnEnum | float | Unset): Default: 0.6. enum_prop (AnEnum): For testing Enums in all the ways they can be used model_prop (ModelWithUnionProperty): required_model_prop (ModelWithUnionProperty): @@ -266,7 +279,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -274,6 +287,7 @@ async def asyncio_detailed( string_with_num=string_with_num, date_prop=date_prop, float_prop=float_prop, + float_with_int=float_with_int, int_prop=int_prop, boolean_prop=boolean_prop, list_prop=list_prop, @@ -291,20 +305,21 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, string_prop: str = "the default string", string_with_num: str = "1", date_prop: datetime.date = isoparse("1010-10-10").date(), float_prop: float = 3.14, + float_with_int: float = 3.0, int_prop: int = 7, boolean_prop: bool = False, - list_prop: List[AnEnum], - union_prop: Union[float, str] = "not a float", - union_prop_with_ref: Union[AnEnum, Unset, float] = 0.6, + list_prop: list[AnEnum], + union_prop: float | str = "not a float", + union_prop_with_ref: AnEnum | float | Unset = 0.6, enum_prop: AnEnum, - model_prop: "ModelWithUnionProperty", - required_model_prop: "ModelWithUnionProperty", -) -> Optional[Union[Any, HTTPValidationError]]: + model_prop: ModelWithUnionProperty, + required_model_prop: ModelWithUnionProperty, +) -> Any | HTTPValidationError | None: """Defaults Args: @@ -312,11 +327,12 @@ async def asyncio( string_with_num (str): Default: '1'. date_prop (datetime.date): Default: isoparse('1010-10-10').date(). float_prop (float): Default: 3.14. + float_with_int (float): Default: 3.0. int_prop (int): Default: 7. boolean_prop (bool): Default: False. - list_prop (List[AnEnum]): - union_prop (Union[float, str]): Default: 'not a float'. - union_prop_with_ref (Union[AnEnum, Unset, float]): Default: 0.6. + list_prop (list[AnEnum]): + union_prop (float | str): Default: 'not a float'. + union_prop_with_ref (AnEnum | float | Unset): Default: 0.6. enum_prop (AnEnum): For testing Enums in all the ways they can be used model_prop (ModelWithUnionProperty): required_model_prop (ModelWithUnionProperty): @@ -326,7 +342,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return ( @@ -336,6 +352,7 @@ async def asyncio( string_with_num=string_with_num, date_prop=date_prop, float_prop=float_prop, + float_with_int=float_with_int, int_prop=int_prop, boolean_prop=boolean_prop, list_prop=list_prop, diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/enums/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/enums/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/enums/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/enums/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/enums/bool_enum_tests_bool_enum_post.py b/end_to_end_tests/golden-record/my_test_api_client/api/enums/bool_enum_tests_bool_enum_post.py index 92e95162c..4ed5ff6ee 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/enums/bool_enum_tests_bool_enum_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/enums/bool_enum_tests_bool_enum_post.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -11,14 +11,14 @@ def _get_kwargs( *, bool_enum: bool, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} +) -> dict[str, Any]: + params: dict[str, Any] = {} params["bool_enum"] = bool_enum params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/enum/bool", "params": params, @@ -27,16 +27,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -47,7 +48,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, bool_enum: bool, ) -> Response[Any]: """Bool Enum @@ -76,7 +77,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, bool_enum: bool, ) -> Response[Any]: """Bool Enum diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/enums/int_enum_tests_int_enum_post.py b/end_to_end_tests/golden-record/my_test_api_client/api/enums/int_enum_tests_int_enum_post.py index b39df8307..8ebc2b95f 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/enums/int_enum_tests_int_enum_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/enums/int_enum_tests_int_enum_post.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -12,15 +12,15 @@ def _get_kwargs( *, int_enum: 
AnIntEnum, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} +) -> dict[str, Any]: + params: dict[str, Any] = {} json_int_enum = int_enum.value params["int_enum"] = json_int_enum params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/enum/int", "params": params, @@ -29,16 +29,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -49,7 +50,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, int_enum: AnIntEnum, ) -> Response[Any]: """Int Enum @@ -78,7 +79,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, int_enum: AnIntEnum, ) -> Response[Any]: """Int Enum diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/location/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/location/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/location/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/location/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_header_types.py b/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_header_types.py index 904d26c72..20c2db527 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_header_types.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_header_types.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -12,14 +12,14 @@ def _get_kwargs( *, - boolean_header: Union[Unset, bool] = UNSET, - string_header: Union[Unset, str] = UNSET, - number_header: Union[Unset, float] = UNSET, - integer_header: Union[Unset, int] = UNSET, - int_enum_header: Union[Unset, GetLocationHeaderTypesIntEnumHeader] = UNSET, - string_enum_header: Union[Unset, GetLocationHeaderTypesStringEnumHeader] = UNSET, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + boolean_header: bool | Unset = UNSET, + string_header: str | Unset = UNSET, + number_header: float | Unset = UNSET, + integer_header: int | Unset = UNSET, + int_enum_header: GetLocationHeaderTypesIntEnumHeader | Unset = UNSET, + string_enum_header: GetLocationHeaderTypesStringEnumHeader | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} if not isinstance(boolean_header, Unset): headers["Boolean-Header"] = "true" if boolean_header else "false" @@ -38,7 
+38,7 @@ def _get_kwargs( if not isinstance(string_enum_header, Unset): headers["String-Enum-Header"] = str(string_enum_header) - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/location/header/types", } @@ -47,16 +47,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -67,22 +68,22 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - boolean_header: Union[Unset, bool] = UNSET, - string_header: Union[Unset, str] = UNSET, - number_header: Union[Unset, float] = UNSET, - integer_header: Union[Unset, int] = UNSET, - int_enum_header: Union[Unset, GetLocationHeaderTypesIntEnumHeader] = UNSET, - string_enum_header: Union[Unset, GetLocationHeaderTypesStringEnumHeader] = UNSET, + client: AuthenticatedClient | Client, + boolean_header: bool | Unset = UNSET, + string_header: str | Unset = UNSET, + number_header: float | Unset = UNSET, + integer_header: int | Unset = UNSET, + int_enum_header: GetLocationHeaderTypesIntEnumHeader | Unset = UNSET, + string_enum_header: GetLocationHeaderTypesStringEnumHeader | Unset = UNSET, ) -> Response[Any]: """ Args: - boolean_header (Union[Unset, bool]): - string_header (Union[Unset, str]): - number_header (Union[Unset, float]): - integer_header (Union[Unset, int]): - int_enum_header (Union[Unset, GetLocationHeaderTypesIntEnumHeader]): - string_enum_header (Union[Unset, GetLocationHeaderTypesStringEnumHeader]): + boolean_header (bool | Unset): + string_header (str | Unset): + number_header (float | Unset): + integer_header (int | Unset): + int_enum_header (GetLocationHeaderTypesIntEnumHeader | Unset): + string_enum_header (GetLocationHeaderTypesStringEnumHeader | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
@@ -110,22 +111,22 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - boolean_header: Union[Unset, bool] = UNSET, - string_header: Union[Unset, str] = UNSET, - number_header: Union[Unset, float] = UNSET, - integer_header: Union[Unset, int] = UNSET, - int_enum_header: Union[Unset, GetLocationHeaderTypesIntEnumHeader] = UNSET, - string_enum_header: Union[Unset, GetLocationHeaderTypesStringEnumHeader] = UNSET, + client: AuthenticatedClient | Client, + boolean_header: bool | Unset = UNSET, + string_header: str | Unset = UNSET, + number_header: float | Unset = UNSET, + integer_header: int | Unset = UNSET, + int_enum_header: GetLocationHeaderTypesIntEnumHeader | Unset = UNSET, + string_enum_header: GetLocationHeaderTypesStringEnumHeader | Unset = UNSET, ) -> Response[Any]: """ Args: - boolean_header (Union[Unset, bool]): - string_header (Union[Unset, str]): - number_header (Union[Unset, float]): - integer_header (Union[Unset, int]): - int_enum_header (Union[Unset, GetLocationHeaderTypesIntEnumHeader]): - string_enum_header (Union[Unset, GetLocationHeaderTypesStringEnumHeader]): + boolean_header (bool | Unset): + string_header (str | Unset): + number_header (float | Unset): + integer_header (int | Unset): + int_enum_header (GetLocationHeaderTypesIntEnumHeader | Unset): + string_enum_header (GetLocationHeaderTypesStringEnumHeader | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_query_optionality.py b/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_query_optionality.py index ea43b6731..e454a50a9 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_query_optionality.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/location/get_location_query_optionality.py @@ -1,6 +1,6 @@ import datetime from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -12,23 +12,23 @@ def _get_kwargs( *, not_null_required: datetime.datetime, - null_required: Union[None, datetime.datetime], - null_not_required: Union[None, Unset, datetime.datetime] = UNSET, - not_null_not_required: Union[Unset, datetime.datetime] = UNSET, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} + null_required: datetime.datetime | None, + null_not_required: datetime.datetime | None | Unset = UNSET, + not_null_not_required: datetime.datetime | Unset = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} json_not_null_required = not_null_required.isoformat() params["not_null_required"] = json_not_null_required - json_null_required: Union[None, str] + json_null_required: None | str if isinstance(null_required, datetime.datetime): json_null_required = null_required.isoformat() else: json_null_required = null_required params["null_required"] = json_null_required - json_null_not_required: Union[None, Unset, str] + json_null_not_required: None | str | Unset if isinstance(null_not_required, Unset): json_null_not_required = UNSET elif isinstance(null_not_required, datetime.datetime): @@ -37,14 +37,14 @@ def _get_kwargs( json_null_not_required = null_not_required params["null_not_required"] = json_null_not_required - json_not_null_not_required: Union[Unset, str] = UNSET + json_not_null_not_required: str | Unset = UNSET if not isinstance(not_null_not_required, Unset): 
json_not_null_not_required = not_null_not_required.isoformat() params["not_null_not_required"] = json_not_null_not_required params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/location/query/optionality", "params": params, @@ -53,16 +53,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -73,18 +74,18 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, not_null_required: datetime.datetime, - null_required: Union[None, datetime.datetime], - null_not_required: Union[None, Unset, datetime.datetime] = UNSET, - not_null_not_required: Union[Unset, datetime.datetime] = UNSET, + null_required: datetime.datetime | None, + null_not_required: datetime.datetime | None | Unset = UNSET, + not_null_not_required: datetime.datetime | Unset = UNSET, ) -> Response[Any]: """ Args: not_null_required (datetime.datetime): - null_required (Union[None, datetime.datetime]): - null_not_required (Union[None, Unset, datetime.datetime]): - not_null_not_required (Union[Unset, datetime.datetime]): + null_required (datetime.datetime | None): + null_not_required (datetime.datetime | None | Unset): + not_null_not_required (datetime.datetime | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -110,18 +111,18 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, not_null_required: datetime.datetime, - null_required: Union[None, datetime.datetime], - null_not_required: Union[None, Unset, datetime.datetime] = UNSET, - not_null_not_required: Union[Unset, datetime.datetime] = UNSET, + null_required: datetime.datetime | None, + null_not_required: datetime.datetime | None | Unset = UNSET, + not_null_not_required: datetime.datetime | Unset = UNSET, ) -> Response[Any]: """ Args: not_null_required (datetime.datetime): - null_required (Union[None, datetime.datetime]): - null_not_required (Union[None, Unset, datetime.datetime]): - not_null_not_required (Union[Unset, datetime.datetime]): + null_required (datetime.datetime | None): + null_not_required (datetime.datetime | None | Unset): + not_null_not_required (datetime.datetime | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/naming/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/naming/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/naming/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/naming/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/naming/hyphen_in_path.py b/end_to_end_tests/golden-record/my_test_api_client/api/naming/hyphen_in_path.py index 8bab45991..a31e2e093 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/naming/hyphen_in_path.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/naming/hyphen_in_path.py @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any +from urllib.parse import quote import httpx @@ -10,25 +11,28 @@ def _get_kwargs( hyphen_in_path: str, -) -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", - "url": f"/naming/{hyphen_in_path}", + "url": "/naming/{hyphen_in_path}".format( + hyphen_in_path=quote(str(hyphen_in_path), safe=""), + ), } return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -40,7 +44,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( hyphen_in_path: str, *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """ Args: @@ -68,7 +72,7 @@ def sync_detailed( async def asyncio_detailed( hyphen_in_path: str, *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """ Args: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/naming/mixed_case.py b/end_to_end_tests/golden-record/my_test_api_client/api/naming/mixed_case.py index 4f6321261..78c119a3b 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/naming/mixed_case.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/naming/mixed_case.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -13,8 +13,8 @@ def _get_kwargs( *, mixed_case: str, mixedCase: str, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} +) -> dict[str, Any]: + params: dict[str, Any] = {} params["mixed_case"] = mixed_case @@ -22,7 +22,7 @@ def _get_kwargs( params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/naming/mixed-case", "params": params, @@ -31,13 +31,12 @@ def _get_kwargs( return _kwargs -def _parse_response( - *,
client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[MixedCaseResponse200]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> MixedCaseResponse200 | None: + if response.status_code == 200: response_200 = MixedCaseResponse200.from_dict(response.json()) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -45,7 +44,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[MixedCaseResponse200]: return Response( status_code=HTTPStatus(response.status_code), @@ -57,7 +56,7 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, mixed_case: str, mixedCase: str, ) -> Response[MixedCaseResponse200]: @@ -88,10 +87,10 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, mixed_case: str, mixedCase: str, -) -> Optional[MixedCaseResponse200]: +) -> MixedCaseResponse200 | None: """ Args: mixed_case (str): @@ -114,7 +113,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, mixed_case: str, mixedCase: str, ) -> Response[MixedCaseResponse200]: @@ -143,10 +142,10 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, mixed_case: str, mixedCase: str, -) -> Optional[MixedCaseResponse200]: +) -> MixedCaseResponse200 | None: """ Args: mixed_case (str): diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/naming/post_naming_property_conflict_with_import.py b/end_to_end_tests/golden-record/my_test_api_client/api/naming/post_naming_property_conflict_with_import.py index 693eab608..9e848f7af 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/naming/post_naming_property_conflict_with_import.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/naming/post_naming_property_conflict_with_import.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -9,23 +9,23 @@ from ...models.post_naming_property_conflict_with_import_response_200 import ( PostNamingPropertyConflictWithImportResponse200, ) -from ...types import Response +from ...types import UNSET, Response, Unset def _get_kwargs( *, - body: PostNamingPropertyConflictWithImportBody, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + body: PostNamingPropertyConflictWithImportBody | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/naming/property-conflict-with-import", } - _body = body.to_dict() + if not isinstance(body, Unset): + _kwargs["json"] = body.to_dict() - _kwargs["json"] = _body headers["Content-Type"] = "application/json" _kwargs["headers"] = headers @@ -33,12 +33,13 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[PostNamingPropertyConflictWithImportResponse200]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> 
PostNamingPropertyConflictWithImportResponse200 | None: + if response.status_code == 200: response_200 = PostNamingPropertyConflictWithImportResponse200.from_dict(response.json()) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -46,7 +47,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[PostNamingPropertyConflictWithImportResponse200]: return Response( status_code=HTTPStatus(response.status_code), @@ -58,12 +59,12 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - body: PostNamingPropertyConflictWithImportBody, + client: AuthenticatedClient | Client, + body: PostNamingPropertyConflictWithImportBody | Unset = UNSET, ) -> Response[PostNamingPropertyConflictWithImportResponse200]: """ Args: - body (PostNamingPropertyConflictWithImportBody): + body (PostNamingPropertyConflictWithImportBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -86,12 +87,12 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], - body: PostNamingPropertyConflictWithImportBody, -) -> Optional[PostNamingPropertyConflictWithImportResponse200]: + client: AuthenticatedClient | Client, + body: PostNamingPropertyConflictWithImportBody | Unset = UNSET, +) -> PostNamingPropertyConflictWithImportResponse200 | None: """ Args: - body (PostNamingPropertyConflictWithImportBody): + body (PostNamingPropertyConflictWithImportBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -109,12 +110,12 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - body: PostNamingPropertyConflictWithImportBody, + client: AuthenticatedClient | Client, + body: PostNamingPropertyConflictWithImportBody | Unset = UNSET, ) -> Response[PostNamingPropertyConflictWithImportResponse200]: """ Args: - body (PostNamingPropertyConflictWithImportBody): + body (PostNamingPropertyConflictWithImportBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -135,12 +136,12 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], - body: PostNamingPropertyConflictWithImportBody, -) -> Optional[PostNamingPropertyConflictWithImportResponse200]: + client: AuthenticatedClient | Client, + body: PostNamingPropertyConflictWithImportBody | Unset = UNSET, +) -> PostNamingPropertyConflictWithImportResponse200 | None: """ Args: - body (PostNamingPropertyConflictWithImportBody): + body (PostNamingPropertyConflictWithImportBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/get_parameter_references_path_param.py b/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/get_parameter_references_path_param.py index 4b034ffe4..801929db9 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/get_parameter_references_path_param.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/parameter_references/get_parameter_references_path_param.py @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any +from urllib.parse import quote import httpx @@ -11,12 +12,12 @@ def _get_kwargs( path_param: str, *, - string_param: Union[Unset, str] = UNSET, - integer_param: Union[Unset, int] = 0, - header_param: Union[None, Unset, str] = UNSET, - cookie_param: Union[Unset, str] = UNSET, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + string_param: str | Unset = UNSET, + integer_param: int | Unset = 0, + header_param: None | str | Unset = UNSET, + cookie_param: str | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} if not isinstance(header_param, Unset): headers["header param"] = header_param @@ -24,7 +25,7 @@ def _get_kwargs( if cookie_param is not UNSET: cookies["cookie param"] = cookie_param - params: Dict[str, Any] = {} + params: dict[str, Any] = {} params["string param"] = string_param @@ -32,9 +33,11 @@ def _get_kwargs( params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", - "url": f"/parameter-references/{path_param}", + "url": "/parameter-references/{path_param}".format( + path_param=quote(str(path_param), safe=""), + ), "params": params, "cookies": cookies, } @@ -43,16 +46,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -64,20 +68,20 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( path_param: str, *, - client: Union[AuthenticatedClient, Client], - string_param: Union[Unset, str] = UNSET, - integer_param: Union[Unset, int] = 0, - header_param: Union[None, Unset, str] = UNSET, - cookie_param: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + string_param: str | Unset =
UNSET, + integer_param: int | Unset = 0, + header_param: None | str | Unset = UNSET, + cookie_param: str | Unset = UNSET, ) -> Response[Any]: """Test different types of parameter references Args: path_param (str): - string_param (Union[Unset, str]): - integer_param (Union[Unset, int]): Default: 0. - header_param (Union[None, Unset, str]): - cookie_param (Union[Unset, str]): + string_param (str | Unset): + integer_param (int | Unset): Default: 0. + header_param (None | str | Unset): + cookie_param (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -105,20 +109,20 @@ def sync_detailed( async def asyncio_detailed( path_param: str, *, - client: Union[AuthenticatedClient, Client], - string_param: Union[Unset, str] = UNSET, - integer_param: Union[Unset, int] = 0, - header_param: Union[None, Unset, str] = UNSET, - cookie_param: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + string_param: str | Unset = UNSET, + integer_param: int | Unset = 0, + header_param: None | str | Unset = UNSET, + cookie_param: str | Unset = UNSET, ) -> Response[Any]: """Test different types of parameter references Args: path_param (str): - string_param (Union[Unset, str]): - integer_param (Union[Unset, int]): Default: 0. - header_param (Union[None, Unset, str]): - cookie_param (Union[Unset, str]): + string_param (str | Unset): + integer_param (int | Unset): Default: 0. + header_param (None | str | Unset): + cookie_param (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/delete_common_parameters_overriding_param.py b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/delete_common_parameters_overriding_param.py index 8203fa750..1415a8cb4 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/delete_common_parameters_overriding_param.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/delete_common_parameters_overriding_param.py @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any +from urllib.parse import quote import httpx @@ -11,33 +12,36 @@ def _get_kwargs( param_path: str, *, - param_query: Union[Unset, str] = UNSET, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} + param_query: str | Unset = UNSET, +) -> dict[str, Any]: + params: dict[str, Any] = {} params["param"] = param_query params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "delete", - "url": f"/common_parameters_overriding/{param_path}", + "url": "/common_parameters_overriding/{param_path}".format( + param_path=quote(str(param_path), safe=""), + ), "params": params, } return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if
response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -49,13 +53,13 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( param_path: str, *, - client: Union[AuthenticatedClient, Client], - param_query: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + param_query: str | Unset = UNSET, ) -> Response[Any]: """ Args: param_path (str): - param_query (Union[Unset, str]): + param_query (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -80,13 +84,13 @@ def sync_detailed( async def asyncio_detailed( param_path: str, *, - client: Union[AuthenticatedClient, Client], - param_query: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + param_query: str | Unset = UNSET, ) -> Response[Any]: """ Args: param_path (str): - param_query (Union[Unset, str]): + param_query (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_common_parameters_overriding_param.py b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_common_parameters_overriding_param.py index 985e92c20..4f2c5056f 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_common_parameters_overriding_param.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_common_parameters_overriding_param.py @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any +from urllib.parse import quote import httpx @@ -12,32 +13,35 @@ def _get_kwargs( param_path: str, *, param_query: str = "overridden_in_GET", -) -> Dict[str, Any]: - params: Dict[str, Any] = {} +) -> dict[str, Any]: + params: dict[str, Any] = {} params["param"] = param_query params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", - "url": f"/common_parameters_overriding/{param_path}", + "url": "/common_parameters_overriding/{param_path}".format( + param_path=quote(str(param_path), safe=""), + ), "params": params, } return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) ->
Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -49,7 +53,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( param_path: str, *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, param_query: str = "overridden_in_GET", ) -> Response[Any]: """Test that if you have an overriding property from `PathItem` in `Operation`, it produces valid code @@ -82,7 +86,7 @@ def sync_detailed( async def asyncio_detailed( param_path: str, *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, param_query: str = "overridden_in_GET", ) -> Response[Any]: """Test that if you have an overriding property from `PathItem` in `Operation`, it produces valid code diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_same_name_multiple_locations_param.py b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_same_name_multiple_locations_param.py index 43f3b8993..dd11f68ca 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_same_name_multiple_locations_param.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/get_same_name_multiple_locations_param.py @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any +from urllib.parse import quote import httpx @@ -11,11 +12,11 @@ def _get_kwargs( param_path: str, *, - param_query: Union[Unset, str] = UNSET, - param_header: Union[Unset, str] = UNSET, - param_cookie: Union[Unset, str] = UNSET, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + param_query: str | Unset = UNSET, + param_header: str | Unset = UNSET, + param_cookie: str | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} if not isinstance(param_header, Unset): headers["param"] = param_header @@ -23,15 +24,17 @@ def _get_kwargs( if param_cookie is not UNSET: cookies["param"] = param_cookie - params: Dict[str, Any] = {} + params: dict[str, Any] = {} params["param"] = param_query params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", - "url": f"/same-name-multiple-locations/{param_path}", + "url": "/same-name-multiple-locations/{param_path}".format( + param_path=quote(str(param_path), safe=""), + ), "params": params, "cookies": cookies, } @@ -40,16 +43,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -61,17 +65,17 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( param_path: str, *, - client: Union[AuthenticatedClient, Client], - param_query: Union[Unset, str] = UNSET, - param_header:
Union[Unset, str] = UNSET, - param_cookie: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + param_query: str | Unset = UNSET, + param_header: str | Unset = UNSET, + param_cookie: str | Unset = UNSET, ) -> Response[Any]: """ Args: param_path (str): - param_query (Union[Unset, str]): - param_header (Union[Unset, str]): - param_cookie (Union[Unset, str]): + param_query (str | Unset): + param_header (str | Unset): + param_cookie (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. @@ -98,17 +102,17 @@ def sync_detailed( async def asyncio_detailed( param_path: str, *, - client: Union[AuthenticatedClient, Client], - param_query: Union[Unset, str] = UNSET, - param_header: Union[Unset, str] = UNSET, - param_cookie: Union[Unset, str] = UNSET, + client: AuthenticatedClient | Client, + param_query: str | Unset = UNSET, + param_header: str | Unset = UNSET, + param_cookie: str | Unset = UNSET, ) -> Response[Any]: """ Args: param_path (str): - param_query (Union[Unset, str]): - param_header (Union[Unset, str]): - param_cookie (Union[Unset, str]): + param_query (str | Unset): + param_header (str | Unset): + param_cookie (str | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/multiple_path_parameters.py b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/multiple_path_parameters.py index 2785fa56f..3b11674f5 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/parameters/multiple_path_parameters.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/parameters/multiple_path_parameters.py @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any +from urllib.parse import quote import httpx @@ -13,25 +14,31 @@ def _get_kwargs( param2: int, param1: str, param3: int, -) -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", - "url": f"/multiple-path-parameters/{param4}/something/{param2}/{param1}/{param3}", + "url": "/multiple-path-parameters/{param4}/something/{param2}/{param1}/{param3}".format( + param4=quote(str(param4), safe=""), + param2=quote(str(param2), safe=""), + param1=quote(str(param1), safe=""), + param3=quote(str(param3), safe=""), + ), } return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -46,7 +53,7 @@ def sync_detailed( param1: str, param3: int, *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """ Args: @@ -83,7 +90,7 @@ async def asyncio_detailed( param1: str, param3: int, *, - client:
Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """ Args: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/responses/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/responses/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/responses/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/responses/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/responses/default_status_code.py b/end_to_end_tests/golden-record/my_test_api_client/api/responses/default_status_code.py new file mode 100644 index 000000000..f2af5e3b3 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/responses/default_status_code.py @@ -0,0 +1,114 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ...client import AuthenticatedClient, Client +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/https/github.com/responses/status-codes/default", + } + + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> str: + response_default = response.text + return response_default + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[str]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[str]: + """Default Status Code Only + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[str] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, +) -> str | None: + """Default Status Code Only + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + str + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[str]: + """Default Status Code Only + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[str] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, +) -> str | None: + """Default Status Code Only + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + str + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/responses/post_responses_unions_simple_before_complex.py b/end_to_end_tests/golden-record/my_test_api_client/api/responses/post_responses_unions_simple_before_complex.py index 63f551edc..1b098950e 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/responses/post_responses_unions_simple_before_complex.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/responses/post_responses_unions_simple_before_complex.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -11,8 +11,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/responses/unions/simple_before_complex", } @@ -21,12 +21,13 @@ def _get_kwargs() -> Dict[str, Any]: def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[PostResponsesUnionsSimpleBeforeComplexResponse200]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> PostResponsesUnionsSimpleBeforeComplexResponse200 | None: + if response.status_code == 200: response_200 = PostResponsesUnionsSimpleBeforeComplexResponse200.from_dict(response.json()) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -34,7 +35,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[PostResponsesUnionsSimpleBeforeComplexResponse200]: return Response( status_code=HTTPStatus(response.status_code), @@ -46,7 +47,7 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[PostResponsesUnionsSimpleBeforeComplexResponse200]: """Regression test for #603 @@ -69,8 +70,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[PostResponsesUnionsSimpleBeforeComplexResponse200]: + client: AuthenticatedClient | Client, +) -> PostResponsesUnionsSimpleBeforeComplexResponse200 | None: """Regression test for #603 Raises: @@ -88,7 +89,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[PostResponsesUnionsSimpleBeforeComplexResponse200]: """Regression test for #603 @@ -109,8 +110,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[PostResponsesUnionsSimpleBeforeComplexResponse200]: + client: AuthenticatedClient | Client, +) -> PostResponsesUnionsSimpleBeforeComplexResponse200 | None: """Regression test for #603 Raises: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/responses/reference_response.py b/end_to_end_tests/golden-record/my_test_api_client/api/responses/reference_response.py new file mode 100644 index 000000000..4e96030ae --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/responses/reference_response.py @@ -0,0 +1,123 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.a_model import AModel +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/https/github.com/responses/reference", + } + + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> AModel | None: + if response.status_code == 200: + response_200 = AModel.from_dict(response.json()) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[AModel]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[AModel]: + """Endpoint using predefined response + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AModel] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, +) -> AModel | None: + """Endpoint using predefined response + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AModel + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[AModel]: + """Endpoint using predefined response + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[AModel] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, +) -> AModel | None: + """Endpoint using predefined response + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + AModel + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/responses/status_code_patterns.py b/end_to_end_tests/golden-record/my_test_api_client/api/responses/status_code_patterns.py new file mode 100644 index 000000000..4e3d501d4 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/responses/status_code_patterns.py @@ -0,0 +1,133 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.status_code_patterns_response_2xx import StatusCodePatternsResponse2XX +from ...models.status_code_patterns_response_4xx import StatusCodePatternsResponse4XX +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/https/github.com/response/status-codes/patterns", + } + + return _kwargs + + +def _parse_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX | None: + if 200 <= response.status_code <= 299: + response_2xx = StatusCodePatternsResponse2XX.from_dict(response.json()) + + return response_2xx + + if 400 <= response.status_code <= 499: + response_4xx = StatusCodePatternsResponse4XX.from_dict(response.json()) + + return response_4xx + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX]: + """Status Code Patterns + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, +) -> StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX | None: + """Status Code Patterns + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX]: + """Status Code Patterns + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, +) -> StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX | None: + """Status Code Patterns + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + StatusCodePatternsResponse2XX | StatusCodePatternsResponse4XX + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/responses/status_code_precedence.py b/end_to_end_tests/golden-record/my_test_api_client/api/responses/status_code_precedence.py new file mode 100644 index 000000000..9f86c2997 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/responses/status_code_precedence.py @@ -0,0 +1,134 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ...client import AuthenticatedClient, Client +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/https/github.com/response/status-codes/precedence", + } + + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> str: + if response.status_code == 200: + response_200 = response.text + return response_200 + + if response.status_code == 404: + response_404 = response.text + return response_404 + + if 400 <= response.status_code <= 499: + response_4xx = response.text + return response_4xx + + response_default = response.text + return response_default + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[str]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[str]: + """Status Codes Precedence + + Verify that specific status codes are always checked first, then ranges, then default + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[str] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, +) -> str | None: + """Status Codes Precedence + + Verify that specific status codes are always checked first, then ranges, then default + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + str + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[str]: + """Status Codes Precedence + + Verify that specific status codes are always checked first, then ranges, then default + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[str] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, +) -> str | None: + """Status Codes Precedence + + Verify that specific status codes are always checked first, then ranges, then default + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + str + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/responses/text_response.py b/end_to_end_tests/golden-record/my_test_api_client/api/responses/text_response.py index c7d71a3f3..978ad84d1 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/responses/text_response.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/responses/text_response.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/responses/text", } @@ -17,17 +17,18 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[str]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> str | None: + if response.status_code == 200: response_200 = response.text return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[str]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[str]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -38,7 +39,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[str]: """Text Response @@ -61,8 +62,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[str]: + client: AuthenticatedClient | Client, +) -> str | None: """Text Response Raises: @@ -80,7 +81,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[str]: """Text Response @@ -101,8 +102,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[str]: + client: AuthenticatedClient | Client, +) -> str | None: """Text Response Raises: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tag1/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/tag1/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tag1/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tag1/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for 
accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tag1/get_tag_with_number.py b/end_to_end_tests/golden-record/my_test_api_client/api/tag1/get_tag_with_number.py index eedbd5f7a..f2c767fa3 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tag1/get_tag_with_number.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tag1/get_tag_with_number.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tag_with_number", } @@ -17,16 +17,17 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -37,7 +38,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """ Raises: @@ -59,7 +60,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """ Raises: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tag2/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/tag2/__init__.py new file mode 100644 index 000000000..2d7c0b23d --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tag2/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tag2/get_tag_with_number.py b/end_to_end_tests/golden-record/my_test_api_client/api/tag2/get_tag_with_number.py new file mode 100644 index 000000000..f2c767fa3 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tag2/get_tag_with_number.py @@ -0,0 +1,78 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/https/github.com/tag_with_number", + } + + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: + return None + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[Any]: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, +) -> Response[Any]: + """ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/callback_test.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/callback_test.py index 925349cbd..e806fe60c 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/callback_test.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/callback_test.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -13,17 +13,16 @@ def _get_kwargs( *, body: AModel, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/callback", } - _body = body.to_dict() + _kwargs["json"] = body.to_dict() - _kwargs["json"] = _body headers["Content-Type"] = "application/json" _kwargs["headers"] = headers @@ -31,15 +30,17 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: + if 
response.status_code == 200: response_200 = response.json() return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -47,8 +48,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -59,9 +60,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Path with callback Try sending a request related to a callback @@ -74,7 +75,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -90,9 +91,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Path with callback Try sending a request related to a callback @@ -105,7 +106,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return sync_detailed( @@ -116,9 +117,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Path with callback Try sending a request related to a callback @@ -131,7 +132,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -145,9 +146,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Path with callback Try sending a request related to a callback @@ -160,7 +161,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/description_with_backslash.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/description_with_backslash.py index 9ddd267d8..f33456e97 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/description_with_backslash.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/description_with_backslash.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/description-with-backslash", } @@ -17,16 +17,17 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -37,7 +38,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: r""" Test description with \ @@ -62,7 +63,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: r""" Test description with \ diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_booleans.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_booleans.py index 8f90e7eb6..08ddea77d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_booleans.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_booleans.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, cast import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/basic_lists/booleans", } @@ -17,18 +17,19 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[List[bool]]: - if response.status_code == HTTPStatus.OK: - response_200 = cast(List[bool], response.json()) +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> list[bool] | None: + if response.status_code == 200: + response_200 = cast(list[bool], response.json()) return response_200 + if client.raise_on_unexpected_status: raise 
errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[List[bool]]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[list[bool]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -39,8 +40,8 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[bool]]: + client: AuthenticatedClient | Client, +) -> Response[list[bool]]: """Get Basic List Of Booleans Get a list of booleans @@ -50,7 +51,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[List[bool]] + Response[list[bool]] """ kwargs = _get_kwargs() @@ -64,8 +65,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[bool]]: + client: AuthenticatedClient | Client, +) -> list[bool] | None: """Get Basic List Of Booleans Get a list of booleans @@ -75,7 +76,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - List[bool] + list[bool] """ return sync_detailed( @@ -85,8 +86,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[bool]]: + client: AuthenticatedClient | Client, +) -> Response[list[bool]]: """Get Basic List Of Booleans Get a list of booleans @@ -96,7 +97,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[List[bool]] + Response[list[bool]] """ kwargs = _get_kwargs() @@ -108,8 +109,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[bool]]: + client: AuthenticatedClient | Client, +) -> list[bool] | None: """Get Basic List Of Booleans Get a list of booleans @@ -119,7 +120,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - List[bool] + list[bool] """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_floats.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_floats.py index b76743cf6..35310a218 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_floats.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_floats.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, cast import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/basic_lists/floats", } @@ -17,18 +17,19 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[List[float]]: - if response.status_code == HTTPStatus.OK: - response_200 = cast(List[float], response.json()) +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> list[float] | None: + if response.status_code == 200: + response_200 = cast(list[float], response.json()) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[List[float]]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[list[float]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -39,8 +40,8 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[float]]: + client: AuthenticatedClient | Client, +) -> Response[list[float]]: """Get Basic List Of Floats Get a list of floats @@ -50,7 +51,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[List[float]] + Response[list[float]] """ kwargs = _get_kwargs() @@ -64,8 +65,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[float]]: + client: AuthenticatedClient | Client, +) -> list[float] | None: """Get Basic List Of Floats Get a list of floats @@ -75,7 +76,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - List[float] + list[float] """ return sync_detailed( @@ -85,8 +86,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[float]]: + client: AuthenticatedClient | Client, +) -> Response[list[float]]: """Get Basic List Of Floats Get a list of floats @@ -96,7 +97,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[List[float]] + Response[list[float]] """ kwargs = _get_kwargs() @@ -108,8 +109,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[float]]: + client: AuthenticatedClient | Client, +) -> list[float] | None: """Get Basic List Of Floats Get a list of floats @@ -119,7 +120,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - List[float] + list[float] """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_integers.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_integers.py index 346bcf99f..2f0526e41 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_integers.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_integers.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, cast import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/basic_lists/integers", } @@ -17,18 +17,19 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[List[int]]: - if response.status_code == HTTPStatus.OK: - response_200 = cast(List[int], response.json()) +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> list[int] | None: + if response.status_code == 200: + response_200 = cast(list[int], response.json()) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[List[int]]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[list[int]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -39,8 +40,8 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[int]]: + client: AuthenticatedClient | Client, +) -> Response[list[int]]: """Get Basic List Of Integers Get a list of integers @@ -50,7 +51,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[List[int]] + Response[list[int]] """ kwargs = _get_kwargs() @@ -64,8 +65,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[int]]: + client: AuthenticatedClient | Client, +) -> list[int] | None: """Get Basic List Of Integers Get a list of integers @@ -75,7 +76,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - List[int] + list[int] """ return sync_detailed( @@ -85,8 +86,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[int]]: + client: AuthenticatedClient | Client, +) -> Response[list[int]]: """Get Basic List Of Integers Get a list of integers @@ -96,7 +97,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[List[int]] + Response[list[int]] """ kwargs = _get_kwargs() @@ -108,8 +109,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[int]]: + client: AuthenticatedClient | Client, +) -> list[int] | None: """Get Basic List Of Integers Get a list of integers @@ -119,7 +120,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - List[int] + list[int] """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_strings.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_strings.py index 29606477e..a4b401eb2 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_strings.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_basic_list_of_strings.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, cast import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/basic_lists/strings", } @@ -17,18 +17,19 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[List[str]]: - if response.status_code == HTTPStatus.OK: - response_200 = cast(List[str], response.json()) +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> list[str] | None: + if response.status_code == 200: + response_200 = cast(list[str], response.json()) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[List[str]]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[list[str]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -39,8 +40,8 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[str]]: + client: AuthenticatedClient | Client, +) -> Response[list[str]]: """Get Basic List Of Strings Get a list of strings @@ -50,7 +51,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[List[str]] + Response[list[str]] """ kwargs = _get_kwargs() @@ -64,8 +65,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[str]]: + client: AuthenticatedClient | Client, +) -> list[str] | None: """Get Basic List Of Strings Get a list of strings @@ -75,7 +76,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - List[str] + list[str] """ return sync_detailed( @@ -85,8 +86,8 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], -) -> Response[List[str]]: + client: AuthenticatedClient | Client, +) -> Response[list[str]]: """Get Basic List Of Strings Get a list of strings @@ -96,7 +97,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[List[str]] + Response[list[str]] """ kwargs = _get_kwargs() @@ -108,8 +109,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[List[str]]: + client: AuthenticatedClient | Client, +) -> list[str] | None: """Get Basic List Of Strings Get a list of strings @@ -119,7 +120,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - List[str] + list[str] """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_user_list.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_user_list.py index ff055d3fd..5d027ecd7 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_user_list.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/get_user_list.py @@ -1,6 +1,6 @@ import datetime from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union +from typing import Any import httpx @@ -15,12 +15,12 @@ def _get_kwargs( *, - an_enum_value: List[AnEnum], - an_enum_value_with_null: List[Union[AnEnumWithNull, None]], - an_enum_value_with_only_null: List[None], - some_date: Union[datetime.date, datetime.datetime], -) -> Dict[str, Any]: - params: Dict[str, Any] = {} + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + some_date: datetime.date | datetime.datetime, +) -> dict[str, Any]: + params: dict[str, Any] = {} json_an_enum_value = [] for an_enum_value_item_data in an_enum_value: @@ -31,7 +31,7 @@ def _get_kwargs( json_an_enum_value_with_null = [] for an_enum_value_with_null_item_data in an_enum_value_with_null: - an_enum_value_with_null_item: Union[None, str] + an_enum_value_with_null_item: None | str if isinstance(an_enum_value_with_null_item_data, AnEnumWithNull): an_enum_value_with_null_item = an_enum_value_with_null_item_data.value else: @@ -54,7 +54,7 @@ def _get_kwargs( params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/", "params": params, @@ -64,9 +64,9 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, List["AModel"]]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | list[AModel] | None: + if response.status_code == 200: response_200 = [] _response_200 = response.json() for response_200_item_data in _response_200: @@ -75,14 +75,17 @@ def _parse_response( response_200.append(response_200_item) return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 - if response.status_code == HTTPStatus.LOCKED: + + if response.status_code == 423: response_423 = HTTPValidationError.from_dict(response.json()) return response_423 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -90,8 +93,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, List["AModel"]]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | 
list[AModel]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -102,28 +105,28 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - an_enum_value: List[AnEnum], - an_enum_value_with_null: List[Union[AnEnumWithNull, None]], - an_enum_value_with_only_null: List[None], - some_date: Union[datetime.date, datetime.datetime], -) -> Response[Union[HTTPValidationError, List["AModel"]]]: + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + some_date: datetime.date | datetime.datetime, +) -> Response[HTTPValidationError | list[AModel]]: """Get List Get a list of things Args: - an_enum_value (List[AnEnum]): - an_enum_value_with_null (List[Union[AnEnumWithNull, None]]): - an_enum_value_with_only_null (List[None]): - some_date (Union[datetime.date, datetime.datetime]): + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + some_date (datetime.date | datetime.datetime): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, List['AModel']]] + Response[HTTPValidationError | list[AModel]] """ kwargs = _get_kwargs( @@ -142,28 +145,28 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], - an_enum_value: List[AnEnum], - an_enum_value_with_null: List[Union[AnEnumWithNull, None]], - an_enum_value_with_only_null: List[None], - some_date: Union[datetime.date, datetime.datetime], -) -> Optional[Union[HTTPValidationError, List["AModel"]]]: + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + some_date: datetime.date | datetime.datetime, +) -> HTTPValidationError | list[AModel] | None: """Get List Get a list of things Args: - an_enum_value (List[AnEnum]): - an_enum_value_with_null (List[Union[AnEnumWithNull, None]]): - an_enum_value_with_only_null (List[None]): - some_date (Union[datetime.date, datetime.datetime]): + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + some_date (datetime.date | datetime.datetime): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, List['AModel']] + HTTPValidationError | list[AModel] """ return sync_detailed( @@ -177,28 +180,28 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - an_enum_value: List[AnEnum], - an_enum_value_with_null: List[Union[AnEnumWithNull, None]], - an_enum_value_with_only_null: List[None], - some_date: Union[datetime.date, datetime.datetime], -) -> Response[Union[HTTPValidationError, List["AModel"]]]: + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + some_date: datetime.date | datetime.datetime, +) -> Response[HTTPValidationError | list[AModel]]: """Get List Get a list of things Args: - an_enum_value (List[AnEnum]): - an_enum_value_with_null (List[Union[AnEnumWithNull, None]]): - an_enum_value_with_only_null (List[None]): - some_date (Union[datetime.date, datetime.datetime]): + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + some_date (datetime.date | datetime.datetime): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, List['AModel']]] + Response[HTTPValidationError | list[AModel]] """ kwargs = _get_kwargs( @@ -215,28 +218,28 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], - an_enum_value: List[AnEnum], - an_enum_value_with_null: List[Union[AnEnumWithNull, None]], - an_enum_value_with_only_null: List[None], - some_date: Union[datetime.date, datetime.datetime], -) -> Optional[Union[HTTPValidationError, List["AModel"]]]: + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + some_date: datetime.date | datetime.datetime, +) -> HTTPValidationError | list[AModel] | None: """Get List Get a list of things Args: - an_enum_value (List[AnEnum]): - an_enum_value_with_null (List[Union[AnEnumWithNull, None]]): - an_enum_value_with_only_null (List[None]): - some_date (Union[datetime.date, datetime.datetime]): + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + some_date (datetime.date | datetime.datetime): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, List['AModel']] + HTTPValidationError | list[AModel] """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/json_body_tests_json_body_post.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/json_body_tests_json_body_post.py index c43a0ca7e..5be950c77 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/json_body_tests_json_body_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/json_body_tests_json_body_post.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -13,17 +13,16 @@ def _get_kwargs( *, body: AModel, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/json_body", } - _body = body.to_dict() + _kwargs["json"] = body.to_dict() - _kwargs["json"] = _body headers["Content-Type"] = "application/json" _kwargs["headers"] = headers @@ -31,15 +30,17 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: + if response.status_code == 200: response_200 = response.json() return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -47,8 +48,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -59,9 +60,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Json Body Try sending a JSON body @@ -74,7 +75,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -90,9 +91,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Json Body Try sending a JSON body @@ -105,7 +106,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return sync_detailed( @@ -116,9 +117,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Json Body Try sending a JSON body @@ -131,7 +132,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -145,9 +146,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AModel, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Json Body Try sending a JSON body @@ -160,7 +161,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/no_response_tests_no_response_get.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/no_response_tests_no_response_get.py index 670bb5663..59c746613 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/no_response_tests_no_response_get.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/no_response_tests_no_response_get.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/no_response", } @@ -17,16 +17,17 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -37,7 +38,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """No Response @@ -60,7 +61,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """No Response diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_get.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_get.py index 231a7da74..36fab061c 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_get.py +++ 
b/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_get.py @@ -1,6 +1,6 @@ from http import HTTPStatus from io import BytesIO -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -9,8 +9,8 @@ from ...types import File, Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/octet_stream", } @@ -18,18 +18,19 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[File]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> File | None: + if response.status_code == 200: response_200 = File(payload=BytesIO(response.content)) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[File]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[File]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -40,7 +41,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[File]: """Octet Stream @@ -63,8 +64,8 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[File]: + client: AuthenticatedClient | Client, +) -> File | None: """Octet Stream Raises: @@ -82,7 +83,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[File]: """Octet Stream @@ -103,8 +104,8 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], -) -> Optional[File]: + client: AuthenticatedClient | Client, +) -> File | None: """Octet Stream Raises: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_post.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_post.py index cb72ba657..0d140359d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/octet_stream_tests_octet_stream_post.py @@ -1,28 +1,29 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union, cast +from typing import Any import httpx from ... 
import errors from ...client import AuthenticatedClient, Client from ...models.http_validation_error import HTTPValidationError -from ...types import File, Response +from ...models.octet_stream_tests_octet_stream_post_response_200 import OctetStreamTestsOctetStreamPostResponse200 +from ...types import UNSET, File, Response, Unset def _get_kwargs( *, - body: File, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + body: File | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/octet_stream", } - _body = body.payload + if not isinstance(body, Unset): + _kwargs["content"] = body.payload - _kwargs["content"] = _body headers["Content-Type"] = "application/octet-stream" _kwargs["headers"] = headers @@ -30,15 +31,18 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, str]]: - if response.status_code == HTTPStatus.OK: - response_200 = cast(str, response.json()) + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200 | None: + if response.status_code == 200: + response_200 = OctetStreamTestsOctetStreamPostResponse200.from_dict(response.json()) + return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -46,8 +50,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, str]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -58,20 +62,20 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - body: File, -) -> Response[Union[HTTPValidationError, str]]: + client: AuthenticatedClient | Client, + body: File | Unset = UNSET, +) -> Response[HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200]: """Binary (octet stream) request body Args: - body (File): A file to upload + body (File | Unset): A file to upload Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, str]] + Response[HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200] """ kwargs = _get_kwargs( @@ -87,20 +91,20 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], - body: File, -) -> Optional[Union[HTTPValidationError, str]]: + client: AuthenticatedClient | Client, + body: File | Unset = UNSET, +) -> HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200 | None: """Binary (octet stream) request body Args: - body (File): A file to upload + body (File | Unset): A file to upload Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, str] + HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200 """ return sync_detailed( @@ -111,20 +115,20 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - body: File, -) -> Response[Union[HTTPValidationError, str]]: + client: AuthenticatedClient | Client, + body: File | Unset = UNSET, +) -> Response[HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200]: """Binary (octet stream) request body Args: - body (File): A file to upload + body (File | Unset): A file to upload Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, str]] + Response[HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200] """ kwargs = _get_kwargs( @@ -138,20 +142,20 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], - body: File, -) -> Optional[Union[HTTPValidationError, str]]: + client: AuthenticatedClient | Client, + body: File | Unset = UNSET, +) -> HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200 | None: """Binary (octet stream) request body Args: - body (File): A file to upload + body (File | Unset): A file to upload Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, str] + HTTPValidationError | OctetStreamTestsOctetStreamPostResponse200 """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py index 93954ace9..82f464276 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -12,33 +12,33 @@ def _get_kwargs( *, body: AFormData, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/post_form_data", } - _body = body.to_dict() + _kwargs["data"] = body.to_dict() - _kwargs["data"] = _body headers["Content-Type"] = "application/x-www-form-urlencoded" _kwargs["headers"] = headers return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -49,7 +49,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - 
client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AFormData, ) -> Response[Any]: """Post form data @@ -80,7 +80,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: AFormData, ) -> Response[Any]: """Post form data diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data_inline.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data_inline.py index b676061a3..e15881947 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data_inline.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_form_data_inline.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -12,33 +12,33 @@ def _get_kwargs( *, body: PostFormDataInlineBody, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/post_form_data_inline", } - _body = body.to_dict() + _kwargs["data"] = body.to_dict() - _kwargs["data"] = _body headers["Content-Type"] = "application/x-www-form-urlencoded" _kwargs["headers"] = headers return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -49,7 +49,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PostFormDataInlineBody, ) -> Response[Any]: """Post form data (inline schema) @@ -80,7 +80,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PostFormDataInlineBody, ) -> Response[Any]: """Post form data (inline schema) diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_tests_json_body_string.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_tests_json_body_string.py index 909c77e78..498d0572e 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_tests_json_body_string.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/post_tests_json_body_string.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union, cast +from typing import Any, cast import httpx @@ -12,17 +12,16 @@ def _get_kwargs( *, body: str, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/json_body/string", } - _body = body + _kwargs["json"] = body - 
_kwargs["json"] = _body headers["Content-Type"] = "application/json" _kwargs["headers"] = headers @@ -30,15 +29,17 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[HTTPValidationError, str]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> HTTPValidationError | str | None: + if response.status_code == 200: response_200 = cast(str, response.json()) return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -46,8 +47,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[HTTPValidationError, str]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[HTTPValidationError | str]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -58,9 +59,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: str, -) -> Response[Union[HTTPValidationError, str]]: +) -> Response[HTTPValidationError | str]: """Json Body Which is String Args: @@ -71,7 +72,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, str]] + Response[HTTPValidationError | str] """ kwargs = _get_kwargs( @@ -87,9 +88,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: str, -) -> Optional[Union[HTTPValidationError, str]]: +) -> HTTPValidationError | str | None: """Json Body Which is String Args: @@ -100,7 +101,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[HTTPValidationError, str] + HTTPValidationError | str """ return sync_detailed( @@ -111,9 +112,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: str, -) -> Response[Union[HTTPValidationError, str]]: +) -> Response[HTTPValidationError | str]: """Json Body Which is String Args: @@ -124,7 +125,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[HTTPValidationError, str]] + Response[HTTPValidationError | str] """ kwargs = _get_kwargs( @@ -138,9 +139,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: str, -) -> Optional[Union[HTTPValidationError, str]]: +) -> HTTPValidationError | str | None: """Json Body Which is String Args: @@ -151,7 +152,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[HTTPValidationError, str] + HTTPValidationError | str """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/test_inline_objects.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/test_inline_objects.py index 2a93ef5ad..ea2e45c94 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/test_inline_objects.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/test_inline_objects.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -13,17 +13,16 @@ def _get_kwargs( *, body: TestInlineObjectsBody, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/inline_objects", } - _body = body.to_dict() + _kwargs["json"] = body.to_dict() - _kwargs["json"] = _body headers["Content-Type"] = "application/json" _kwargs["headers"] = headers @@ -31,12 +30,13 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[TestInlineObjectsResponse200]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> TestInlineObjectsResponse200 | None: + if response.status_code == 200: response_200 = TestInlineObjectsResponse200.from_dict(response.json()) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -44,7 +44,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[TestInlineObjectsResponse200]: return Response( status_code=HTTPStatus(response.status_code), @@ -56,7 +56,7 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: TestInlineObjectsBody, ) -> Response[TestInlineObjectsResponse200]: """Test Inline Objects @@ -85,9 +85,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: TestInlineObjectsBody, -) -> Optional[TestInlineObjectsResponse200]: +) -> TestInlineObjectsResponse200 | None: """Test Inline Objects Args: @@ -109,7 +109,7 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: TestInlineObjectsBody, ) -> Response[TestInlineObjectsResponse200]: """Test Inline Objects @@ -136,9 +136,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: TestInlineObjectsBody, -) -> Optional[TestInlineObjectsResponse200]: +) -> TestInlineObjectsResponse200 | None: """Test Inline Objects Args: diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/token_with_cookie_auth_token_with_cookie_get.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/token_with_cookie_auth_token_with_cookie_get.py index 0c68f4726..0c7f73b45 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/token_with_cookie_auth_token_with_cookie_get.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/token_with_cookie_auth_token_with_cookie_get.py @@ -1,5 +1,5 @@ from http import 
HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -11,11 +11,11 @@ def _get_kwargs( *, my_token: str, -) -> Dict[str, Any]: +) -> dict[str, Any]: cookies = {} cookies["MyToken"] = my_token - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/auth/token_with_cookie", "cookies": cookies, @@ -24,18 +24,20 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None - if response.status_code == HTTPStatus.UNAUTHORIZED: + + if response.status_code == 401: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -46,7 +48,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, my_token: str, ) -> Response[Any]: """TOKEN_WITH_COOKIE @@ -77,7 +79,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, my_token: str, ) -> Response[Any]: """TOKEN_WITH_COOKIE diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/unsupported_content_tests_unsupported_content_get.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/unsupported_content_tests_unsupported_content_get.py index a63b7b2a2..794c38b98 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/unsupported_content_tests_unsupported_content_get.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/unsupported_content_tests_unsupported_content_get.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -8,8 +8,8 @@ from ...types import Response -def _get_kwargs() -> Dict[str, Any]: - _kwargs: Dict[str, Any] = { +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/tests/unsupported_content", } @@ -17,16 +17,17 @@ def _get_kwargs() -> Dict[str, Any]: return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -37,7 +38,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: 
htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """Unsupported Content @@ -60,7 +61,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, ) -> Response[Any]: """Unsupported Content diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/upload_file_tests_upload_post.py b/end_to_end_tests/golden-record/my_test_api_client/api/tests/upload_file_tests_upload_post.py index e36d4d92e..7498e9aee 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/upload_file_tests_upload_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/tests/upload_file_tests_upload_post.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -13,32 +13,32 @@ def _get_kwargs( *, body: BodyUploadFileTestsUploadPost, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/tests/upload", } - _body = body.to_multipart() - - _kwargs["files"] = _body + _kwargs["files"] = body.to_multipart() _kwargs["headers"] = headers return _kwargs def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Any | HTTPValidationError | None: + if response.status_code == 200: response_200 = response.json() return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: + + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) return response_422 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -46,8 +46,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[Any | HTTPValidationError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -58,9 +58,9 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: BodyUploadFileTestsUploadPost, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Upload File Upload a file @@ -73,7 +73,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -89,9 +89,9 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: BodyUploadFileTestsUploadPost, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Upload File Upload a file @@ -104,7 +104,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return sync_detailed( @@ -115,9 +115,9 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: BodyUploadFileTestsUploadPost, -) -> Response[Union[Any, HTTPValidationError]]: +) -> Response[Any | HTTPValidationError]: """Upload File Upload a file @@ -130,7 +130,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[Any | HTTPValidationError] """ kwargs = _get_kwargs( @@ -144,9 +144,9 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: BodyUploadFileTestsUploadPost, -) -> Optional[Union[Any, HTTPValidationError]]: +) -> Any | HTTPValidationError | None: """Upload File Upload a file @@ -159,7 +159,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + Any | HTTPValidationError """ return ( diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/true_/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/api/true_/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/true_/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/true_/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/true_/false_.py b/end_to_end_tests/golden-record/my_test_api_client/api/true_/false_.py index 7921b332e..825672fbd 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/true_/false_.py +++ b/end_to_end_tests/golden-record/my_test_api_client/api/true_/false_.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -11,14 +11,14 @@ def _get_kwargs( *, import_: str, -) -> Dict[str, Any]: - params: Dict[str, Any] = {} +) -> dict[str, Any]: + params: dict[str, Any] = {} params["import"] = import_ params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "get", "url": "/https/github.com/naming/keywords", "params": params, @@ -27,16 +27,17 @@ def _get_kwargs( return _kwargs -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[Any]: - if response.status_code == HTTPStatus.OK: +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: return None + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[Any]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -47,7 +48,7 @@ def _build_response(*, client: Union[AuthenticatedClient, Client], response: htt def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, import_: str, ) -> Response[Any]: """ @@ -75,7 +76,7 @@ def sync_detailed( async def asyncio_detailed( *, - client: 
Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, import_: str, ) -> Response[Any]: """ diff --git a/end_to_end_tests/golden-record/my_test_api_client/client.py b/end_to_end_tests/golden-record/my_test_api_client/client.py index 74b476ca8..1b7055ab8 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/client.py +++ b/end_to_end_tests/golden-record/my_test_api_client/client.py @@ -1,5 +1,5 @@ import ssl -from typing import Any, Dict, Optional, Union +from typing import Any import httpx from attrs import define, evolve, field @@ -35,17 +35,17 @@ class Client: """ raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str - _cookies: Dict[str, str] = field(factory=dict, kw_only=True) - _headers: Dict[str, str] = field(factory=dict, kw_only=True) - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True) - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True) - _follow_redirects: bool = field(default=False, kw_only=True) - _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True) - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) - - def with_headers(self, headers: Dict[str, str]) -> "Client": + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) + + def with_headers(self, headers: dict[str, str]) -> "Client": """Get a new client matching this one with additional headers""" if self._client is not None: self._client.headers.update(headers) @@ -53,7 +53,7 @@ def with_headers(self, headers: Dict[str, str]) -> "Client": self._async_client.headers.update(headers) return evolve(self, headers={**self._headers, **headers}) - def with_cookies(self, cookies: Dict[str, str]) -> "Client": + def with_cookies(self, cookies: dict[str, str]) -> "Client": """Get a new client matching this one with additional cookies""" if self._client is not None: self._client.cookies.update(cookies) @@ -62,7 +62,7 @@ def with_cookies(self, cookies: Dict[str, str]) -> "Client": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "Client": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if self._async_client is not None: @@ -70,7 +70,7 @@ def with_timeout(self, timeout: httpx.Timeout) -> "Client": return evolve(self, timeout=timeout) def set_httpx_client(self, client: httpx.Client) -> "Client": - """Manually the underlying httpx.Client + """Manually set the underlying httpx.Client **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
""" @@ -101,7 +101,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ @@ -165,21 +165,21 @@ class AuthenticatedClient: """ raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str - _cookies: Dict[str, str] = field(factory=dict, kw_only=True) - _headers: Dict[str, str] = field(factory=dict, kw_only=True) - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True) - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True) - _follow_redirects: bool = field(default=False, kw_only=True) - _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True) - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) token: str prefix: str = "Bearer" auth_header_name: str = "Authorization" - def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient": + def with_headers(self, headers: dict[str, str]) -> "AuthenticatedClient": """Get a new client matching this one with additional headers""" if self._client is not None: self._client.headers.update(headers) @@ -187,7 +187,7 @@ def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient": self._async_client.headers.update(headers) return evolve(self, headers={**self._headers, **headers}) - def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient": + def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": """Get a new client matching this one with additional cookies""" if self._client is not None: self._client.cookies.update(cookies) @@ -196,7 +196,7 @@ def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if self._async_client is not None: @@ -204,7 +204,7 @@ def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": return evolve(self, timeout=timeout) def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient": - """Manually the underlying httpx.Client + """Manually set the underlying httpx.Client **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
""" @@ -236,7 +236,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/__init__.py b/end_to_end_tests/golden-record/my_test_api_client/models/__init__.py index 7435983e3..c62e4cfa6 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/__init__.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/__init__.py @@ -7,6 +7,8 @@ from .a_model_with_properties_reference_that_are_not_object import AModelWithPropertiesReferenceThatAreNotObject from .all_of_has_properties_but_no_type import AllOfHasPropertiesButNoType from .all_of_has_properties_but_no_type_type_enum import AllOfHasPropertiesButNoTypeTypeEnum +from .all_of_required_base import AllOfRequiredBase +from .all_of_required_derived import AllOfRequiredDerived from .all_of_sub_model import AllOfSubModel from .all_of_sub_model_type_enum import AllOfSubModelTypeEnum from .an_all_of_enum import AnAllOfEnum @@ -34,9 +36,13 @@ from .body_upload_file_tests_upload_post_some_object import BodyUploadFileTestsUploadPostSomeObject from .body_upload_file_tests_upload_post_some_optional_object import BodyUploadFileTestsUploadPostSomeOptionalObject from .different_enum import DifferentEnum +from .extended import Extended from .free_form_model import FreeFormModel from .get_location_header_types_int_enum_header import GetLocationHeaderTypesIntEnumHeader from .get_location_header_types_string_enum_header import GetLocationHeaderTypesStringEnumHeader +from .get_models_allof_response_200 import GetModelsAllofResponse200 +from .get_models_oneof_with_required_const_response_200_type_0 import GetModelsOneofWithRequiredConstResponse200Type0 +from .get_models_oneof_with_required_const_response_200_type_1 import GetModelsOneofWithRequiredConstResponse200Type1 from .http_validation_error import HTTPValidationError from .import_ import Import from .json_like_body import JsonLikeBody @@ -58,6 +64,9 @@ from .model_with_circular_ref_in_additional_properties_b import ModelWithCircularRefInAdditionalPropertiesB from .model_with_date_time_property import ModelWithDateTimeProperty from .model_with_discriminated_union import ModelWithDiscriminatedUnion +from .model_with_merged_properties import ModelWithMergedProperties +from .model_with_merged_properties_string_to_enum import ModelWithMergedPropertiesStringToEnum +from .model_with_no_properties import ModelWithNoProperties from .model_with_primitive_additional_properties import ModelWithPrimitiveAdditionalProperties from .model_with_primitive_additional_properties_a_date_holder import ModelWithPrimitiveAdditionalPropertiesADateHolder from .model_with_property_ref import ModelWithPropertyRef @@ -65,9 +74,11 @@ from .model_with_recursive_ref_in_additional_properties import ModelWithRecursiveRefInAdditionalProperties from .model_with_union_property import ModelWithUnionProperty from .model_with_union_property_inlined import ModelWithUnionPropertyInlined -from .model_with_union_property_inlined_fruit_type_0 import ModelWithUnionPropertyInlinedFruitType0 -from .model_with_union_property_inlined_fruit_type_1 import ModelWithUnionPropertyInlinedFruitType1 +from 
.model_with_union_property_inlined_apples import ModelWithUnionPropertyInlinedApples +from .model_with_union_property_inlined_bananas import ModelWithUnionPropertyInlinedBananas from .none import None_ +from .octet_stream_tests_octet_stream_post_response_200 import OctetStreamTestsOctetStreamPostResponse200 +from .optional_body_body import OptionalBodyBody from .post_bodies_multiple_data_body import PostBodiesMultipleDataBody from .post_bodies_multiple_files_body import PostBodiesMultipleFilesBody from .post_bodies_multiple_json_body import PostBodiesMultipleJsonBody @@ -78,6 +89,9 @@ from .post_responses_unions_simple_before_complex_response_200a_type_1 import ( PostResponsesUnionsSimpleBeforeComplexResponse200AType1, ) +from .status_code_patterns_response_2xx import StatusCodePatternsResponse2XX +from .status_code_patterns_response_2xx_status import StatusCodePatternsResponse2XXStatus +from .status_code_patterns_response_4xx import StatusCodePatternsResponse4XX from .test_inline_objects_body import TestInlineObjectsBody from .test_inline_objects_response_200 import TestInlineObjectsResponse200 from .validation_error import ValidationError @@ -88,6 +102,8 @@ "AFormData", "AllOfHasPropertiesButNoType", "AllOfHasPropertiesButNoTypeTypeEnum", + "AllOfRequiredBase", + "AllOfRequiredDerived", "AllOfSubModel", "AllOfSubModelTypeEnum", "AModel", @@ -111,9 +127,13 @@ "BodyUploadFileTestsUploadPostSomeObject", "BodyUploadFileTestsUploadPostSomeOptionalObject", "DifferentEnum", + "Extended", "FreeFormModel", "GetLocationHeaderTypesIntEnumHeader", "GetLocationHeaderTypesStringEnumHeader", + "GetModelsAllofResponse200", + "GetModelsOneofWithRequiredConstResponse200Type0", + "GetModelsOneofWithRequiredConstResponse200Type1", "HTTPValidationError", "Import", "JsonLikeBody", @@ -133,6 +153,9 @@ "ModelWithCircularRefInAdditionalPropertiesB", "ModelWithDateTimeProperty", "ModelWithDiscriminatedUnion", + "ModelWithMergedProperties", + "ModelWithMergedPropertiesStringToEnum", + "ModelWithNoProperties", "ModelWithPrimitiveAdditionalProperties", "ModelWithPrimitiveAdditionalPropertiesADateHolder", "ModelWithPropertyRef", @@ -140,9 +163,11 @@ "ModelWithRecursiveRefInAdditionalProperties", "ModelWithUnionProperty", "ModelWithUnionPropertyInlined", - "ModelWithUnionPropertyInlinedFruitType0", - "ModelWithUnionPropertyInlinedFruitType1", + "ModelWithUnionPropertyInlinedApples", + "ModelWithUnionPropertyInlinedBananas", "None_", + "OctetStreamTestsOctetStreamPostResponse200", + "OptionalBodyBody", "PostBodiesMultipleDataBody", "PostBodiesMultipleFilesBody", "PostBodiesMultipleJsonBody", @@ -151,6 +176,9 @@ "PostNamingPropertyConflictWithImportResponse200", "PostResponsesUnionsSimpleBeforeComplexResponse200", "PostResponsesUnionsSimpleBeforeComplexResponse200AType1", + "StatusCodePatternsResponse2XX", + "StatusCodePatternsResponse2XXStatus", + "StatusCodePatternsResponse4XX", "TestInlineObjectsBody", "TestInlineObjectsResponse200", "ValidationError", diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_1.py b/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_1.py index cb1184b18..635661b7b 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_1.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_1.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from 
typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class ADiscriminatedUnionType1: """ Attributes: - model_type (Union[Unset, str]): + model_type (str | Unset): """ - model_type: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + model_type: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: model_type = self.model_type - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if model_type is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_type = d.pop("modelType", UNSET) a_discriminated_union_type_1 = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return a_discriminated_union_type_1 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_2.py b/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_2.py index 734f3bef4..0797af899 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_2.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/a_discriminated_union_type_2.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class ADiscriminatedUnionType2: """ Attributes: - model_type (Union[Unset, str]): + model_type (str | Unset): """ - model_type: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + model_type: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: model_type = self.model_type - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if model_type is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_type = d.pop("modelType", UNSET) a_discriminated_union_type_2 = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return a_discriminated_union_type_2 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/a_form_data.py b/end_to_end_tests/golden-record/my_test_api_client/models/a_form_data.py index a4c5cd8a7..257054eb2 100644 --- 
a/end_to_end_tests/golden-record/my_test_api_client/models/a_form_data.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/a_form_data.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -13,19 +16,19 @@ class AFormData: """ Attributes: an_required_field (str): - an_optional_field (Union[Unset, str]): + an_optional_field (str | Unset): """ an_required_field: str - an_optional_field: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + an_optional_field: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: an_required_field = self.an_required_field an_optional_field = self.an_optional_field - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -38,8 +41,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) an_required_field = d.pop("an_required_field") an_optional_field = d.pop("an_optional_field", UNSET) @@ -53,7 +56,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return a_form_data @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/a_model.py b/end_to_end_tests/golden-record/my_test_api_client/models/a_model.py index d14160bf8..b9c2c362b 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/a_model.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/a_model.py @@ -1,5 +1,9 @@ +from __future__ import annotations + import datetime -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast +from uuid import UUID from attrs import define as _attrs_define from dateutil.parser import isoparse @@ -24,54 +28,60 @@ class AModel: Attributes: an_enum_value (AnEnum): For testing Enums in all the ways they can be used an_allof_enum_with_overridden_default (AnAllOfEnum): Default: AnAllOfEnum.OVERRIDDEN_DEFAULT. - a_camel_date_time (Union[datetime.date, datetime.datetime]): + a_camel_date_time (datetime.date | datetime.datetime): a_date (datetime.date): - a_nullable_date (Union[None, datetime.date]): - required_nullable (Union[None, str]): + a_nullable_date (datetime.date | None): + a_uuid (UUID): + a_nullable_uuid (None | UUID): Default: UUID('07EF8B4D-AA09-4FFA-898D-C710796AFF41'). 
+ required_nullable (None | str): required_not_nullable (str): - one_of_models (Union['FreeFormModel', 'ModelWithUnionProperty', Any]): - nullable_one_of_models (Union['FreeFormModel', 'ModelWithUnionProperty', None]): + one_of_models (Any | FreeFormModel | ModelWithUnionProperty): + nullable_one_of_models (FreeFormModel | ModelWithUnionProperty | None): model (ModelWithUnionProperty): - nullable_model (Union['ModelWithUnionProperty', None]): - any_value (Union[Unset, Any]): - an_optional_allof_enum (Union[Unset, AnAllOfEnum]): - nested_list_of_enums (Union[Unset, List[List[DifferentEnum]]]): - a_not_required_date (Union[Unset, datetime.date]): - attr_1_leading_digit (Union[Unset, str]): - attr_leading_underscore (Union[Unset, str]): - not_required_nullable (Union[None, Unset, str]): - not_required_not_nullable (Union[Unset, str]): - not_required_one_of_models (Union['FreeFormModel', 'ModelWithUnionProperty', Unset]): - not_required_nullable_one_of_models (Union['FreeFormModel', 'ModelWithUnionProperty', None, Unset, str]): - not_required_model (Union[Unset, ModelWithUnionProperty]): - not_required_nullable_model (Union['ModelWithUnionProperty', None, Unset]): + nullable_model (ModelWithUnionProperty | None): + any_value (Any | Unset): Default: 'default'. + an_optional_allof_enum (AnAllOfEnum | Unset): + nested_list_of_enums (list[list[DifferentEnum]] | Unset): + a_not_required_date (datetime.date | Unset): + a_not_required_uuid (UUID | Unset): + attr_1_leading_digit (str | Unset): + attr_leading_underscore (str | Unset): + not_required_nullable (None | str | Unset): + not_required_not_nullable (str | Unset): + not_required_one_of_models (FreeFormModel | ModelWithUnionProperty | Unset): + not_required_nullable_one_of_models (FreeFormModel | ModelWithUnionProperty | None | str | Unset): + not_required_model (ModelWithUnionProperty | Unset): + not_required_nullable_model (ModelWithUnionProperty | None | Unset): """ an_enum_value: AnEnum - a_camel_date_time: Union[datetime.date, datetime.datetime] + a_camel_date_time: datetime.date | datetime.datetime a_date: datetime.date - a_nullable_date: Union[None, datetime.date] - required_nullable: Union[None, str] + a_nullable_date: datetime.date | None + a_uuid: UUID + required_nullable: None | str required_not_nullable: str - one_of_models: Union["FreeFormModel", "ModelWithUnionProperty", Any] - nullable_one_of_models: Union["FreeFormModel", "ModelWithUnionProperty", None] - model: "ModelWithUnionProperty" - nullable_model: Union["ModelWithUnionProperty", None] + one_of_models: Any | FreeFormModel | ModelWithUnionProperty + nullable_one_of_models: FreeFormModel | ModelWithUnionProperty | None + model: ModelWithUnionProperty + nullable_model: ModelWithUnionProperty | None an_allof_enum_with_overridden_default: AnAllOfEnum = AnAllOfEnum.OVERRIDDEN_DEFAULT - any_value: Union[Unset, Any] = UNSET - an_optional_allof_enum: Union[Unset, AnAllOfEnum] = UNSET - nested_list_of_enums: Union[Unset, List[List[DifferentEnum]]] = UNSET - a_not_required_date: Union[Unset, datetime.date] = UNSET - attr_1_leading_digit: Union[Unset, str] = UNSET - attr_leading_underscore: Union[Unset, str] = UNSET - not_required_nullable: Union[None, Unset, str] = UNSET - not_required_not_nullable: Union[Unset, str] = UNSET - not_required_one_of_models: Union["FreeFormModel", "ModelWithUnionProperty", Unset] = UNSET - not_required_nullable_one_of_models: Union["FreeFormModel", "ModelWithUnionProperty", None, Unset, str] = UNSET - not_required_model: Union[Unset, 
"ModelWithUnionProperty"] = UNSET - not_required_nullable_model: Union["ModelWithUnionProperty", None, Unset] = UNSET - - def to_dict(self) -> Dict[str, Any]: + a_nullable_uuid: None | UUID = UUID("07EF8B4D-AA09-4FFA-898D-C710796AFF41") + any_value: Any | Unset = "default" + an_optional_allof_enum: AnAllOfEnum | Unset = UNSET + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + a_not_required_date: datetime.date | Unset = UNSET + a_not_required_uuid: UUID | Unset = UNSET + attr_1_leading_digit: str | Unset = UNSET + attr_leading_underscore: str | Unset = UNSET + not_required_nullable: None | str | Unset = UNSET + not_required_not_nullable: str | Unset = UNSET + not_required_one_of_models: FreeFormModel | ModelWithUnionProperty | Unset = UNSET + not_required_nullable_one_of_models: FreeFormModel | ModelWithUnionProperty | None | str | Unset = UNSET + not_required_model: ModelWithUnionProperty | Unset = UNSET + not_required_nullable_model: ModelWithUnionProperty | None | Unset = UNSET + + def to_dict(self) -> dict[str, Any]: from ..models.free_form_model import FreeFormModel from ..models.model_with_union_property import ModelWithUnionProperty @@ -87,18 +97,26 @@ def to_dict(self) -> Dict[str, Any]: a_date = self.a_date.isoformat() - a_nullable_date: Union[None, str] + a_nullable_date: None | str if isinstance(self.a_nullable_date, datetime.date): a_nullable_date = self.a_nullable_date.isoformat() else: a_nullable_date = self.a_nullable_date - required_nullable: Union[None, str] + a_uuid = str(self.a_uuid) + + a_nullable_uuid: None | str + if isinstance(self.a_nullable_uuid, UUID): + a_nullable_uuid = str(self.a_nullable_uuid) + else: + a_nullable_uuid = self.a_nullable_uuid + + required_nullable: None | str required_nullable = self.required_nullable required_not_nullable = self.required_not_nullable - one_of_models: Union[Any, Dict[str, Any]] + one_of_models: Any | dict[str, Any] if isinstance(self.one_of_models, FreeFormModel): one_of_models = self.one_of_models.to_dict() elif isinstance(self.one_of_models, ModelWithUnionProperty): @@ -106,7 +124,7 @@ def to_dict(self) -> Dict[str, Any]: else: one_of_models = self.one_of_models - nullable_one_of_models: Union[Dict[str, Any], None] + nullable_one_of_models: dict[str, Any] | None if isinstance(self.nullable_one_of_models, FreeFormModel): nullable_one_of_models = self.nullable_one_of_models.to_dict() elif isinstance(self.nullable_one_of_models, ModelWithUnionProperty): @@ -116,7 +134,7 @@ def to_dict(self) -> Dict[str, Any]: model = self.model.to_dict() - nullable_model: Union[Dict[str, Any], None] + nullable_model: dict[str, Any] | None if isinstance(self.nullable_model, ModelWithUnionProperty): nullable_model = self.nullable_model.to_dict() else: @@ -124,11 +142,11 @@ def to_dict(self) -> Dict[str, Any]: any_value = self.any_value - an_optional_allof_enum: Union[Unset, str] = UNSET + an_optional_allof_enum: str | Unset = UNSET if not isinstance(self.an_optional_allof_enum, Unset): an_optional_allof_enum = self.an_optional_allof_enum.value - nested_list_of_enums: Union[Unset, List[List[str]]] = UNSET + nested_list_of_enums: list[list[str]] | Unset = UNSET if not isinstance(self.nested_list_of_enums, Unset): nested_list_of_enums = [] for nested_list_of_enums_item_data in self.nested_list_of_enums: @@ -139,15 +157,19 @@ def to_dict(self) -> Dict[str, Any]: nested_list_of_enums.append(nested_list_of_enums_item) - a_not_required_date: Union[Unset, str] = UNSET + a_not_required_date: str | Unset = UNSET if not 
isinstance(self.a_not_required_date, Unset): a_not_required_date = self.a_not_required_date.isoformat() + a_not_required_uuid: str | Unset = UNSET + if not isinstance(self.a_not_required_uuid, Unset): + a_not_required_uuid = str(self.a_not_required_uuid) + attr_1_leading_digit = self.attr_1_leading_digit attr_leading_underscore = self.attr_leading_underscore - not_required_nullable: Union[None, Unset, str] + not_required_nullable: None | str | Unset if isinstance(self.not_required_nullable, Unset): not_required_nullable = UNSET else: @@ -155,7 +177,7 @@ def to_dict(self) -> Dict[str, Any]: not_required_not_nullable = self.not_required_not_nullable - not_required_one_of_models: Union[Dict[str, Any], Unset] + not_required_one_of_models: dict[str, Any] | Unset if isinstance(self.not_required_one_of_models, Unset): not_required_one_of_models = UNSET elif isinstance(self.not_required_one_of_models, FreeFormModel): @@ -163,7 +185,7 @@ def to_dict(self) -> Dict[str, Any]: else: not_required_one_of_models = self.not_required_one_of_models.to_dict() - not_required_nullable_one_of_models: Union[Dict[str, Any], None, Unset, str] + not_required_nullable_one_of_models: dict[str, Any] | None | str | Unset if isinstance(self.not_required_nullable_one_of_models, Unset): not_required_nullable_one_of_models = UNSET elif isinstance(self.not_required_nullable_one_of_models, FreeFormModel): @@ -173,11 +195,11 @@ def to_dict(self) -> Dict[str, Any]: else: not_required_nullable_one_of_models = self.not_required_nullable_one_of_models - not_required_model: Union[Unset, Dict[str, Any]] = UNSET + not_required_model: dict[str, Any] | Unset = UNSET if not isinstance(self.not_required_model, Unset): not_required_model = self.not_required_model.to_dict() - not_required_nullable_model: Union[Dict[str, Any], None, Unset] + not_required_nullable_model: dict[str, Any] | None | Unset if isinstance(self.not_required_nullable_model, Unset): not_required_nullable_model = UNSET elif isinstance(self.not_required_nullable_model, ModelWithUnionProperty): @@ -185,7 +207,8 @@ def to_dict(self) -> Dict[str, Any]: else: not_required_nullable_model = self.not_required_nullable_model - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} + field_dict.update( { "an_enum_value": an_enum_value, @@ -193,6 +216,8 @@ def to_dict(self) -> Dict[str, Any]: "aCamelDateTime": a_camel_date_time, "a_date": a_date, "a_nullable_date": a_nullable_date, + "a_uuid": a_uuid, + "a_nullable_uuid": a_nullable_uuid, "required_nullable": required_nullable, "required_not_nullable": required_not_nullable, "one_of_models": one_of_models, @@ -209,6 +234,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["nested_list_of_enums"] = nested_list_of_enums if a_not_required_date is not UNSET: field_dict["a_not_required_date"] = a_not_required_date + if a_not_required_uuid is not UNSET: + field_dict["a_not_required_uuid"] = a_not_required_uuid if attr_1_leading_digit is not UNSET: field_dict["1_leading_digit"] = attr_1_leading_digit if attr_leading_underscore is not UNSET: @@ -229,23 +256,23 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.free_form_model import FreeFormModel from ..models.model_with_union_property import ModelWithUnionProperty - d = src_dict.copy() + d = dict(src_dict) an_enum_value = AnEnum(d.pop("an_enum_value")) an_allof_enum_with_overridden_default = 
AnAllOfEnum(d.pop("an_allof_enum_with_overridden_default")) - def _parse_a_camel_date_time(data: object) -> Union[datetime.date, datetime.datetime]: + def _parse_a_camel_date_time(data: object) -> datetime.date | datetime.datetime: try: if not isinstance(data, str): raise TypeError() a_camel_date_time_type_0 = isoparse(data) return a_camel_date_time_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass if not isinstance(data, str): raise TypeError() @@ -257,7 +284,7 @@ def _parse_a_camel_date_time(data: object) -> Union[datetime.date, datetime.date a_date = isoparse(d.pop("a_date")).date() - def _parse_a_nullable_date(data: object) -> Union[None, datetime.date]: + def _parse_a_nullable_date(data: object) -> datetime.date | None: if data is None: return data try: @@ -266,29 +293,46 @@ def _parse_a_nullable_date(data: object) -> Union[None, datetime.date]: a_nullable_date_type_0 = isoparse(data).date() return a_nullable_date_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[None, datetime.date], data) + return cast(datetime.date | None, data) a_nullable_date = _parse_a_nullable_date(d.pop("a_nullable_date")) - def _parse_required_nullable(data: object) -> Union[None, str]: + a_uuid = UUID(d.pop("a_uuid")) + + def _parse_a_nullable_uuid(data: object) -> None | UUID: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + a_nullable_uuid_type_0 = UUID(data) + + return a_nullable_uuid_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | UUID, data) + + a_nullable_uuid = _parse_a_nullable_uuid(d.pop("a_nullable_uuid")) + + def _parse_required_nullable(data: object) -> None | str: if data is None: return data - return cast(Union[None, str], data) + return cast(None | str, data) required_nullable = _parse_required_nullable(d.pop("required_nullable")) required_not_nullable = d.pop("required_not_nullable") - def _parse_one_of_models(data: object) -> Union["FreeFormModel", "ModelWithUnionProperty", Any]: + def _parse_one_of_models(data: object) -> Any | FreeFormModel | ModelWithUnionProperty: try: if not isinstance(data, dict): raise TypeError() one_of_models_type_0 = FreeFormModel.from_dict(data) return one_of_models_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass try: if not isinstance(data, dict): @@ -296,13 +340,13 @@ def _parse_one_of_models(data: object) -> Union["FreeFormModel", "ModelWithUnion one_of_models_type_1 = ModelWithUnionProperty.from_dict(data) return one_of_models_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["FreeFormModel", "ModelWithUnionProperty", Any], data) + return cast(Any | FreeFormModel | ModelWithUnionProperty, data) one_of_models = _parse_one_of_models(d.pop("one_of_models")) - def _parse_nullable_one_of_models(data: object) -> Union["FreeFormModel", "ModelWithUnionProperty", None]: + def _parse_nullable_one_of_models(data: object) -> FreeFormModel | ModelWithUnionProperty | None: if data is None: return data try: @@ -311,7 +355,7 @@ def _parse_nullable_one_of_models(data: object) -> Union["FreeFormModel", "Model nullable_one_of_models_type_0 = FreeFormModel.from_dict(data) return nullable_one_of_models_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass try: if not isinstance(data, dict): @@ -319,15 +363,15 @@ def 
_parse_nullable_one_of_models(data: object) -> Union["FreeFormModel", "Model nullable_one_of_models_type_1 = ModelWithUnionProperty.from_dict(data) return nullable_one_of_models_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["FreeFormModel", "ModelWithUnionProperty", None], data) + return cast(FreeFormModel | ModelWithUnionProperty | None, data) nullable_one_of_models = _parse_nullable_one_of_models(d.pop("nullable_one_of_models")) model = ModelWithUnionProperty.from_dict(d.pop("model")) - def _parse_nullable_model(data: object) -> Union["ModelWithUnionProperty", None]: + def _parse_nullable_model(data: object) -> ModelWithUnionProperty | None: if data is None: return data try: @@ -336,56 +380,65 @@ def _parse_nullable_model(data: object) -> Union["ModelWithUnionProperty", None] nullable_model_type_1 = ModelWithUnionProperty.from_dict(data) return nullable_model_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["ModelWithUnionProperty", None], data) + return cast(ModelWithUnionProperty | None, data) nullable_model = _parse_nullable_model(d.pop("nullable_model")) any_value = d.pop("any_value", UNSET) _an_optional_allof_enum = d.pop("an_optional_allof_enum", UNSET) - an_optional_allof_enum: Union[Unset, AnAllOfEnum] + an_optional_allof_enum: AnAllOfEnum | Unset if isinstance(_an_optional_allof_enum, Unset): an_optional_allof_enum = UNSET else: an_optional_allof_enum = AnAllOfEnum(_an_optional_allof_enum) - nested_list_of_enums = [] _nested_list_of_enums = d.pop("nested_list_of_enums", UNSET) - for nested_list_of_enums_item_data in _nested_list_of_enums or []: - nested_list_of_enums_item = [] - _nested_list_of_enums_item = nested_list_of_enums_item_data - for nested_list_of_enums_item_item_data in _nested_list_of_enums_item: - nested_list_of_enums_item_item = DifferentEnum(nested_list_of_enums_item_item_data) + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + if _nested_list_of_enums is not UNSET: + nested_list_of_enums = [] + for nested_list_of_enums_item_data in _nested_list_of_enums: + nested_list_of_enums_item = [] + _nested_list_of_enums_item = nested_list_of_enums_item_data + for nested_list_of_enums_item_item_data in _nested_list_of_enums_item: + nested_list_of_enums_item_item = DifferentEnum(nested_list_of_enums_item_item_data) - nested_list_of_enums_item.append(nested_list_of_enums_item_item) + nested_list_of_enums_item.append(nested_list_of_enums_item_item) - nested_list_of_enums.append(nested_list_of_enums_item) + nested_list_of_enums.append(nested_list_of_enums_item) _a_not_required_date = d.pop("a_not_required_date", UNSET) - a_not_required_date: Union[Unset, datetime.date] + a_not_required_date: datetime.date | Unset if isinstance(_a_not_required_date, Unset): a_not_required_date = UNSET else: a_not_required_date = isoparse(_a_not_required_date).date() + _a_not_required_uuid = d.pop("a_not_required_uuid", UNSET) + a_not_required_uuid: UUID | Unset + if isinstance(_a_not_required_uuid, Unset): + a_not_required_uuid = UNSET + else: + a_not_required_uuid = UUID(_a_not_required_uuid) + attr_1_leading_digit = d.pop("1_leading_digit", UNSET) attr_leading_underscore = d.pop("_leading_underscore", UNSET) - def _parse_not_required_nullable(data: object) -> Union[None, Unset, str]: + def _parse_not_required_nullable(data: object) -> None | str | Unset: if data is None: return data if isinstance(data, Unset): return data - return 
cast(Union[None, Unset, str], data) + return cast(None | str | Unset, data) not_required_nullable = _parse_not_required_nullable(d.pop("not_required_nullable", UNSET)) not_required_not_nullable = d.pop("not_required_not_nullable", UNSET) - def _parse_not_required_one_of_models(data: object) -> Union["FreeFormModel", "ModelWithUnionProperty", Unset]: + def _parse_not_required_one_of_models(data: object) -> FreeFormModel | ModelWithUnionProperty | Unset: if isinstance(data, Unset): return data try: @@ -394,7 +447,7 @@ def _parse_not_required_one_of_models(data: object) -> Union["FreeFormModel", "M not_required_one_of_models_type_0 = FreeFormModel.from_dict(data) return not_required_one_of_models_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass if not isinstance(data, dict): raise TypeError() @@ -406,7 +459,7 @@ def _parse_not_required_one_of_models(data: object) -> Union["FreeFormModel", "M def _parse_not_required_nullable_one_of_models( data: object, - ) -> Union["FreeFormModel", "ModelWithUnionProperty", None, Unset, str]: + ) -> FreeFormModel | ModelWithUnionProperty | None | str | Unset: if data is None: return data if isinstance(data, Unset): @@ -417,7 +470,7 @@ def _parse_not_required_nullable_one_of_models( not_required_nullable_one_of_models_type_0 = FreeFormModel.from_dict(data) return not_required_nullable_one_of_models_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass try: if not isinstance(data, dict): @@ -425,22 +478,22 @@ def _parse_not_required_nullable_one_of_models( not_required_nullable_one_of_models_type_1 = ModelWithUnionProperty.from_dict(data) return not_required_nullable_one_of_models_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["FreeFormModel", "ModelWithUnionProperty", None, Unset, str], data) + return cast(FreeFormModel | ModelWithUnionProperty | None | str | Unset, data) not_required_nullable_one_of_models = _parse_not_required_nullable_one_of_models( d.pop("not_required_nullable_one_of_models", UNSET) ) _not_required_model = d.pop("not_required_model", UNSET) - not_required_model: Union[Unset, ModelWithUnionProperty] + not_required_model: ModelWithUnionProperty | Unset if isinstance(_not_required_model, Unset): not_required_model = UNSET else: not_required_model = ModelWithUnionProperty.from_dict(_not_required_model) - def _parse_not_required_nullable_model(data: object) -> Union["ModelWithUnionProperty", None, Unset]: + def _parse_not_required_nullable_model(data: object) -> ModelWithUnionProperty | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -451,9 +504,9 @@ def _parse_not_required_nullable_model(data: object) -> Union["ModelWithUnionPro not_required_nullable_model_type_1 = ModelWithUnionProperty.from_dict(data) return not_required_nullable_model_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["ModelWithUnionProperty", None, Unset], data) + return cast(ModelWithUnionProperty | None | Unset, data) not_required_nullable_model = _parse_not_required_nullable_model(d.pop("not_required_nullable_model", UNSET)) @@ -463,6 +516,8 @@ def _parse_not_required_nullable_model(data: object) -> Union["ModelWithUnionPro a_camel_date_time=a_camel_date_time, a_date=a_date, a_nullable_date=a_nullable_date, + a_uuid=a_uuid, + a_nullable_uuid=a_nullable_uuid, required_nullable=required_nullable, 
required_not_nullable=required_not_nullable, one_of_models=one_of_models, @@ -473,6 +528,7 @@ def _parse_not_required_nullable_model(data: object) -> Union["ModelWithUnionPro an_optional_allof_enum=an_optional_allof_enum, nested_list_of_enums=nested_list_of_enums, a_not_required_date=a_not_required_date, + a_not_required_uuid=a_not_required_uuid, attr_1_leading_digit=attr_1_leading_digit, attr_leading_underscore=attr_leading_underscore, not_required_nullable=not_required_nullable, diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/a_model_with_properties_reference_that_are_not_object.py b/end_to_end_tests/golden-record/my_test_api_client/models/a_model_with_properties_reference_that_are_not_object.py index 88ffd349f..b44dfc975 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/a_model_with_properties_reference_that_are_not_object.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/a_model_with_properties_reference_that_are_not_object.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import datetime +from collections.abc import Mapping from io import BytesIO -from typing import Any, Dict, List, Type, TypeVar, cast +from typing import Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,26 +19,26 @@ class AModelWithPropertiesReferenceThatAreNotObject: """ Attributes: - enum_properties_ref (List[AnEnum]): - str_properties_ref (List[str]): - date_properties_ref (List[datetime.date]): - datetime_properties_ref (List[datetime.datetime]): - int32_properties_ref (List[int]): - int64_properties_ref (List[int]): - float_properties_ref (List[float]): - double_properties_ref (List[float]): - file_properties_ref (List[File]): - bytestream_properties_ref (List[str]): - enum_properties (List[AnEnum]): - str_properties (List[str]): - date_properties (List[datetime.date]): - datetime_properties (List[datetime.datetime]): - int32_properties (List[int]): - int64_properties (List[int]): - float_properties (List[float]): - double_properties (List[float]): - file_properties (List[File]): - bytestream_properties (List[str]): + enum_properties_ref (list[AnEnum]): + str_properties_ref (list[str]): + date_properties_ref (list[datetime.date]): + datetime_properties_ref (list[datetime.datetime]): + int32_properties_ref (list[int]): + int64_properties_ref (list[int]): + float_properties_ref (list[float]): + double_properties_ref (list[float]): + file_properties_ref (list[File]): + bytestream_properties_ref (list[str]): + enum_properties (list[AnEnum]): + str_properties (list[str]): + date_properties (list[datetime.date]): + datetime_properties (list[datetime.datetime]): + int32_properties (list[int]): + int64_properties (list[int]): + float_properties (list[float]): + double_properties (list[float]): + file_properties (list[File]): + bytestream_properties (list[str]): enum_property_ref (AnEnum): For testing Enums in all the ways they can be used str_property_ref (str): date_property_ref (datetime.date): @@ -48,26 +51,26 @@ class AModelWithPropertiesReferenceThatAreNotObject: bytestream_property_ref (str): """ - enum_properties_ref: List[AnEnum] - str_properties_ref: List[str] - date_properties_ref: List[datetime.date] - datetime_properties_ref: List[datetime.datetime] - int32_properties_ref: List[int] - int64_properties_ref: List[int] - float_properties_ref: List[float] - double_properties_ref: List[float] - file_properties_ref: List[File] - bytestream_properties_ref: List[str] - enum_properties: 
List[AnEnum] - str_properties: List[str] - date_properties: List[datetime.date] - datetime_properties: List[datetime.datetime] - int32_properties: List[int] - int64_properties: List[int] - float_properties: List[float] - double_properties: List[float] - file_properties: List[File] - bytestream_properties: List[str] + enum_properties_ref: list[AnEnum] + str_properties_ref: list[str] + date_properties_ref: list[datetime.date] + datetime_properties_ref: list[datetime.datetime] + int32_properties_ref: list[int] + int64_properties_ref: list[int] + float_properties_ref: list[float] + double_properties_ref: list[float] + file_properties_ref: list[File] + bytestream_properties_ref: list[str] + enum_properties: list[AnEnum] + str_properties: list[str] + date_properties: list[datetime.date] + datetime_properties: list[datetime.datetime] + int32_properties: list[int] + int64_properties: list[int] + float_properties: list[float] + double_properties: list[float] + file_properties: list[File] + bytestream_properties: list[str] enum_property_ref: AnEnum str_property_ref: str date_property_ref: datetime.date @@ -78,9 +81,9 @@ class AModelWithPropertiesReferenceThatAreNotObject: double_property_ref: float file_property_ref: File bytestream_property_ref: str - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: enum_properties_ref = [] for componentsschemas_an_other_array_of_enum_item_data in self.enum_properties_ref: componentsschemas_an_other_array_of_enum_item = componentsschemas_an_other_array_of_enum_item_data.value @@ -173,7 +176,7 @@ def to_dict(self) -> Dict[str, Any]: bytestream_property_ref = self.bytestream_property_ref - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -213,8 +216,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) enum_properties_ref = [] _enum_properties_ref = d.pop("enum_properties_ref") for componentsschemas_an_other_array_of_enum_item_data in _enum_properties_ref: @@ -222,7 +225,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: enum_properties_ref.append(componentsschemas_an_other_array_of_enum_item) - str_properties_ref = cast(List[str], d.pop("str_properties_ref")) + str_properties_ref = cast(list[str], d.pop("str_properties_ref")) date_properties_ref = [] _date_properties_ref = d.pop("date_properties_ref") @@ -242,13 +245,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: datetime_properties_ref.append(componentsschemas_an_other_array_of_date_time_item) - int32_properties_ref = cast(List[int], d.pop("int32_properties_ref")) + int32_properties_ref = cast(list[int], d.pop("int32_properties_ref")) - int64_properties_ref = cast(List[int], d.pop("int64_properties_ref")) + int64_properties_ref = cast(list[int], d.pop("int64_properties_ref")) - float_properties_ref = cast(List[float], d.pop("float_properties_ref")) + float_properties_ref = cast(list[float], d.pop("float_properties_ref")) - double_properties_ref = cast(List[float], d.pop("double_properties_ref")) + double_properties_ref = cast(list[float], d.pop("double_properties_ref")) file_properties_ref = [] _file_properties_ref = 
d.pop("file_properties_ref") @@ -259,7 +262,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: file_properties_ref.append(componentsschemas_an_other_array_of_file_item) - bytestream_properties_ref = cast(List[str], d.pop("bytestream_properties_ref")) + bytestream_properties_ref = cast(list[str], d.pop("bytestream_properties_ref")) enum_properties = [] _enum_properties = d.pop("enum_properties") @@ -268,7 +271,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: enum_properties.append(componentsschemas_an_array_of_enum_item) - str_properties = cast(List[str], d.pop("str_properties")) + str_properties = cast(list[str], d.pop("str_properties")) date_properties = [] _date_properties = d.pop("date_properties") @@ -284,13 +287,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: datetime_properties.append(componentsschemas_an_array_of_date_time_item) - int32_properties = cast(List[int], d.pop("int32_properties")) + int32_properties = cast(list[int], d.pop("int32_properties")) - int64_properties = cast(List[int], d.pop("int64_properties")) + int64_properties = cast(list[int], d.pop("int64_properties")) - float_properties = cast(List[float], d.pop("float_properties")) + float_properties = cast(list[float], d.pop("float_properties")) - double_properties = cast(List[float], d.pop("double_properties")) + double_properties = cast(list[float], d.pop("double_properties")) file_properties = [] _file_properties = d.pop("file_properties") @@ -301,7 +304,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: file_properties.append(componentsschemas_an_array_of_file_item) - bytestream_properties = cast(List[str], d.pop("bytestream_properties")) + bytestream_properties = cast(list[str], d.pop("bytestream_properties")) enum_property_ref = AnEnum(d.pop("enum_property_ref")) @@ -360,7 +363,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return a_model_with_properties_reference_that_are_not_object @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/all_of_has_properties_but_no_type.py b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_has_properties_but_no_type.py index 245a1b04a..3295dcf26 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/all_of_has_properties_but_no_type.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_has_properties_but_no_type.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -13,46 +16,46 @@ class AllOfHasPropertiesButNoType: """ Attributes: - a_sub_property (Union[Unset, str]): - type (Union[Unset, str]): - type_enum (Union[Unset, AllOfHasPropertiesButNoTypeTypeEnum]): + a_sub_property (str | Unset): + type_ (str | Unset): + type_enum (AllOfHasPropertiesButNoTypeTypeEnum | Unset): """ - a_sub_property: Union[Unset, str] = UNSET - type: Union[Unset, str] = UNSET - type_enum: Union[Unset, AllOfHasPropertiesButNoTypeTypeEnum] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a_sub_property: str | Unset = UNSET + type_: str | Unset = UNSET + type_enum: AllOfHasPropertiesButNoTypeTypeEnum | Unset = UNSET + 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a_sub_property = self.a_sub_property - type = self.type + type_ = self.type_ - type_enum: Union[Unset, int] = UNSET + type_enum: int | Unset = UNSET if not isinstance(self.type_enum, Unset): type_enum = self.type_enum.value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a_sub_property is not UNSET: field_dict["a_sub_property"] = a_sub_property - if type is not UNSET: - field_dict["type"] = type + if type_ is not UNSET: + field_dict["type"] = type_ if type_enum is not UNSET: field_dict["type_enum"] = type_enum return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a_sub_property = d.pop("a_sub_property", UNSET) - type = d.pop("type", UNSET) + type_ = d.pop("type", UNSET) _type_enum = d.pop("type_enum", UNSET) - type_enum: Union[Unset, AllOfHasPropertiesButNoTypeTypeEnum] + type_enum: AllOfHasPropertiesButNoTypeTypeEnum | Unset if isinstance(_type_enum, Unset): type_enum = UNSET else: @@ -60,7 +63,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: all_of_has_properties_but_no_type = cls( a_sub_property=a_sub_property, - type=type, + type_=type_, type_enum=type_enum, ) @@ -68,7 +71,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return all_of_has_properties_but_no_type @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/all_of_required_base.py b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_required_base.py new file mode 100644 index 000000000..b06f45c30 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_required_base.py @@ -0,0 +1,70 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="AllOfRequiredBase") + + +@_attrs_define +class AllOfRequiredBase: + """ + Attributes: + bar (str | Unset): The bar property + baz (str | Unset): The baz property + """ + + bar: str | Unset = UNSET + baz: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + bar = self.bar + + baz = self.baz + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if bar is not UNSET: + field_dict["bar"] = bar + if baz is not UNSET: + field_dict["baz"] = baz + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + bar = d.pop("bar", UNSET) + + baz = d.pop("baz", UNSET) + + all_of_required_base = cls( + bar=bar, + baz=baz, + ) + + all_of_required_base.additional_properties = d + return all_of_required_base + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + 
self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/all_of_required_derived.py b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_required_derived.py new file mode 100644 index 000000000..505334d6b --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_required_derived.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="AllOfRequiredDerived") + + +@_attrs_define +class AllOfRequiredDerived: + """ + Attributes: + bar (str): The bar property + baz (str | Unset): The baz property + """ + + bar: str + baz: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + bar = self.bar + + baz = self.baz + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "bar": bar, + } + ) + if baz is not UNSET: + field_dict["baz"] = baz + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + bar = d.pop("bar") + + baz = d.pop("baz", UNSET) + + all_of_required_derived = cls( + bar=bar, + baz=baz, + ) + + all_of_required_derived.additional_properties = d + return all_of_required_derived + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/all_of_sub_model.py b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_sub_model.py index 550b9b9c4..65b6f97c6 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/all_of_sub_model.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/all_of_sub_model.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -13,46 +16,46 @@ class AllOfSubModel: """ Attributes: - a_sub_property (Union[Unset, str]): - type (Union[Unset, str]): - type_enum (Union[Unset, AllOfSubModelTypeEnum]): + a_sub_property (str | Unset): + type_ (str | Unset): + type_enum (AllOfSubModelTypeEnum | Unset): """ - a_sub_property: Union[Unset, str] = UNSET - type: Union[Unset, str] = UNSET - type_enum: Union[Unset, AllOfSubModelTypeEnum] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a_sub_property: str | Unset = UNSET + type_: str | Unset = UNSET + type_enum: AllOfSubModelTypeEnum | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: 
a_sub_property = self.a_sub_property - type = self.type + type_ = self.type_ - type_enum: Union[Unset, int] = UNSET + type_enum: int | Unset = UNSET if not isinstance(self.type_enum, Unset): type_enum = self.type_enum.value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a_sub_property is not UNSET: field_dict["a_sub_property"] = a_sub_property - if type is not UNSET: - field_dict["type"] = type + if type_ is not UNSET: + field_dict["type"] = type_ if type_enum is not UNSET: field_dict["type_enum"] = type_enum return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a_sub_property = d.pop("a_sub_property", UNSET) - type = d.pop("type", UNSET) + type_ = d.pop("type", UNSET) _type_enum = d.pop("type_enum", UNSET) - type_enum: Union[Unset, AllOfSubModelTypeEnum] + type_enum: AllOfSubModelTypeEnum | Unset if isinstance(_type_enum, Unset): type_enum = UNSET else: @@ -60,7 +63,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: all_of_sub_model = cls( a_sub_property=a_sub_property, - type=type, + type_=type_, type_enum=type_enum, ) @@ -68,7 +71,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return all_of_sub_model @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_a_item.py b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_a_item.py index b7792fefc..8750fb194 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_a_item.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_a_item.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,14 +19,14 @@ class AnArrayWithACircularRefInItemsObjectAItem: """ Attributes: - circular (Union[Unset, List['AnArrayWithACircularRefInItemsObjectBItem']]): + circular (list[AnArrayWithACircularRefInItemsObjectBItem] | Unset): """ - circular: Union[Unset, List["AnArrayWithACircularRefInItemsObjectBItem"]] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + circular: list[AnArrayWithACircularRefInItemsObjectBItem] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - circular: Union[Unset, List[Dict[str, Any]]] = UNSET + def to_dict(self) -> dict[str, Any]: + circular: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.circular, Unset): circular = [] for componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item_data in self.circular: @@ -32,7 +35,7 @@ def to_dict(self) -> Dict[str, Any]: ) circular.append(componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if 
circular is not UNSET: @@ -41,22 +44,24 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.an_array_with_a_circular_ref_in_items_object_b_item import ( AnArrayWithACircularRefInItemsObjectBItem, ) - d = src_dict.copy() - circular = [] + d = dict(src_dict) _circular = d.pop("circular", UNSET) - for componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item_data in _circular or []: - componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item = ( - AnArrayWithACircularRefInItemsObjectBItem.from_dict( - componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item_data + circular: list[AnArrayWithACircularRefInItemsObjectBItem] | Unset = UNSET + if _circular is not UNSET: + circular = [] + for componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item_data in _circular: + componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item = ( + AnArrayWithACircularRefInItemsObjectBItem.from_dict( + componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item_data + ) ) - ) - circular.append(componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item) + circular.append(componentsschemas_an_array_with_a_circular_ref_in_items_object_b_item) an_array_with_a_circular_ref_in_items_object_a_item = cls( circular=circular, @@ -66,7 +71,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return an_array_with_a_circular_ref_in_items_object_a_item @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_a_item.py b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_a_item.py index c505553b6..06ce50b1e 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_a_item.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_a_item.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,12 +19,12 @@ class AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem: """ """ - additional_properties: Dict[str, List["AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem"]] = ( + additional_properties: dict[str, list[AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem]] = ( _attrs_field(init=False, factory=dict) ) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = [] for ( @@ -35,12 +38,12 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.an_array_with_a_circular_ref_in_items_object_additional_properties_b_item 
import ( AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem, ) - d = src_dict.copy() + d = dict(src_dict) an_array_with_a_circular_ref_in_items_object_additional_properties_a_item = cls() additional_properties = {} @@ -68,15 +71,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return an_array_with_a_circular_ref_in_items_object_additional_properties_a_item @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> List["AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem"]: + def __getitem__(self, key: str) -> list[AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem]: return self.additional_properties[key] - def __setitem__( - self, key: str, value: List["AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem"] - ) -> None: + def __setitem__(self, key: str, value: list[AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem]) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_b_item.py b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_b_item.py index 9d2dc9827..689ac0127 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_b_item.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_additional_properties_b_item.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,12 +19,12 @@ class AnArrayWithACircularRefInItemsObjectAdditionalPropertiesBItem: """ """ - additional_properties: Dict[str, List["AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem"]] = ( + additional_properties: dict[str, list[AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem]] = ( _attrs_field(init=False, factory=dict) ) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = [] for ( @@ -35,12 +38,12 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.an_array_with_a_circular_ref_in_items_object_additional_properties_a_item import ( AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem, ) - d = src_dict.copy() + d = dict(src_dict) an_array_with_a_circular_ref_in_items_object_additional_properties_b_item = cls() additional_properties = {} @@ -68,15 +71,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return an_array_with_a_circular_ref_in_items_object_additional_properties_b_item @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> List["AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem"]: + def __getitem__(self, key: 
str) -> list[AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem]: return self.additional_properties[key] - def __setitem__( - self, key: str, value: List["AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem"] - ) -> None: + def __setitem__(self, key: str, value: list[AnArrayWithACircularRefInItemsObjectAdditionalPropertiesAItem]) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_b_item.py b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_b_item.py index 622d5d999..0d4f07ece 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_b_item.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_circular_ref_in_items_object_b_item.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,14 +19,14 @@ class AnArrayWithACircularRefInItemsObjectBItem: """ Attributes: - circular (Union[Unset, List['AnArrayWithACircularRefInItemsObjectAItem']]): + circular (list[AnArrayWithACircularRefInItemsObjectAItem] | Unset): """ - circular: Union[Unset, List["AnArrayWithACircularRefInItemsObjectAItem"]] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + circular: list[AnArrayWithACircularRefInItemsObjectAItem] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - circular: Union[Unset, List[Dict[str, Any]]] = UNSET + def to_dict(self) -> dict[str, Any]: + circular: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.circular, Unset): circular = [] for componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item_data in self.circular: @@ -32,7 +35,7 @@ def to_dict(self) -> Dict[str, Any]: ) circular.append(componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if circular is not UNSET: @@ -41,22 +44,24 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.an_array_with_a_circular_ref_in_items_object_a_item import ( AnArrayWithACircularRefInItemsObjectAItem, ) - d = src_dict.copy() - circular = [] + d = dict(src_dict) _circular = d.pop("circular", UNSET) - for componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item_data in _circular or []: - componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item = ( - AnArrayWithACircularRefInItemsObjectAItem.from_dict( - componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item_data + circular: list[AnArrayWithACircularRefInItemsObjectAItem] | Unset = UNSET + if _circular is not UNSET: + circular = [] + for componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item_data in _circular: + componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item = ( + AnArrayWithACircularRefInItemsObjectAItem.from_dict( + 
componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item_data + ) ) - ) - circular.append(componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item) + circular.append(componentsschemas_an_array_with_a_circular_ref_in_items_object_a_item) an_array_with_a_circular_ref_in_items_object_b_item = cls( circular=circular, @@ -66,7 +71,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return an_array_with_a_circular_ref_in_items_object_b_item @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_additional_properties_item.py b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_additional_properties_item.py index e19cfc052..10b6e6c93 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_additional_properties_item.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_additional_properties_item.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,12 +13,12 @@ class AnArrayWithARecursiveRefInItemsObjectAdditionalPropertiesItem: """ """ - additional_properties: Dict[str, List["AnArrayWithARecursiveRefInItemsObjectAdditionalPropertiesItem"]] = ( + additional_properties: dict[str, list[AnArrayWithARecursiveRefInItemsObjectAdditionalPropertiesItem]] = ( _attrs_field(init=False, factory=dict) ) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = [] for componentsschemas_an_array_with_a_recursive_ref_in_items_object_additional_properties_item_data in prop: @@ -27,8 +30,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) an_array_with_a_recursive_ref_in_items_object_additional_properties_item = cls() additional_properties = {} @@ -56,15 +59,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return an_array_with_a_recursive_ref_in_items_object_additional_properties_item @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> List["AnArrayWithARecursiveRefInItemsObjectAdditionalPropertiesItem"]: + def __getitem__(self, key: str) -> list[AnArrayWithARecursiveRefInItemsObjectAdditionalPropertiesItem]: return self.additional_properties[key] - def __setitem__( - self, key: str, value: List["AnArrayWithARecursiveRefInItemsObjectAdditionalPropertiesItem"] - ) -> None: + def __setitem__(self, key: str, value: list[AnArrayWithARecursiveRefInItemsObjectAdditionalPropertiesItem]) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git 
a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_item.py b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_item.py index 6b12b9b5d..581a9f622 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_item.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/an_array_with_a_recursive_ref_in_items_object_item.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,14 +15,14 @@ class AnArrayWithARecursiveRefInItemsObjectItem: """ Attributes: - recursive (Union[Unset, List['AnArrayWithARecursiveRefInItemsObjectItem']]): + recursive (list[AnArrayWithARecursiveRefInItemsObjectItem] | Unset): """ - recursive: Union[Unset, List["AnArrayWithARecursiveRefInItemsObjectItem"]] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + recursive: list[AnArrayWithARecursiveRefInItemsObjectItem] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - recursive: Union[Unset, List[Dict[str, Any]]] = UNSET + def to_dict(self) -> dict[str, Any]: + recursive: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.recursive, Unset): recursive = [] for componentsschemas_an_array_with_a_recursive_ref_in_items_object_item_data in self.recursive: @@ -28,7 +31,7 @@ def to_dict(self) -> Dict[str, Any]: ) recursive.append(componentsschemas_an_array_with_a_recursive_ref_in_items_object_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if recursive is not UNSET: @@ -37,18 +40,20 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() - recursive = [] + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) _recursive = d.pop("recursive", UNSET) - for componentsschemas_an_array_with_a_recursive_ref_in_items_object_item_data in _recursive or []: - componentsschemas_an_array_with_a_recursive_ref_in_items_object_item = ( - AnArrayWithARecursiveRefInItemsObjectItem.from_dict( - componentsschemas_an_array_with_a_recursive_ref_in_items_object_item_data + recursive: list[AnArrayWithARecursiveRefInItemsObjectItem] | Unset = UNSET + if _recursive is not UNSET: + recursive = [] + for componentsschemas_an_array_with_a_recursive_ref_in_items_object_item_data in _recursive: + componentsschemas_an_array_with_a_recursive_ref_in_items_object_item = ( + AnArrayWithARecursiveRefInItemsObjectItem.from_dict( + componentsschemas_an_array_with_a_recursive_ref_in_items_object_item_data + ) ) - ) - recursive.append(componentsschemas_an_array_with_a_recursive_ref_in_items_object_item) + recursive.append(componentsschemas_an_array_with_a_recursive_ref_in_items_object_item) an_array_with_a_recursive_ref_in_items_object_item = cls( recursive=recursive, @@ -58,7 +63,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return an_array_with_a_recursive_ref_in_items_object_item @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return 
list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/another_all_of_sub_model.py b/end_to_end_tests/golden-record/my_test_api_client/models/another_all_of_sub_model.py index fde2bb6f8..7c58d1a9c 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/another_all_of_sub_model.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/another_all_of_sub_model.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,53 +17,53 @@ class AnotherAllOfSubModel: """ Attributes: - another_sub_property (Union[Unset, str]): - type (Union[Unset, AnotherAllOfSubModelType]): - type_enum (Union[Unset, AnotherAllOfSubModelTypeEnum]): + another_sub_property (str | Unset): + type_ (AnotherAllOfSubModelType | Unset): + type_enum (AnotherAllOfSubModelTypeEnum | Unset): """ - another_sub_property: Union[Unset, str] = UNSET - type: Union[Unset, AnotherAllOfSubModelType] = UNSET - type_enum: Union[Unset, AnotherAllOfSubModelTypeEnum] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + another_sub_property: str | Unset = UNSET + type_: AnotherAllOfSubModelType | Unset = UNSET + type_enum: AnotherAllOfSubModelTypeEnum | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: another_sub_property = self.another_sub_property - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = self.type.value + type_: str | Unset = UNSET + if not isinstance(self.type_, Unset): + type_ = self.type_.value - type_enum: Union[Unset, int] = UNSET + type_enum: int | Unset = UNSET if not isinstance(self.type_enum, Unset): type_enum = self.type_enum.value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if another_sub_property is not UNSET: field_dict["another_sub_property"] = another_sub_property - if type is not UNSET: - field_dict["type"] = type + if type_ is not UNSET: + field_dict["type"] = type_ if type_enum is not UNSET: field_dict["type_enum"] = type_enum return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) another_sub_property = d.pop("another_sub_property", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, AnotherAllOfSubModelType] - if isinstance(_type, Unset): - type = UNSET + _type_ = d.pop("type", UNSET) + type_: AnotherAllOfSubModelType | Unset + if isinstance(_type_, Unset): + type_ = UNSET else: - type = AnotherAllOfSubModelType(_type) + type_ = AnotherAllOfSubModelType(_type_) _type_enum = d.pop("type_enum", UNSET) - type_enum: Union[Unset, AnotherAllOfSubModelTypeEnum] + type_enum: AnotherAllOfSubModelTypeEnum | Unset if isinstance(_type_enum, Unset): type_enum = UNSET else: @@ -68,7 +71,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: another_all_of_sub_model = cls( another_sub_property=another_sub_property, - type=type, + type_=type_, type_enum=type_enum, ) @@ -76,7 +79,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return 
another_all_of_sub_model @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py index 20d27d4a6..628ed2ccd 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post.py @@ -1,14 +1,18 @@ +from __future__ import annotations + import datetime import json +from collections.abc import Mapping from io import BytesIO -from typing import TYPE_CHECKING, Any, Dict, List, Tuple, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field from dateutil.parser import isoparse +from .. import types from ..models.different_enum import DifferentEnum -from ..types import UNSET, File, FileJsonType, Unset +from ..types import UNSET, File, FileTypes, Unset if TYPE_CHECKING: from ..models.a_form_data import AFormData @@ -32,65 +36,87 @@ class BodyUploadFileTestsUploadPost: """ Attributes: some_file (File): + some_required_number (float): some_object (BodyUploadFileTestsUploadPostSomeObject): - some_nullable_object (Union['BodyUploadFileTestsUploadPostSomeNullableObject', None]): - some_optional_file (Union[Unset, File]): - some_string (Union[Unset, str]): Default: 'some_default_string'. - a_datetime (Union[Unset, datetime.datetime]): - a_date (Union[Unset, datetime.date]): - some_number (Union[Unset, float]): - some_array (Union[List['AFormData'], None, Unset]): - some_optional_object (Union[Unset, BodyUploadFileTestsUploadPostSomeOptionalObject]): - some_enum (Union[Unset, DifferentEnum]): An enumeration. + some_nullable_object (BodyUploadFileTestsUploadPostSomeNullableObject | None): + some_optional_file (File | Unset): + some_string (str | Unset): Default: 'some_default_string'. + a_datetime (datetime.datetime | Unset): + a_date (datetime.date | Unset): + some_number (float | Unset): + some_nullable_number (float | None | Unset): + some_int_array (list[int | None] | Unset): + some_array (list[AFormData] | None | Unset): + some_optional_object (BodyUploadFileTestsUploadPostSomeOptionalObject | Unset): + some_enum (DifferentEnum | Unset): An enumeration. 
""" some_file: File - some_object: "BodyUploadFileTestsUploadPostSomeObject" - some_nullable_object: Union["BodyUploadFileTestsUploadPostSomeNullableObject", None] - some_optional_file: Union[Unset, File] = UNSET - some_string: Union[Unset, str] = "some_default_string" - a_datetime: Union[Unset, datetime.datetime] = UNSET - a_date: Union[Unset, datetime.date] = UNSET - some_number: Union[Unset, float] = UNSET - some_array: Union[List["AFormData"], None, Unset] = UNSET - some_optional_object: Union[Unset, "BodyUploadFileTestsUploadPostSomeOptionalObject"] = UNSET - some_enum: Union[Unset, DifferentEnum] = UNSET - additional_properties: Dict[str, "BodyUploadFileTestsUploadPostAdditionalProperty"] = _attrs_field( + some_required_number: float + some_object: BodyUploadFileTestsUploadPostSomeObject + some_nullable_object: BodyUploadFileTestsUploadPostSomeNullableObject | None + some_optional_file: File | Unset = UNSET + some_string: str | Unset = "some_default_string" + a_datetime: datetime.datetime | Unset = UNSET + a_date: datetime.date | Unset = UNSET + some_number: float | Unset = UNSET + some_nullable_number: float | None | Unset = UNSET + some_int_array: list[int | None] | Unset = UNSET + some_array: list[AFormData] | None | Unset = UNSET + some_optional_object: BodyUploadFileTestsUploadPostSomeOptionalObject | Unset = UNSET + some_enum: DifferentEnum | Unset = UNSET + additional_properties: dict[str, BodyUploadFileTestsUploadPostAdditionalProperty] = _attrs_field( init=False, factory=dict ) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: from ..models.body_upload_file_tests_upload_post_some_nullable_object import ( BodyUploadFileTestsUploadPostSomeNullableObject, ) some_file = self.some_file.to_tuple() + some_required_number = self.some_required_number + some_object = self.some_object.to_dict() - some_nullable_object: Union[Dict[str, Any], None] + some_nullable_object: dict[str, Any] | None if isinstance(self.some_nullable_object, BodyUploadFileTestsUploadPostSomeNullableObject): some_nullable_object = self.some_nullable_object.to_dict() else: some_nullable_object = self.some_nullable_object - some_optional_file: Union[Unset, FileJsonType] = UNSET + some_optional_file: FileTypes | Unset = UNSET if not isinstance(self.some_optional_file, Unset): some_optional_file = self.some_optional_file.to_tuple() some_string = self.some_string - a_datetime: Union[Unset, str] = UNSET + a_datetime: str | Unset = UNSET if not isinstance(self.a_datetime, Unset): a_datetime = self.a_datetime.isoformat() - a_date: Union[Unset, str] = UNSET + a_date: str | Unset = UNSET if not isinstance(self.a_date, Unset): a_date = self.a_date.isoformat() some_number = self.some_number - some_array: Union[List[Dict[str, Any]], None, Unset] + some_nullable_number: float | None | Unset + if isinstance(self.some_nullable_number, Unset): + some_nullable_number = UNSET + else: + some_nullable_number = self.some_nullable_number + + some_int_array: list[int | None] | Unset = UNSET + if not isinstance(self.some_int_array, Unset): + some_int_array = [] + for some_int_array_item_data in self.some_int_array: + some_int_array_item: int | None + some_int_array_item = some_int_array_item_data + some_int_array.append(some_int_array_item) + + some_array: list[dict[str, Any]] | None | Unset if isinstance(self.some_array, Unset): some_array = UNSET elif isinstance(self.some_array, list): @@ -102,20 +128,22 @@ def to_dict(self) -> Dict[str, Any]: else: some_array = self.some_array - some_optional_object: 
Union[Unset, Dict[str, Any]] = UNSET + some_optional_object: dict[str, Any] | Unset = UNSET if not isinstance(self.some_optional_object, Unset): some_optional_object = self.some_optional_object.to_dict() - some_enum: Union[Unset, str] = UNSET + some_enum: str | Unset = UNSET if not isinstance(self.some_enum, Unset): some_enum = self.some_enum.value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.to_dict() + field_dict.update( { "some_file": some_file, + "some_required_number": some_required_number, "some_object": some_object, "some_nullable_object": some_nullable_object, } @@ -130,6 +158,10 @@ def to_dict(self) -> Dict[str, Any]: field_dict["a_date"] = a_date if some_number is not UNSET: field_dict["some_number"] = some_number + if some_nullable_number is not UNSET: + field_dict["some_nullable_number"] = some_nullable_number + if some_int_array is not UNSET: + field_dict["some_int_array"] = some_int_array if some_array is not UNSET: field_dict["some_array"] = some_array if some_optional_object is not UNSET: @@ -139,93 +171,87 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - some_file = self.some_file.to_tuple() + def to_multipart(self) -> types.RequestFiles: + from ..models.body_upload_file_tests_upload_post_some_nullable_object import ( + BodyUploadFileTestsUploadPostSomeNullableObject, + ) + + files: types.RequestFiles = [] + + files.append(("some_file", self.some_file.to_tuple())) - some_object = (None, json.dumps(self.some_object.to_dict()).encode(), "application/json") + files.append(("some_required_number", (None, str(self.some_required_number).encode(), "text/plain"))) + + files.append(("some_object", (None, json.dumps(self.some_object.to_dict()).encode(), "application/json"))) - some_nullable_object: Union[None, Tuple[None, bytes, str]] if isinstance(self.some_nullable_object, BodyUploadFileTestsUploadPostSomeNullableObject): - some_nullable_object = (None, json.dumps(self.some_nullable_object.to_dict()).encode(), "application/json") + files.append( + ( + "some_nullable_object", + (None, json.dumps(self.some_nullable_object.to_dict()).encode(), "application/json"), + ) + ) else: - some_nullable_object = self.some_nullable_object + files.append(("some_nullable_object", (None, str(self.some_nullable_object).encode(), "text/plain"))) - some_optional_file: Union[Unset, FileJsonType] = UNSET if not isinstance(self.some_optional_file, Unset): - some_optional_file = self.some_optional_file.to_tuple() + files.append(("some_optional_file", self.some_optional_file.to_tuple())) - some_string = ( - self.some_string - if isinstance(self.some_string, Unset) - else (None, str(self.some_string).encode(), "text/plain") - ) + if not isinstance(self.some_string, Unset): + files.append(("some_string", (None, str(self.some_string).encode(), "text/plain"))) - a_datetime: Union[Unset, bytes] = UNSET if not isinstance(self.a_datetime, Unset): - a_datetime = self.a_datetime.isoformat().encode() + files.append(("a_datetime", (None, self.a_datetime.isoformat().encode(), "text/plain"))) - a_date: Union[Unset, bytes] = UNSET if not isinstance(self.a_date, Unset): - a_date = self.a_date.isoformat().encode() + files.append(("a_date", (None, self.a_date.isoformat().encode(), "text/plain"))) + + if not isinstance(self.some_number, Unset): + files.append(("some_number", (None, str(self.some_number).encode(), "text/plain"))) + + if not 
isinstance(self.some_nullable_number, Unset): + if isinstance(self.some_nullable_number, float): + files.append(("some_nullable_number", (None, str(self.some_nullable_number).encode(), "text/plain"))) + else: + files.append(("some_nullable_number", (None, str(self.some_nullable_number).encode(), "text/plain"))) + + if not isinstance(self.some_int_array, Unset): + for some_int_array_item_element in self.some_int_array: + if isinstance(some_int_array_item_element, int): + files.append(("some_int_array", (None, str(some_int_array_item_element).encode(), "text/plain"))) + else: + files.append(("some_int_array", (None, str(some_int_array_item_element).encode(), "text/plain"))) + + if not isinstance(self.some_array, Unset): + if isinstance(self.some_array, list): + for some_array_type_0_item_element in self.some_array: + files.append( + ( + "some_array", + (None, json.dumps(some_array_type_0_item_element.to_dict()).encode(), "application/json"), + ) + ) + else: + files.append(("some_array", (None, str(self.some_array).encode(), "text/plain"))) - some_number = ( - self.some_number - if isinstance(self.some_number, Unset) - else (None, str(self.some_number).encode(), "text/plain") - ) - - some_array: Union[None, Tuple[None, bytes, str], Unset] - if isinstance(self.some_array, Unset): - some_array = UNSET - elif isinstance(self.some_array, list): - _temp_some_array = [] - for some_array_type_0_item_data in self.some_array: - some_array_type_0_item = some_array_type_0_item_data.to_dict() - _temp_some_array.append(some_array_type_0_item) - some_array = (None, json.dumps(_temp_some_array).encode(), "application/json") - - else: - some_array = self.some_array - - some_optional_object: Union[Unset, Tuple[None, bytes, str]] = UNSET if not isinstance(self.some_optional_object, Unset): - some_optional_object = (None, json.dumps(self.some_optional_object.to_dict()).encode(), "application/json") + files.append( + ( + "some_optional_object", + (None, json.dumps(self.some_optional_object.to_dict()).encode(), "application/json"), + ) + ) - some_enum: Union[Unset, Tuple[None, bytes, str]] = UNSET if not isinstance(self.some_enum, Unset): - some_enum = (None, str(self.some_enum.value).encode(), "text/plain") + files.append(("some_enum", (None, str(self.some_enum.value).encode(), "text/plain"))) - field_dict: Dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): - field_dict[prop_name] = (None, json.dumps(prop.to_dict()).encode(), "application/json") - field_dict.update( - { - "some_file": some_file, - "some_object": some_object, - "some_nullable_object": some_nullable_object, - } - ) - if some_optional_file is not UNSET: - field_dict["some_optional_file"] = some_optional_file - if some_string is not UNSET: - field_dict["some_string"] = some_string - if a_datetime is not UNSET: - field_dict["a_datetime"] = a_datetime - if a_date is not UNSET: - field_dict["a_date"] = a_date - if some_number is not UNSET: - field_dict["some_number"] = some_number - if some_array is not UNSET: - field_dict["some_array"] = some_array - if some_optional_object is not UNSET: - field_dict["some_optional_object"] = some_optional_object - if some_enum is not UNSET: - field_dict["some_enum"] = some_enum + files.append((prop_name, (None, json.dumps(prop.to_dict()).encode(), "application/json"))) - return field_dict + return files @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.a_form_data import AFormData from 
..models.body_upload_file_tests_upload_post_additional_property import ( BodyUploadFileTestsUploadPostAdditionalProperty, @@ -238,12 +264,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: BodyUploadFileTestsUploadPostSomeOptionalObject, ) - d = src_dict.copy() + d = dict(src_dict) some_file = File(payload=BytesIO(d.pop("some_file"))) + some_required_number = d.pop("some_required_number") + some_object = BodyUploadFileTestsUploadPostSomeObject.from_dict(d.pop("some_object")) - def _parse_some_nullable_object(data: object) -> Union["BodyUploadFileTestsUploadPostSomeNullableObject", None]: + def _parse_some_nullable_object(data: object) -> BodyUploadFileTestsUploadPostSomeNullableObject | None: if data is None: return data try: @@ -252,14 +280,14 @@ def _parse_some_nullable_object(data: object) -> Union["BodyUploadFileTestsUploa some_nullable_object_type_0 = BodyUploadFileTestsUploadPostSomeNullableObject.from_dict(data) return some_nullable_object_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["BodyUploadFileTestsUploadPostSomeNullableObject", None], data) + return cast(BodyUploadFileTestsUploadPostSomeNullableObject | None, data) some_nullable_object = _parse_some_nullable_object(d.pop("some_nullable_object")) _some_optional_file = d.pop("some_optional_file", UNSET) - some_optional_file: Union[Unset, File] + some_optional_file: File | Unset if isinstance(_some_optional_file, Unset): some_optional_file = UNSET else: @@ -268,14 +296,14 @@ def _parse_some_nullable_object(data: object) -> Union["BodyUploadFileTestsUploa some_string = d.pop("some_string", UNSET) _a_datetime = d.pop("a_datetime", UNSET) - a_datetime: Union[Unset, datetime.datetime] + a_datetime: datetime.datetime | Unset if isinstance(_a_datetime, Unset): a_datetime = UNSET else: a_datetime = isoparse(_a_datetime) _a_date = d.pop("a_date", UNSET) - a_date: Union[Unset, datetime.date] + a_date: datetime.date | Unset if isinstance(_a_date, Unset): a_date = UNSET else: @@ -283,7 +311,31 @@ def _parse_some_nullable_object(data: object) -> Union["BodyUploadFileTestsUploa some_number = d.pop("some_number", UNSET) - def _parse_some_array(data: object) -> Union[List["AFormData"], None, Unset]: + def _parse_some_nullable_number(data: object) -> float | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(float | None | Unset, data) + + some_nullable_number = _parse_some_nullable_number(d.pop("some_nullable_number", UNSET)) + + _some_int_array = d.pop("some_int_array", UNSET) + some_int_array: list[int | None] | Unset = UNSET + if _some_int_array is not UNSET: + some_int_array = [] + for some_int_array_item_data in _some_int_array: + + def _parse_some_int_array_item(data: object) -> int | None: + if data is None: + return data + return cast(int | None, data) + + some_int_array_item = _parse_some_int_array_item(some_int_array_item_data) + + some_int_array.append(some_int_array_item) + + def _parse_some_array(data: object) -> list[AFormData] | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -299,21 +351,21 @@ def _parse_some_array(data: object) -> Union[List["AFormData"], None, Unset]: some_array_type_0.append(some_array_type_0_item) return some_array_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union[List["AFormData"], None, Unset], data) + return cast(list[AFormData] | None | Unset, data) some_array = 
_parse_some_array(d.pop("some_array", UNSET)) _some_optional_object = d.pop("some_optional_object", UNSET) - some_optional_object: Union[Unset, BodyUploadFileTestsUploadPostSomeOptionalObject] + some_optional_object: BodyUploadFileTestsUploadPostSomeOptionalObject | Unset if isinstance(_some_optional_object, Unset): some_optional_object = UNSET else: some_optional_object = BodyUploadFileTestsUploadPostSomeOptionalObject.from_dict(_some_optional_object) _some_enum = d.pop("some_enum", UNSET) - some_enum: Union[Unset, DifferentEnum] + some_enum: DifferentEnum | Unset if isinstance(_some_enum, Unset): some_enum = UNSET else: @@ -321,6 +373,7 @@ def _parse_some_array(data: object) -> Union[List["AFormData"], None, Unset]: body_upload_file_tests_upload_post = cls( some_file=some_file, + some_required_number=some_required_number, some_object=some_object, some_nullable_object=some_nullable_object, some_optional_file=some_optional_file, @@ -328,6 +381,8 @@ def _parse_some_array(data: object) -> Union[List["AFormData"], None, Unset]: a_datetime=a_datetime, a_date=a_date, some_number=some_number, + some_nullable_number=some_nullable_number, + some_int_array=some_int_array, some_array=some_array, some_optional_object=some_optional_object, some_enum=some_enum, @@ -343,13 +398,13 @@ def _parse_some_array(data: object) -> Union[List["AFormData"], None, Unset]: return body_upload_file_tests_upload_post @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "BodyUploadFileTestsUploadPostAdditionalProperty": + def __getitem__(self, key: str) -> BodyUploadFileTestsUploadPostAdditionalProperty: return self.additional_properties[key] - def __setitem__(self, key: str, value: "BodyUploadFileTestsUploadPostAdditionalProperty") -> None: + def __setitem__(self, key: str, value: BodyUploadFileTestsUploadPostAdditionalProperty) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_additional_property.py b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_additional_property.py index f855d9c61..160cb2542 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_additional_property.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_additional_property.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class BodyUploadFileTestsUploadPostAdditionalProperty: """ Attributes: - foo (Union[Unset, str]): + foo (str | Unset): """ - foo: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + foo: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: foo = self.foo - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if foo is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: 
Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) foo = d.pop("foo", UNSET) body_upload_file_tests_upload_post_additional_property = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return body_upload_file_tests_upload_post_additional_property @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_nullable_object.py b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_nullable_object.py index 9762b7efa..47796442d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_nullable_object.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_nullable_object.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class BodyUploadFileTestsUploadPostSomeNullableObject: """ Attributes: - bar (Union[Unset, str]): + bar (str | Unset): """ - bar: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + bar: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: bar = self.bar - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if bar is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) bar = d.pop("bar", UNSET) body_upload_file_tests_upload_post_some_nullable_object = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return body_upload_file_tests_upload_post_some_nullable_object @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_object.py b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_object.py index 25c2c0a6a..0cc39b78b 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_object.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_object.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,14 +19,14 @@ class BodyUploadFileTestsUploadPostSomeObject: num: float text: str - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + 
additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: num = self.num text = self.text - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -35,8 +38,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) num = d.pop("num") text = d.pop("text") @@ -50,7 +53,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return body_upload_file_tests_upload_post_some_object @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_optional_object.py b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_optional_object.py index 711b34e63..f5658196c 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_optional_object.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/body_upload_file_tests_upload_post_some_optional_object.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,12 +17,12 @@ class BodyUploadFileTestsUploadPostSomeOptionalObject: """ foo: str - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: foo = self.foo - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) foo = d.pop("foo") body_upload_file_tests_upload_post_some_optional_object = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return body_upload_file_tests_upload_post_some_optional_object @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/extended.py b/end_to_end_tests/golden-record/my_test_api_client/models/extended.py new file mode 100644 index 000000000..708aa4a90 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/extended.py @@ -0,0 +1,569 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast +from uuid import UUID + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..models.an_all_of_enum import AnAllOfEnum +from ..models.an_enum import AnEnum +from 
..models.different_enum import DifferentEnum +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.free_form_model import FreeFormModel + from ..models.model_with_union_property import ModelWithUnionProperty + + +T = TypeVar("T", bound="Extended") + + +@_attrs_define +class Extended: + """ + Attributes: + an_enum_value (AnEnum): For testing Enums in all the ways they can be used + an_allof_enum_with_overridden_default (AnAllOfEnum): Default: AnAllOfEnum.OVERRIDDEN_DEFAULT. + a_camel_date_time (datetime.date | datetime.datetime): + a_date (datetime.date): + a_nullable_date (datetime.date | None): + a_uuid (UUID): + a_nullable_uuid (None | UUID): Default: UUID('07EF8B4D-AA09-4FFA-898D-C710796AFF41'). + required_nullable (None | str): + required_not_nullable (str): + one_of_models (Any | FreeFormModel | ModelWithUnionProperty): + nullable_one_of_models (FreeFormModel | ModelWithUnionProperty | None): + model (ModelWithUnionProperty): + nullable_model (ModelWithUnionProperty | None): + any_value (Any | Unset): Default: 'default'. + an_optional_allof_enum (AnAllOfEnum | Unset): + nested_list_of_enums (list[list[DifferentEnum]] | Unset): + a_not_required_date (datetime.date | Unset): + a_not_required_uuid (UUID | Unset): + attr_1_leading_digit (str | Unset): + attr_leading_underscore (str | Unset): + not_required_nullable (None | str | Unset): + not_required_not_nullable (str | Unset): + not_required_one_of_models (FreeFormModel | ModelWithUnionProperty | Unset): + not_required_nullable_one_of_models (FreeFormModel | ModelWithUnionProperty | None | str | Unset): + not_required_model (ModelWithUnionProperty | Unset): + not_required_nullable_model (ModelWithUnionProperty | None | Unset): + from_extended (str | Unset): + """ + + an_enum_value: AnEnum + a_camel_date_time: datetime.date | datetime.datetime + a_date: datetime.date + a_nullable_date: datetime.date | None + a_uuid: UUID + required_nullable: None | str + required_not_nullable: str + one_of_models: Any | FreeFormModel | ModelWithUnionProperty + nullable_one_of_models: FreeFormModel | ModelWithUnionProperty | None + model: ModelWithUnionProperty + nullable_model: ModelWithUnionProperty | None + an_allof_enum_with_overridden_default: AnAllOfEnum = AnAllOfEnum.OVERRIDDEN_DEFAULT + a_nullable_uuid: None | UUID = UUID("07EF8B4D-AA09-4FFA-898D-C710796AFF41") + any_value: Any | Unset = "default" + an_optional_allof_enum: AnAllOfEnum | Unset = UNSET + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + a_not_required_date: datetime.date | Unset = UNSET + a_not_required_uuid: UUID | Unset = UNSET + attr_1_leading_digit: str | Unset = UNSET + attr_leading_underscore: str | Unset = UNSET + not_required_nullable: None | str | Unset = UNSET + not_required_not_nullable: str | Unset = UNSET + not_required_one_of_models: FreeFormModel | ModelWithUnionProperty | Unset = UNSET + not_required_nullable_one_of_models: FreeFormModel | ModelWithUnionProperty | None | str | Unset = UNSET + not_required_model: ModelWithUnionProperty | Unset = UNSET + not_required_nullable_model: ModelWithUnionProperty | None | Unset = UNSET + from_extended: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + from ..models.free_form_model import FreeFormModel + from ..models.model_with_union_property import ModelWithUnionProperty + + an_enum_value = self.an_enum_value.value + + an_allof_enum_with_overridden_default = 
self.an_allof_enum_with_overridden_default.value + + a_camel_date_time: str + if isinstance(self.a_camel_date_time, datetime.datetime): + a_camel_date_time = self.a_camel_date_time.isoformat() + else: + a_camel_date_time = self.a_camel_date_time.isoformat() + + a_date = self.a_date.isoformat() + + a_nullable_date: None | str + if isinstance(self.a_nullable_date, datetime.date): + a_nullable_date = self.a_nullable_date.isoformat() + else: + a_nullable_date = self.a_nullable_date + + a_uuid = str(self.a_uuid) + + a_nullable_uuid: None | str + if isinstance(self.a_nullable_uuid, UUID): + a_nullable_uuid = str(self.a_nullable_uuid) + else: + a_nullable_uuid = self.a_nullable_uuid + + required_nullable: None | str + required_nullable = self.required_nullable + + required_not_nullable = self.required_not_nullable + + one_of_models: Any | dict[str, Any] + if isinstance(self.one_of_models, FreeFormModel): + one_of_models = self.one_of_models.to_dict() + elif isinstance(self.one_of_models, ModelWithUnionProperty): + one_of_models = self.one_of_models.to_dict() + else: + one_of_models = self.one_of_models + + nullable_one_of_models: dict[str, Any] | None + if isinstance(self.nullable_one_of_models, FreeFormModel): + nullable_one_of_models = self.nullable_one_of_models.to_dict() + elif isinstance(self.nullable_one_of_models, ModelWithUnionProperty): + nullable_one_of_models = self.nullable_one_of_models.to_dict() + else: + nullable_one_of_models = self.nullable_one_of_models + + model = self.model.to_dict() + + nullable_model: dict[str, Any] | None + if isinstance(self.nullable_model, ModelWithUnionProperty): + nullable_model = self.nullable_model.to_dict() + else: + nullable_model = self.nullable_model + + any_value = self.any_value + + an_optional_allof_enum: str | Unset = UNSET + if not isinstance(self.an_optional_allof_enum, Unset): + an_optional_allof_enum = self.an_optional_allof_enum.value + + nested_list_of_enums: list[list[str]] | Unset = UNSET + if not isinstance(self.nested_list_of_enums, Unset): + nested_list_of_enums = [] + for nested_list_of_enums_item_data in self.nested_list_of_enums: + nested_list_of_enums_item = [] + for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data: + nested_list_of_enums_item_item = nested_list_of_enums_item_item_data.value + nested_list_of_enums_item.append(nested_list_of_enums_item_item) + + nested_list_of_enums.append(nested_list_of_enums_item) + + a_not_required_date: str | Unset = UNSET + if not isinstance(self.a_not_required_date, Unset): + a_not_required_date = self.a_not_required_date.isoformat() + + a_not_required_uuid: str | Unset = UNSET + if not isinstance(self.a_not_required_uuid, Unset): + a_not_required_uuid = str(self.a_not_required_uuid) + + attr_1_leading_digit = self.attr_1_leading_digit + + attr_leading_underscore = self.attr_leading_underscore + + not_required_nullable: None | str | Unset + if isinstance(self.not_required_nullable, Unset): + not_required_nullable = UNSET + else: + not_required_nullable = self.not_required_nullable + + not_required_not_nullable = self.not_required_not_nullable + + not_required_one_of_models: dict[str, Any] | Unset + if isinstance(self.not_required_one_of_models, Unset): + not_required_one_of_models = UNSET + elif isinstance(self.not_required_one_of_models, FreeFormModel): + not_required_one_of_models = self.not_required_one_of_models.to_dict() + else: + not_required_one_of_models = self.not_required_one_of_models.to_dict() + + not_required_nullable_one_of_models: dict[str, Any] | None | 
str | Unset + if isinstance(self.not_required_nullable_one_of_models, Unset): + not_required_nullable_one_of_models = UNSET + elif isinstance(self.not_required_nullable_one_of_models, FreeFormModel): + not_required_nullable_one_of_models = self.not_required_nullable_one_of_models.to_dict() + elif isinstance(self.not_required_nullable_one_of_models, ModelWithUnionProperty): + not_required_nullable_one_of_models = self.not_required_nullable_one_of_models.to_dict() + else: + not_required_nullable_one_of_models = self.not_required_nullable_one_of_models + + not_required_model: dict[str, Any] | Unset = UNSET + if not isinstance(self.not_required_model, Unset): + not_required_model = self.not_required_model.to_dict() + + not_required_nullable_model: dict[str, Any] | None | Unset + if isinstance(self.not_required_nullable_model, Unset): + not_required_nullable_model = UNSET + elif isinstance(self.not_required_nullable_model, ModelWithUnionProperty): + not_required_nullable_model = self.not_required_nullable_model.to_dict() + else: + not_required_nullable_model = self.not_required_nullable_model + + from_extended = self.from_extended + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "an_enum_value": an_enum_value, + "an_allof_enum_with_overridden_default": an_allof_enum_with_overridden_default, + "aCamelDateTime": a_camel_date_time, + "a_date": a_date, + "a_nullable_date": a_nullable_date, + "a_uuid": a_uuid, + "a_nullable_uuid": a_nullable_uuid, + "required_nullable": required_nullable, + "required_not_nullable": required_not_nullable, + "one_of_models": one_of_models, + "nullable_one_of_models": nullable_one_of_models, + "model": model, + "nullable_model": nullable_model, + } + ) + if any_value is not UNSET: + field_dict["any_value"] = any_value + if an_optional_allof_enum is not UNSET: + field_dict["an_optional_allof_enum"] = an_optional_allof_enum + if nested_list_of_enums is not UNSET: + field_dict["nested_list_of_enums"] = nested_list_of_enums + if a_not_required_date is not UNSET: + field_dict["a_not_required_date"] = a_not_required_date + if a_not_required_uuid is not UNSET: + field_dict["a_not_required_uuid"] = a_not_required_uuid + if attr_1_leading_digit is not UNSET: + field_dict["1_leading_digit"] = attr_1_leading_digit + if attr_leading_underscore is not UNSET: + field_dict["_leading_underscore"] = attr_leading_underscore + if not_required_nullable is not UNSET: + field_dict["not_required_nullable"] = not_required_nullable + if not_required_not_nullable is not UNSET: + field_dict["not_required_not_nullable"] = not_required_not_nullable + if not_required_one_of_models is not UNSET: + field_dict["not_required_one_of_models"] = not_required_one_of_models + if not_required_nullable_one_of_models is not UNSET: + field_dict["not_required_nullable_one_of_models"] = not_required_nullable_one_of_models + if not_required_model is not UNSET: + field_dict["not_required_model"] = not_required_model + if not_required_nullable_model is not UNSET: + field_dict["not_required_nullable_model"] = not_required_nullable_model + if from_extended is not UNSET: + field_dict["fromExtended"] = from_extended + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.free_form_model import FreeFormModel + from ..models.model_with_union_property import ModelWithUnionProperty + + d = dict(src_dict) + an_enum_value = AnEnum(d.pop("an_enum_value")) + + an_allof_enum_with_overridden_default = 
AnAllOfEnum(d.pop("an_allof_enum_with_overridden_default")) + + def _parse_a_camel_date_time(data: object) -> datetime.date | datetime.datetime: + try: + if not isinstance(data, str): + raise TypeError() + a_camel_date_time_type_0 = isoparse(data) + + return a_camel_date_time_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + if not isinstance(data, str): + raise TypeError() + a_camel_date_time_type_1 = isoparse(data).date() + + return a_camel_date_time_type_1 + + a_camel_date_time = _parse_a_camel_date_time(d.pop("aCamelDateTime")) + + a_date = isoparse(d.pop("a_date")).date() + + def _parse_a_nullable_date(data: object) -> datetime.date | None: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + a_nullable_date_type_0 = isoparse(data).date() + + return a_nullable_date_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(datetime.date | None, data) + + a_nullable_date = _parse_a_nullable_date(d.pop("a_nullable_date")) + + a_uuid = UUID(d.pop("a_uuid")) + + def _parse_a_nullable_uuid(data: object) -> None | UUID: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + a_nullable_uuid_type_0 = UUID(data) + + return a_nullable_uuid_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(None | UUID, data) + + a_nullable_uuid = _parse_a_nullable_uuid(d.pop("a_nullable_uuid")) + + def _parse_required_nullable(data: object) -> None | str: + if data is None: + return data + return cast(None | str, data) + + required_nullable = _parse_required_nullable(d.pop("required_nullable")) + + required_not_nullable = d.pop("required_not_nullable") + + def _parse_one_of_models(data: object) -> Any | FreeFormModel | ModelWithUnionProperty: + try: + if not isinstance(data, dict): + raise TypeError() + one_of_models_type_0 = FreeFormModel.from_dict(data) + + return one_of_models_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + try: + if not isinstance(data, dict): + raise TypeError() + one_of_models_type_1 = ModelWithUnionProperty.from_dict(data) + + return one_of_models_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(Any | FreeFormModel | ModelWithUnionProperty, data) + + one_of_models = _parse_one_of_models(d.pop("one_of_models")) + + def _parse_nullable_one_of_models(data: object) -> FreeFormModel | ModelWithUnionProperty | None: + if data is None: + return data + try: + if not isinstance(data, dict): + raise TypeError() + nullable_one_of_models_type_0 = FreeFormModel.from_dict(data) + + return nullable_one_of_models_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + try: + if not isinstance(data, dict): + raise TypeError() + nullable_one_of_models_type_1 = ModelWithUnionProperty.from_dict(data) + + return nullable_one_of_models_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(FreeFormModel | ModelWithUnionProperty | None, data) + + nullable_one_of_models = _parse_nullable_one_of_models(d.pop("nullable_one_of_models")) + + model = ModelWithUnionProperty.from_dict(d.pop("model")) + + def _parse_nullable_model(data: object) -> ModelWithUnionProperty | None: + if data is None: + return data + try: + if not isinstance(data, dict): + raise TypeError() + nullable_model_type_1 = ModelWithUnionProperty.from_dict(data) + + return nullable_model_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + 
return cast(ModelWithUnionProperty | None, data) + + nullable_model = _parse_nullable_model(d.pop("nullable_model")) + + any_value = d.pop("any_value", UNSET) + + _an_optional_allof_enum = d.pop("an_optional_allof_enum", UNSET) + an_optional_allof_enum: AnAllOfEnum | Unset + if isinstance(_an_optional_allof_enum, Unset): + an_optional_allof_enum = UNSET + else: + an_optional_allof_enum = AnAllOfEnum(_an_optional_allof_enum) + + _nested_list_of_enums = d.pop("nested_list_of_enums", UNSET) + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + if _nested_list_of_enums is not UNSET: + nested_list_of_enums = [] + for nested_list_of_enums_item_data in _nested_list_of_enums: + nested_list_of_enums_item = [] + _nested_list_of_enums_item = nested_list_of_enums_item_data + for nested_list_of_enums_item_item_data in _nested_list_of_enums_item: + nested_list_of_enums_item_item = DifferentEnum(nested_list_of_enums_item_item_data) + + nested_list_of_enums_item.append(nested_list_of_enums_item_item) + + nested_list_of_enums.append(nested_list_of_enums_item) + + _a_not_required_date = d.pop("a_not_required_date", UNSET) + a_not_required_date: datetime.date | Unset + if isinstance(_a_not_required_date, Unset): + a_not_required_date = UNSET + else: + a_not_required_date = isoparse(_a_not_required_date).date() + + _a_not_required_uuid = d.pop("a_not_required_uuid", UNSET) + a_not_required_uuid: UUID | Unset + if isinstance(_a_not_required_uuid, Unset): + a_not_required_uuid = UNSET + else: + a_not_required_uuid = UUID(_a_not_required_uuid) + + attr_1_leading_digit = d.pop("1_leading_digit", UNSET) + + attr_leading_underscore = d.pop("_leading_underscore", UNSET) + + def _parse_not_required_nullable(data: object) -> None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(None | str | Unset, data) + + not_required_nullable = _parse_not_required_nullable(d.pop("not_required_nullable", UNSET)) + + not_required_not_nullable = d.pop("not_required_not_nullable", UNSET) + + def _parse_not_required_one_of_models(data: object) -> FreeFormModel | ModelWithUnionProperty | Unset: + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + not_required_one_of_models_type_0 = FreeFormModel.from_dict(data) + + return not_required_one_of_models_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + if not isinstance(data, dict): + raise TypeError() + not_required_one_of_models_type_1 = ModelWithUnionProperty.from_dict(data) + + return not_required_one_of_models_type_1 + + not_required_one_of_models = _parse_not_required_one_of_models(d.pop("not_required_one_of_models", UNSET)) + + def _parse_not_required_nullable_one_of_models( + data: object, + ) -> FreeFormModel | ModelWithUnionProperty | None | str | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + not_required_nullable_one_of_models_type_0 = FreeFormModel.from_dict(data) + + return not_required_nullable_one_of_models_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + try: + if not isinstance(data, dict): + raise TypeError() + not_required_nullable_one_of_models_type_1 = ModelWithUnionProperty.from_dict(data) + + return not_required_nullable_one_of_models_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(FreeFormModel | ModelWithUnionProperty | None | str | Unset, data) + + 
not_required_nullable_one_of_models = _parse_not_required_nullable_one_of_models( + d.pop("not_required_nullable_one_of_models", UNSET) + ) + + _not_required_model = d.pop("not_required_model", UNSET) + not_required_model: ModelWithUnionProperty | Unset + if isinstance(_not_required_model, Unset): + not_required_model = UNSET + else: + not_required_model = ModelWithUnionProperty.from_dict(_not_required_model) + + def _parse_not_required_nullable_model(data: object) -> ModelWithUnionProperty | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + not_required_nullable_model_type_1 = ModelWithUnionProperty.from_dict(data) + + return not_required_nullable_model_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(ModelWithUnionProperty | None | Unset, data) + + not_required_nullable_model = _parse_not_required_nullable_model(d.pop("not_required_nullable_model", UNSET)) + + from_extended = d.pop("fromExtended", UNSET) + + extended = cls( + an_enum_value=an_enum_value, + an_allof_enum_with_overridden_default=an_allof_enum_with_overridden_default, + a_camel_date_time=a_camel_date_time, + a_date=a_date, + a_nullable_date=a_nullable_date, + a_uuid=a_uuid, + a_nullable_uuid=a_nullable_uuid, + required_nullable=required_nullable, + required_not_nullable=required_not_nullable, + one_of_models=one_of_models, + nullable_one_of_models=nullable_one_of_models, + model=model, + nullable_model=nullable_model, + any_value=any_value, + an_optional_allof_enum=an_optional_allof_enum, + nested_list_of_enums=nested_list_of_enums, + a_not_required_date=a_not_required_date, + a_not_required_uuid=a_not_required_uuid, + attr_1_leading_digit=attr_1_leading_digit, + attr_leading_underscore=attr_leading_underscore, + not_required_nullable=not_required_nullable, + not_required_not_nullable=not_required_not_nullable, + not_required_one_of_models=not_required_one_of_models, + not_required_nullable_one_of_models=not_required_nullable_one_of_models, + not_required_model=not_required_model, + not_required_nullable_model=not_required_nullable_model, + from_extended=from_extended, + ) + + extended.additional_properties = d + return extended + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/free_form_model.py b/end_to_end_tests/golden-record/my_test_api_client/models/free_form_model.py index f757b10ae..038a4de17 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/free_form_model.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/free_form_model.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,24 +13,24 @@ class FreeFormModel: """ """ - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, 
factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) free_form_model = cls() free_form_model.additional_properties = d return free_form_model @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/get_models_allof_response_200.py b/end_to_end_tests/golden-record/my_test_api_client/models/get_models_allof_response_200.py new file mode 100644 index 000000000..03b423308 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/get_models_allof_response_200.py @@ -0,0 +1,108 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.a_model import AModel + from ..models.extended import Extended + + +T = TypeVar("T", bound="GetModelsAllofResponse200") + + +@_attrs_define +class GetModelsAllofResponse200: + """ + Attributes: + aliased (AModel | Unset): A Model for testing all the ways custom objects can be used + extended (Extended | Unset): + model (AModel | Unset): A Model for testing all the ways custom objects can be used + """ + + aliased: AModel | Unset = UNSET + extended: Extended | Unset = UNSET + model: AModel | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + aliased: dict[str, Any] | Unset = UNSET + if not isinstance(self.aliased, Unset): + aliased = self.aliased.to_dict() + + extended: dict[str, Any] | Unset = UNSET + if not isinstance(self.extended, Unset): + extended = self.extended.to_dict() + + model: dict[str, Any] | Unset = UNSET + if not isinstance(self.model, Unset): + model = self.model.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if aliased is not UNSET: + field_dict["aliased"] = aliased + if extended is not UNSET: + field_dict["extended"] = extended + if model is not UNSET: + field_dict["model"] = model + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.a_model import AModel + from ..models.extended import Extended + + d = dict(src_dict) + _aliased = d.pop("aliased", UNSET) + aliased: AModel | Unset + if isinstance(_aliased, Unset): + aliased = UNSET + else: + aliased = AModel.from_dict(_aliased) + + _extended = d.pop("extended", UNSET) + extended: Extended | Unset + if isinstance(_extended, Unset): + extended = UNSET + else: + extended = Extended.from_dict(_extended) + + _model = d.pop("model", UNSET) + model: AModel | Unset + if isinstance(_model, Unset): + model = UNSET + else: + model = AModel.from_dict(_model) + + get_models_allof_response_200 = cls( + aliased=aliased, + extended=extended, + model=model, + ) + + get_models_allof_response_200.additional_properties = d + return get_models_allof_response_200 + + @property + def additional_keys(self) -> list[str]: 
+ return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/get_models_oneof_with_required_const_response_200_type_0.py b/end_to_end_tests/golden-record/my_test_api_client/models/get_models_oneof_with_required_const_response_200_type_0.py new file mode 100644 index 000000000..a079b954a --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/get_models_oneof_with_required_const_response_200_type_0.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, Literal, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="GetModelsOneofWithRequiredConstResponse200Type0") + + +@_attrs_define +class GetModelsOneofWithRequiredConstResponse200Type0: + """ + Attributes: + type_ (Literal['alpha']): + color (str | Unset): + """ + + type_: Literal["alpha"] + color: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + type_ = self.type_ + + color = self.color + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "type": type_, + } + ) + if color is not UNSET: + field_dict["color"] = color + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + type_ = cast(Literal["alpha"], d.pop("type")) + if type_ != "alpha": + raise ValueError(f"type must match const 'alpha', got '{type_}'") + + color = d.pop("color", UNSET) + + get_models_oneof_with_required_const_response_200_type_0 = cls( + type_=type_, + color=color, + ) + + get_models_oneof_with_required_const_response_200_type_0.additional_properties = d + return get_models_oneof_with_required_const_response_200_type_0 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/get_models_oneof_with_required_const_response_200_type_1.py b/end_to_end_tests/golden-record/my_test_api_client/models/get_models_oneof_with_required_const_response_200_type_1.py new file mode 100644 index 000000000..04670dcba --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/get_models_oneof_with_required_const_response_200_type_1.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, Literal, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="GetModelsOneofWithRequiredConstResponse200Type1") + + +@_attrs_define 
+class GetModelsOneofWithRequiredConstResponse200Type1: + """ + Attributes: + type_ (Literal['beta']): + texture (str | Unset): + """ + + type_: Literal["beta"] + texture: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + type_ = self.type_ + + texture = self.texture + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "type": type_, + } + ) + if texture is not UNSET: + field_dict["texture"] = texture + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + type_ = cast(Literal["beta"], d.pop("type")) + if type_ != "beta": + raise ValueError(f"type must match const 'beta', got '{type_}'") + + texture = d.pop("texture", UNSET) + + get_models_oneof_with_required_const_response_200_type_1 = cls( + type_=type_, + texture=texture, + ) + + get_models_oneof_with_required_const_response_200_type_1.additional_properties = d + return get_models_oneof_with_required_const_response_200_type_1 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/http_validation_error.py b/end_to_end_tests/golden-record/my_test_api_client/models/http_validation_error.py index 1f04c29d0..4d982caee 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/http_validation_error.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/http_validation_error.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define @@ -15,20 +18,21 @@ class HTTPValidationError: """ Attributes: - detail (Union[Unset, List['ValidationError']]): + detail (list[ValidationError] | Unset): """ - detail: Union[Unset, List["ValidationError"]] = UNSET + detail: list[ValidationError] | Unset = UNSET - def to_dict(self) -> Dict[str, Any]: - detail: Union[Unset, List[Dict[str, Any]]] = UNSET + def to_dict(self) -> dict[str, Any]: + detail: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.detail, Unset): detail = [] for detail_item_data in self.detail: detail_item = detail_item_data.to_dict() detail.append(detail_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} + field_dict.update({}) if detail is not UNSET: field_dict["detail"] = detail @@ -36,16 +40,18 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.validation_error import ValidationError - d = src_dict.copy() - detail = [] + d = dict(src_dict) _detail = d.pop("detail", UNSET) - for detail_item_data in _detail or []: - detail_item = ValidationError.from_dict(detail_item_data) + detail: list[ValidationError] | Unset = UNSET + if _detail is not UNSET: + detail = [] + for detail_item_data in _detail: + 
detail_item = ValidationError.from_dict(detail_item_data) - detail.append(detail_item) + detail.append(detail_item) http_validation_error = cls( detail=detail, diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/import_.py b/end_to_end_tests/golden-record/my_test_api_client/models/import_.py index 85cc594e7..51f96846e 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/import_.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/import_.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,24 +13,24 @@ class Import: """ """ - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) import_ = cls() import_.additional_properties = d return import_ @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/json_like_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/json_like_body.py index 623dcd848..4e60f7ab6 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/json_like_body.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/json_like_body.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class JsonLikeBody: """ Attributes: - a (Union[Unset, str]): + a (str | Unset): """ - a: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a = self.a - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a = d.pop("a", UNSET) json_like_body = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return json_like_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/mixed_case_response_200.py b/end_to_end_tests/golden-record/my_test_api_client/models/mixed_case_response_200.py index 
21bdd918d..93aead870 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/mixed_case_response_200.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/mixed_case_response_200.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,20 +15,20 @@ class MixedCaseResponse200: """ Attributes: - mixed_case (Union[Unset, str]): - mixedCase (Union[Unset, str]): + mixed_case (str | Unset): + mixedCase (str | Unset): """ - mixed_case: Union[Unset, str] = UNSET - mixedCase: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + mixed_case: str | Unset = UNSET + mixedCase: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: mixed_case = self.mixed_case mixedCase = self.mixedCase - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if mixed_case is not UNSET: @@ -36,8 +39,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) mixed_case = d.pop("mixed_case", UNSET) mixedCase = d.pop("mixedCase", UNSET) @@ -51,7 +54,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return mixed_case_response_200 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_from_all_of.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_from_all_of.py index 6414b790d..a8abde58a 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_from_all_of.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_from_all_of.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,38 +17,38 @@ class ModelFromAllOf: """ Attributes: - a_sub_property (Union[Unset, str]): - type (Union[Unset, AnotherAllOfSubModelType]): - type_enum (Union[Unset, AnotherAllOfSubModelTypeEnum]): - another_sub_property (Union[Unset, str]): + a_sub_property (str | Unset): + type_ (AnotherAllOfSubModelType | Unset): + type_enum (AnotherAllOfSubModelTypeEnum | Unset): + another_sub_property (str | Unset): """ - a_sub_property: Union[Unset, str] = UNSET - type: Union[Unset, AnotherAllOfSubModelType] = UNSET - type_enum: Union[Unset, AnotherAllOfSubModelTypeEnum] = UNSET - another_sub_property: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a_sub_property: str | Unset = UNSET + type_: AnotherAllOfSubModelType | Unset = UNSET + type_enum: AnotherAllOfSubModelTypeEnum | Unset = UNSET + another_sub_property: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + 
def to_dict(self) -> dict[str, Any]: a_sub_property = self.a_sub_property - type: Union[Unset, str] = UNSET - if not isinstance(self.type, Unset): - type = self.type.value + type_: str | Unset = UNSET + if not isinstance(self.type_, Unset): + type_ = self.type_.value - type_enum: Union[Unset, int] = UNSET + type_enum: int | Unset = UNSET if not isinstance(self.type_enum, Unset): type_enum = self.type_enum.value another_sub_property = self.another_sub_property - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a_sub_property is not UNSET: field_dict["a_sub_property"] = a_sub_property - if type is not UNSET: - field_dict["type"] = type + if type_ is not UNSET: + field_dict["type"] = type_ if type_enum is not UNSET: field_dict["type_enum"] = type_enum if another_sub_property is not UNSET: @@ -54,19 +57,19 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a_sub_property = d.pop("a_sub_property", UNSET) - _type = d.pop("type", UNSET) - type: Union[Unset, AnotherAllOfSubModelType] - if isinstance(_type, Unset): - type = UNSET + _type_ = d.pop("type", UNSET) + type_: AnotherAllOfSubModelType | Unset + if isinstance(_type_, Unset): + type_ = UNSET else: - type = AnotherAllOfSubModelType(_type) + type_ = AnotherAllOfSubModelType(_type_) _type_enum = d.pop("type_enum", UNSET) - type_enum: Union[Unset, AnotherAllOfSubModelTypeEnum] + type_enum: AnotherAllOfSubModelTypeEnum | Unset if isinstance(_type_enum, Unset): type_enum = UNSET else: @@ -76,7 +79,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: model_from_all_of = cls( a_sub_property=a_sub_property, - type=type, + type_=type_, type_enum=type_enum, another_sub_property=another_sub_property, ) @@ -85,7 +88,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_from_all_of @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_name.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_name.py index 2a86db3a2..4d79be855 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_name.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_name.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,24 +13,24 @@ class ModelName: """ """ - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_name = cls() model_name.additional_properties = d return model_name @property - def 
additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_reference_with_periods.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_reference_with_periods.py index a5ff5d211..4700932e4 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_reference_with_periods.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_reference_with_periods.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,24 +13,24 @@ class ModelReferenceWithPeriods: """A Model with periods in its reference""" - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_reference_with_periods = cls() model_reference_with_periods.additional_properties = d return model_reference_with_periods @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined.py index 761a43e54..9cef96f7c 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -18,20 +21,21 @@ class ModelWithAdditionalPropertiesInlined: """ Attributes: - a_number (Union[Unset, float]): + a_number (float | Unset): """ - a_number: Union[Unset, float] = UNSET - additional_properties: Dict[str, "ModelWithAdditionalPropertiesInlinedAdditionalProperty"] = _attrs_field( + a_number: float | Unset = UNSET + additional_properties: dict[str, ModelWithAdditionalPropertiesInlinedAdditionalProperty] = _attrs_field( init=False, factory=dict ) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a_number = self.a_number - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.to_dict() + field_dict.update({}) if a_number is not UNSET: field_dict["a_number"] = a_number @@ -39,12 +43,12 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], 
src_dict: Mapping[str, Any]) -> T: from ..models.model_with_additional_properties_inlined_additional_property import ( ModelWithAdditionalPropertiesInlinedAdditionalProperty, ) - d = src_dict.copy() + d = dict(src_dict) a_number = d.pop("a_number", UNSET) model_with_additional_properties_inlined = cls( @@ -61,13 +65,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_additional_properties_inlined @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "ModelWithAdditionalPropertiesInlinedAdditionalProperty": + def __getitem__(self, key: str) -> ModelWithAdditionalPropertiesInlinedAdditionalProperty: return self.additional_properties[key] - def __setitem__(self, key: str, value: "ModelWithAdditionalPropertiesInlinedAdditionalProperty") -> None: + def __setitem__(self, key: str, value: ModelWithAdditionalPropertiesInlinedAdditionalProperty) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined_additional_property.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined_additional_property.py index e06a94bfc..8f877a575 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined_additional_property.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_inlined_additional_property.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class ModelWithAdditionalPropertiesInlinedAdditionalProperty: """ Attributes: - extra_props_prop (Union[Unset, str]): + extra_props_prop (str | Unset): """ - extra_props_prop: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + extra_props_prop: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: extra_props_prop = self.extra_props_prop - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if extra_props_prop is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) extra_props_prop = d.pop("extra_props_prop", UNSET) model_with_additional_properties_inlined_additional_property = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_additional_properties_inlined_additional_property @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_refed.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_refed.py index 
b2500f68c..d3e0e338a 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_refed.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_additional_properties_refed.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,18 +15,18 @@ class ModelWithAdditionalPropertiesRefed: """ """ - additional_properties: Dict[str, AnEnum] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, AnEnum] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.value return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_with_additional_properties_refed = cls() additional_properties = {} @@ -36,7 +39,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_additional_properties_refed @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> AnEnum: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties.py index 6e669914a..7b59d8c2f 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,16 +19,16 @@ class ModelWithAnyJsonProperties: """ """ - additional_properties: Dict[ - str, Union["ModelWithAnyJsonPropertiesAdditionalPropertyType0", List[str], bool, float, int, str] + additional_properties: dict[ + str, bool | float | int | list[str] | ModelWithAnyJsonPropertiesAdditionalPropertyType0 | str ] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: from ..models.model_with_any_json_properties_additional_property_type_0 import ( ModelWithAnyJsonPropertiesAdditionalPropertyType0, ) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): if isinstance(prop, ModelWithAnyJsonPropertiesAdditionalPropertyType0): field_dict[prop_name] = prop.to_dict() @@ -38,12 +41,12 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.model_with_any_json_properties_additional_property_type_0 import ( ModelWithAnyJsonPropertiesAdditionalPropertyType0, ) - d = src_dict.copy() + d = dict(src_dict) model_with_any_json_properties = cls() additional_properties = 
{} @@ -51,25 +54,25 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: def _parse_additional_property( data: object, - ) -> Union["ModelWithAnyJsonPropertiesAdditionalPropertyType0", List[str], bool, float, int, str]: + ) -> bool | float | int | list[str] | ModelWithAnyJsonPropertiesAdditionalPropertyType0 | str: try: if not isinstance(data, dict): raise TypeError() additional_property_type_0 = ModelWithAnyJsonPropertiesAdditionalPropertyType0.from_dict(data) return additional_property_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass try: if not isinstance(data, list): raise TypeError() - additional_property_type_1 = cast(List[str], data) + additional_property_type_1 = cast(list[str], data) return additional_property_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass return cast( - Union["ModelWithAnyJsonPropertiesAdditionalPropertyType0", List[str], bool, float, int, str], data + bool | float | int | list[str] | ModelWithAnyJsonPropertiesAdditionalPropertyType0 | str, data ) additional_property = _parse_additional_property(prop_dict) @@ -80,18 +83,16 @@ def _parse_additional_property( return model_with_any_json_properties @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__( self, key: str - ) -> Union["ModelWithAnyJsonPropertiesAdditionalPropertyType0", List[str], bool, float, int, str]: + ) -> bool | float | int | list[str] | ModelWithAnyJsonPropertiesAdditionalPropertyType0 | str: return self.additional_properties[key] def __setitem__( - self, - key: str, - value: Union["ModelWithAnyJsonPropertiesAdditionalPropertyType0", List[str], bool, float, int, str], + self, key: str, value: bool | float | int | list[str] | ModelWithAnyJsonPropertiesAdditionalPropertyType0 | str ) -> None: self.additional_properties[key] = value diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties_additional_property_type_0.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties_additional_property_type_0.py index 6ae70905e..5726b1d50 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties_additional_property_type_0.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_any_json_properties_additional_property_type_0.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,24 +13,24 @@ class ModelWithAnyJsonPropertiesAdditionalPropertyType0: """ """ - additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_with_any_json_properties_additional_property_type_0 = cls() model_with_any_json_properties_additional_property_type_0.additional_properties = d return 
model_with_any_json_properties_additional_property_type_0 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> str: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_backslash_in_description.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_backslash_in_description.py index 5de43ddb9..22d2978a4 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_backslash_in_description.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_backslash_in_description.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,24 +15,24 @@ class ModelWithBackslashInDescription: """ - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_with_backslash_in_description = cls() model_with_backslash_in_description.additional_properties = d return model_with_backslash_in_description @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_a.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_a.py index 73cfb1287..5e9849159 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_a.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_a.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,18 +19,18 @@ class ModelWithCircularRefA: """ Attributes: - circular (Union[Unset, ModelWithCircularRefB]): + circular (ModelWithCircularRefB | Unset): """ - circular: Union[Unset, "ModelWithCircularRefB"] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + circular: ModelWithCircularRefB | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - circular: Union[Unset, Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + circular: dict[str, Any] | Unset = UNSET if not isinstance(self.circular, Unset): circular = self.circular.to_dict() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if circular is not UNSET: @@ -36,12 +39,12 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], 
src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.model_with_circular_ref_b import ModelWithCircularRefB - d = src_dict.copy() + d = dict(src_dict) _circular = d.pop("circular", UNSET) - circular: Union[Unset, ModelWithCircularRefB] + circular: ModelWithCircularRefB | Unset if isinstance(_circular, Unset): circular = UNSET else: @@ -55,7 +58,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_circular_ref_a @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_b.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_b.py index 0628d89ae..1a0e61354 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_b.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_b.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,18 +19,18 @@ class ModelWithCircularRefB: """ Attributes: - circular (Union[Unset, ModelWithCircularRefA]): + circular (ModelWithCircularRefA | Unset): """ - circular: Union[Unset, "ModelWithCircularRefA"] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + circular: ModelWithCircularRefA | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - circular: Union[Unset, Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + circular: dict[str, Any] | Unset = UNSET if not isinstance(self.circular, Unset): circular = self.circular.to_dict() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if circular is not UNSET: @@ -36,12 +39,12 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.model_with_circular_ref_a import ModelWithCircularRefA - d = src_dict.copy() + d = dict(src_dict) _circular = d.pop("circular", UNSET) - circular: Union[Unset, ModelWithCircularRefA] + circular: ModelWithCircularRefA | Unset if isinstance(_circular, Unset): circular = UNSET else: @@ -55,7 +58,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_circular_ref_b @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_a.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_a.py index 4f1d59c57..cd389aeb3 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_a.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_a.py @@ -1,4 +1,7 @@ -from 
typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,24 +17,24 @@ class ModelWithCircularRefInAdditionalPropertiesA: """ """ - additional_properties: Dict[str, "ModelWithCircularRefInAdditionalPropertiesB"] = _attrs_field( + additional_properties: dict[str, ModelWithCircularRefInAdditionalPropertiesB] = _attrs_field( init=False, factory=dict ) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.to_dict() return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.model_with_circular_ref_in_additional_properties_b import ( ModelWithCircularRefInAdditionalPropertiesB, ) - d = src_dict.copy() + d = dict(src_dict) model_with_circular_ref_in_additional_properties_a = cls() additional_properties = {} @@ -44,13 +47,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_circular_ref_in_additional_properties_a @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "ModelWithCircularRefInAdditionalPropertiesB": + def __getitem__(self, key: str) -> ModelWithCircularRefInAdditionalPropertiesB: return self.additional_properties[key] - def __setitem__(self, key: str, value: "ModelWithCircularRefInAdditionalPropertiesB") -> None: + def __setitem__(self, key: str, value: ModelWithCircularRefInAdditionalPropertiesB) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_b.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_b.py index 3f55584e5..78a11d482 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_b.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_circular_ref_in_additional_properties_b.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,24 +17,24 @@ class ModelWithCircularRefInAdditionalPropertiesB: """ """ - additional_properties: Dict[str, "ModelWithCircularRefInAdditionalPropertiesA"] = _attrs_field( + additional_properties: dict[str, ModelWithCircularRefInAdditionalPropertiesA] = _attrs_field( init=False, factory=dict ) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.to_dict() return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.model_with_circular_ref_in_additional_properties_a import ( 
ModelWithCircularRefInAdditionalPropertiesA, ) - d = src_dict.copy() + d = dict(src_dict) model_with_circular_ref_in_additional_properties_b = cls() additional_properties = {} @@ -44,13 +47,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_circular_ref_in_additional_properties_b @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "ModelWithCircularRefInAdditionalPropertiesA": + def __getitem__(self, key: str) -> ModelWithCircularRefInAdditionalPropertiesA: return self.additional_properties[key] - def __setitem__(self, key: str, value: "ModelWithCircularRefInAdditionalPropertiesA") -> None: + def __setitem__(self, key: str, value: ModelWithCircularRefInAdditionalPropertiesA) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_date_time_property.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_date_time_property.py index 658b2352d..39dd2500b 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_date_time_property.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_date_time_property.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import datetime -from typing import Any, Dict, List, Type, TypeVar, Union +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -14,18 +17,18 @@ class ModelWithDateTimeProperty: """ Attributes: - datetime_ (Union[Unset, datetime.datetime]): + datetime_ (datetime.datetime | Unset): """ - datetime_: Union[Unset, datetime.datetime] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + datetime_: datetime.datetime | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - datetime_: Union[Unset, str] = UNSET + def to_dict(self) -> dict[str, Any]: + datetime_: str | Unset = UNSET if not isinstance(self.datetime_, Unset): datetime_ = self.datetime_.isoformat() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if datetime_ is not UNSET: @@ -34,10 +37,10 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) _datetime_ = d.pop("datetime", UNSET) - datetime_: Union[Unset, datetime.datetime] + datetime_: datetime.datetime | Unset if isinstance(_datetime_, Unset): datetime_ = UNSET else: @@ -51,7 +54,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_date_time_property @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_discriminated_union.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_discriminated_union.py index e03a6e698..0e0021611 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_discriminated_union.py +++ 
b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_discriminated_union.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -17,17 +20,17 @@ class ModelWithDiscriminatedUnion: """ Attributes: - discriminated_union (Union['ADiscriminatedUnionType1', 'ADiscriminatedUnionType2', None, Unset]): + discriminated_union (ADiscriminatedUnionType1 | ADiscriminatedUnionType2 | None | Unset): """ - discriminated_union: Union["ADiscriminatedUnionType1", "ADiscriminatedUnionType2", None, Unset] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + discriminated_union: ADiscriminatedUnionType1 | ADiscriminatedUnionType2 | None | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: from ..models.a_discriminated_union_type_1 import ADiscriminatedUnionType1 from ..models.a_discriminated_union_type_2 import ADiscriminatedUnionType2 - discriminated_union: Union[Dict[str, Any], None, Unset] + discriminated_union: dict[str, Any] | None | Unset if isinstance(self.discriminated_union, Unset): discriminated_union = UNSET elif isinstance(self.discriminated_union, ADiscriminatedUnionType1): @@ -37,7 +40,7 @@ def to_dict(self) -> Dict[str, Any]: else: discriminated_union = self.discriminated_union - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if discriminated_union is not UNSET: @@ -46,15 +49,15 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.a_discriminated_union_type_1 import ADiscriminatedUnionType1 from ..models.a_discriminated_union_type_2 import ADiscriminatedUnionType2 - d = src_dict.copy() + d = dict(src_dict) def _parse_discriminated_union( data: object, - ) -> Union["ADiscriminatedUnionType1", "ADiscriminatedUnionType2", None, Unset]: + ) -> ADiscriminatedUnionType1 | ADiscriminatedUnionType2 | None | Unset: if data is None: return data if isinstance(data, Unset): @@ -65,7 +68,7 @@ def _parse_discriminated_union( componentsschemas_a_discriminated_union_type_0 = ADiscriminatedUnionType1.from_dict(data) return componentsschemas_a_discriminated_union_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass try: if not isinstance(data, dict): @@ -73,9 +76,9 @@ def _parse_discriminated_union( componentsschemas_a_discriminated_union_type_1 = ADiscriminatedUnionType2.from_dict(data) return componentsschemas_a_discriminated_union_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["ADiscriminatedUnionType1", "ADiscriminatedUnionType2", None, Unset], data) + return cast(ADiscriminatedUnionType1 | ADiscriminatedUnionType2 | None | Unset, data) discriminated_union = _parse_discriminated_union(d.pop("discriminated_union", UNSET)) @@ -87,7 +90,7 @@ def _parse_discriminated_union( return model_with_discriminated_union @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return 
list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_merged_properties.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_merged_properties.py new file mode 100644 index 000000000..234a49e78 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_merged_properties.py @@ -0,0 +1,115 @@ +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..models.model_with_merged_properties_string_to_enum import ModelWithMergedPropertiesStringToEnum +from ..types import UNSET, Unset + +T = TypeVar("T", bound="ModelWithMergedProperties") + + +@_attrs_define +class ModelWithMergedProperties: + """ + Attributes: + simple_string (str | Unset): extended simpleString description Default: 'new default'. + string_to_enum (ModelWithMergedPropertiesStringToEnum | Unset): Default: + ModelWithMergedPropertiesStringToEnum.A. + string_to_date (datetime.date | Unset): + number_to_int (int | Unset): + any_to_string (str | Unset): Default: 'x'. + """ + + simple_string: str | Unset = "new default" + string_to_enum: ModelWithMergedPropertiesStringToEnum | Unset = ModelWithMergedPropertiesStringToEnum.A + string_to_date: datetime.date | Unset = UNSET + number_to_int: int | Unset = UNSET + any_to_string: str | Unset = "x" + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + simple_string = self.simple_string + + string_to_enum: str | Unset = UNSET + if not isinstance(self.string_to_enum, Unset): + string_to_enum = self.string_to_enum.value + + string_to_date: str | Unset = UNSET + if not isinstance(self.string_to_date, Unset): + string_to_date = self.string_to_date.isoformat() + + number_to_int = self.number_to_int + + any_to_string = self.any_to_string + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if simple_string is not UNSET: + field_dict["simpleString"] = simple_string + if string_to_enum is not UNSET: + field_dict["stringToEnum"] = string_to_enum + if string_to_date is not UNSET: + field_dict["stringToDate"] = string_to_date + if number_to_int is not UNSET: + field_dict["numberToInt"] = number_to_int + if any_to_string is not UNSET: + field_dict["anyToString"] = any_to_string + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + simple_string = d.pop("simpleString", UNSET) + + _string_to_enum = d.pop("stringToEnum", UNSET) + string_to_enum: ModelWithMergedPropertiesStringToEnum | Unset + if isinstance(_string_to_enum, Unset): + string_to_enum = UNSET + else: + string_to_enum = ModelWithMergedPropertiesStringToEnum(_string_to_enum) + + _string_to_date = d.pop("stringToDate", UNSET) + string_to_date: datetime.date | Unset + if isinstance(_string_to_date, Unset): + string_to_date = UNSET + else: + string_to_date = isoparse(_string_to_date).date() + + number_to_int = d.pop("numberToInt", UNSET) + + any_to_string = d.pop("anyToString", UNSET) + + model_with_merged_properties = cls( + simple_string=simple_string, + string_to_enum=string_to_enum, + string_to_date=string_to_date, + number_to_int=number_to_int, + any_to_string=any_to_string, + ) + + 
model_with_merged_properties.additional_properties = d + return model_with_merged_properties + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_merged_properties_string_to_enum.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_merged_properties_string_to_enum.py new file mode 100644 index 000000000..5e146c5eb --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_merged_properties_string_to_enum.py @@ -0,0 +1,9 @@ +from enum import Enum + + +class ModelWithMergedPropertiesStringToEnum(str, Enum): + A = "a" + B = "b" + + def __str__(self) -> str: + return str(self.value) diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_no_properties.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_no_properties.py new file mode 100644 index 000000000..6d70837d1 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_no_properties.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define + +T = TypeVar("T", bound="ModelWithNoProperties") + + +@_attrs_define +class ModelWithNoProperties: + """ """ + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + model_with_no_properties = cls() + + return model_with_no_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties.py index 94afa7653..2f4bf9de8 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -18,18 +21,18 @@ class ModelWithPrimitiveAdditionalProperties: """ Attributes: - a_date_holder (Union[Unset, ModelWithPrimitiveAdditionalPropertiesADateHolder]): + a_date_holder (ModelWithPrimitiveAdditionalPropertiesADateHolder | Unset): """ - a_date_holder: Union[Unset, "ModelWithPrimitiveAdditionalPropertiesADateHolder"] = UNSET - additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict) + a_date_holder: ModelWithPrimitiveAdditionalPropertiesADateHolder | Unset = UNSET + additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - a_date_holder: Union[Unset, Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + a_date_holder: dict[str, Any] | Unset = UNSET if not 
isinstance(self.a_date_holder, Unset): a_date_holder = self.a_date_holder.to_dict() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a_date_holder is not UNSET: @@ -38,14 +41,14 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.model_with_primitive_additional_properties_a_date_holder import ( ModelWithPrimitiveAdditionalPropertiesADateHolder, ) - d = src_dict.copy() + d = dict(src_dict) _a_date_holder = d.pop("a_date_holder", UNSET) - a_date_holder: Union[Unset, ModelWithPrimitiveAdditionalPropertiesADateHolder] + a_date_holder: ModelWithPrimitiveAdditionalPropertiesADateHolder | Unset if isinstance(_a_date_holder, Unset): a_date_holder = UNSET else: @@ -59,7 +62,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_primitive_additional_properties @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> str: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties_a_date_holder.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties_a_date_holder.py index b9920fc60..891fdc3e2 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties_a_date_holder.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_primitive_additional_properties_a_date_holder.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import datetime -from typing import Any, Dict, List, Type, TypeVar +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,18 +15,18 @@ class ModelWithPrimitiveAdditionalPropertiesADateHolder: """ """ - additional_properties: Dict[str, datetime.datetime] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, datetime.datetime] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.isoformat() return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_with_primitive_additional_properties_a_date_holder = cls() additional_properties = {} @@ -36,7 +39,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_primitive_additional_properties_a_date_holder @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> datetime.datetime: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_property_ref.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_property_ref.py index f54afdee8..19b63447d 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_property_ref.py +++ 
b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_property_ref.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,18 +19,18 @@ class ModelWithPropertyRef: """ Attributes: - inner (Union[Unset, ModelName]): + inner (ModelName | Unset): """ - inner: Union[Unset, "ModelName"] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + inner: ModelName | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - inner: Union[Unset, Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + inner: dict[str, Any] | Unset = UNSET if not isinstance(self.inner, Unset): inner = self.inner.to_dict() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if inner is not UNSET: @@ -36,12 +39,12 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.model_name import ModelName - d = src_dict.copy() + d = dict(src_dict) _inner = d.pop("inner", UNSET) - inner: Union[Unset, ModelName] + inner: ModelName | Unset if isinstance(_inner, Unset): inner = UNSET else: @@ -55,7 +58,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_property_ref @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref.py index 578bca7e0..d4149434c 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,18 +15,18 @@ class ModelWithRecursiveRef: """ Attributes: - recursive (Union[Unset, ModelWithRecursiveRef]): + recursive (ModelWithRecursiveRef | Unset): """ - recursive: Union[Unset, "ModelWithRecursiveRef"] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + recursive: ModelWithRecursiveRef | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - recursive: Union[Unset, Dict[str, Any]] = UNSET + def to_dict(self) -> dict[str, Any]: + recursive: dict[str, Any] | Unset = UNSET if not isinstance(self.recursive, Unset): recursive = self.recursive.to_dict() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if recursive is not UNSET: @@ -32,10 +35,10 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = 
src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) _recursive = d.pop("recursive", UNSET) - recursive: Union[Unset, ModelWithRecursiveRef] + recursive: ModelWithRecursiveRef | Unset if isinstance(_recursive, Unset): recursive = UNSET else: @@ -49,7 +52,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_recursive_ref @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref_in_additional_properties.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref_in_additional_properties.py index 2ed2526f5..3b83ea3f9 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref_in_additional_properties.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_recursive_ref_in_additional_properties.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,20 +13,20 @@ class ModelWithRecursiveRefInAdditionalProperties: """ """ - additional_properties: Dict[str, "ModelWithRecursiveRefInAdditionalProperties"] = _attrs_field( + additional_properties: dict[str, ModelWithRecursiveRefInAdditionalProperties] = _attrs_field( init=False, factory=dict ) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} for prop_name, prop in self.additional_properties.items(): field_dict[prop_name] = prop.to_dict() return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) model_with_recursive_ref_in_additional_properties = cls() additional_properties = {} @@ -36,13 +39,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return model_with_recursive_ref_in_additional_properties @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> "ModelWithRecursiveRefInAdditionalProperties": + def __getitem__(self, key: str) -> ModelWithRecursiveRefInAdditionalProperties: return self.additional_properties[key] - def __setitem__(self, key: str, value: "ModelWithRecursiveRefInAdditionalProperties") -> None: + def __setitem__(self, key: str, value: ModelWithRecursiveRefInAdditionalProperties) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property.py index 890010b78..176b3eadc 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define @@ 
-13,13 +16,13 @@ class ModelWithUnionProperty: """ Attributes: - a_property (Union[AnEnum, AnIntEnum, Unset]): + a_property (AnEnum | AnIntEnum | Unset): """ - a_property: Union[AnEnum, AnIntEnum, Unset] = UNSET + a_property: AnEnum | AnIntEnum | Unset = UNSET - def to_dict(self) -> Dict[str, Any]: - a_property: Union[Unset, int, str] + def to_dict(self) -> dict[str, Any]: + a_property: int | str | Unset if isinstance(self.a_property, Unset): a_property = UNSET elif isinstance(self.a_property, AnEnum): @@ -27,7 +30,8 @@ def to_dict(self) -> Dict[str, Any]: else: a_property = self.a_property.value - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} + field_dict.update({}) if a_property is not UNSET: field_dict["a_property"] = a_property @@ -35,10 +39,10 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) - def _parse_a_property(data: object) -> Union[AnEnum, AnIntEnum, Unset]: + def _parse_a_property(data: object) -> AnEnum | AnIntEnum | Unset: if isinstance(data, Unset): return data try: @@ -47,7 +51,7 @@ def _parse_a_property(data: object) -> Union[AnEnum, AnIntEnum, Unset]: a_property_type_0 = AnEnum(data) return a_property_type_0 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass if not isinstance(data, int): raise TypeError() diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined.py index 2a832e21a..ab144ded1 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined.py @@ -1,12 +1,15 @@ -from typing import TYPE_CHECKING, Any, Dict, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from ..types import UNSET, Unset if TYPE_CHECKING: - from ..models.model_with_union_property_inlined_fruit_type_0 import ModelWithUnionPropertyInlinedFruitType0 - from ..models.model_with_union_property_inlined_fruit_type_1 import ModelWithUnionPropertyInlinedFruitType1 + from ..models.model_with_union_property_inlined_apples import ModelWithUnionPropertyInlinedApples + from ..models.model_with_union_property_inlined_bananas import ModelWithUnionPropertyInlinedBananas T = TypeVar("T", bound="ModelWithUnionPropertyInlined") @@ -16,23 +19,24 @@ class ModelWithUnionPropertyInlined: """ Attributes: - fruit (Union['ModelWithUnionPropertyInlinedFruitType0', 'ModelWithUnionPropertyInlinedFruitType1', Unset]): + fruit (ModelWithUnionPropertyInlinedApples | ModelWithUnionPropertyInlinedBananas | Unset): """ - fruit: Union["ModelWithUnionPropertyInlinedFruitType0", "ModelWithUnionPropertyInlinedFruitType1", Unset] = UNSET + fruit: ModelWithUnionPropertyInlinedApples | ModelWithUnionPropertyInlinedBananas | Unset = UNSET - def to_dict(self) -> Dict[str, Any]: - from ..models.model_with_union_property_inlined_fruit_type_0 import ModelWithUnionPropertyInlinedFruitType0 + def to_dict(self) -> dict[str, Any]: + from ..models.model_with_union_property_inlined_apples import ModelWithUnionPropertyInlinedApples - fruit: Union[Dict[str, Any], Unset] + fruit: dict[str, Any] 
| Unset if isinstance(self.fruit, Unset): fruit = UNSET - elif isinstance(self.fruit, ModelWithUnionPropertyInlinedFruitType0): + elif isinstance(self.fruit, ModelWithUnionPropertyInlinedApples): fruit = self.fruit.to_dict() else: fruit = self.fruit.to_dict() - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} + field_dict.update({}) if fruit is not UNSET: field_dict["fruit"] = fruit @@ -40,30 +44,30 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - from ..models.model_with_union_property_inlined_fruit_type_0 import ModelWithUnionPropertyInlinedFruitType0 - from ..models.model_with_union_property_inlined_fruit_type_1 import ModelWithUnionPropertyInlinedFruitType1 + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.model_with_union_property_inlined_apples import ModelWithUnionPropertyInlinedApples + from ..models.model_with_union_property_inlined_bananas import ModelWithUnionPropertyInlinedBananas - d = src_dict.copy() + d = dict(src_dict) def _parse_fruit( data: object, - ) -> Union["ModelWithUnionPropertyInlinedFruitType0", "ModelWithUnionPropertyInlinedFruitType1", Unset]: + ) -> ModelWithUnionPropertyInlinedApples | ModelWithUnionPropertyInlinedBananas | Unset: if isinstance(data, Unset): return data try: if not isinstance(data, dict): raise TypeError() - fruit_type_0 = ModelWithUnionPropertyInlinedFruitType0.from_dict(data) + fruit_apples = ModelWithUnionPropertyInlinedApples.from_dict(data) - return fruit_type_0 - except: # noqa: E722 + return fruit_apples + except (TypeError, ValueError, AttributeError, KeyError): pass if not isinstance(data, dict): raise TypeError() - fruit_type_1 = ModelWithUnionPropertyInlinedFruitType1.from_dict(data) + fruit_bananas = ModelWithUnionPropertyInlinedBananas.from_dict(data) - return fruit_type_1 + return fruit_bananas fruit = _parse_fruit(d.pop("fruit", UNSET)) diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_fruit_type_0.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_apples.py similarity index 56% rename from end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_fruit_type_0.py rename to end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_apples.py index b0f25360a..0ad6f0cf2 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_fruit_type_0.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_apples.py @@ -1,27 +1,30 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field from ..types import UNSET, Unset -T = TypeVar("T", bound="ModelWithUnionPropertyInlinedFruitType0") +T = TypeVar("T", bound="ModelWithUnionPropertyInlinedApples") @_attrs_define -class ModelWithUnionPropertyInlinedFruitType0: +class ModelWithUnionPropertyInlinedApples: """ Attributes: - apples (Union[Unset, str]): + apples (str | Unset): """ - apples: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + apples: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> 
Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: apples = self.apples - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if apples is not UNSET: @@ -30,19 +33,19 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) apples = d.pop("apples", UNSET) - model_with_union_property_inlined_fruit_type_0 = cls( + model_with_union_property_inlined_apples = cls( apples=apples, ) - model_with_union_property_inlined_fruit_type_0.additional_properties = d - return model_with_union_property_inlined_fruit_type_0 + model_with_union_property_inlined_apples.additional_properties = d + return model_with_union_property_inlined_apples @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_fruit_type_1.py b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_bananas.py similarity index 56% rename from end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_fruit_type_1.py rename to end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_bananas.py index 1a32f2445..68a54e760 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_fruit_type_1.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/model_with_union_property_inlined_bananas.py @@ -1,27 +1,30 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field from ..types import UNSET, Unset -T = TypeVar("T", bound="ModelWithUnionPropertyInlinedFruitType1") +T = TypeVar("T", bound="ModelWithUnionPropertyInlinedBananas") @_attrs_define -class ModelWithUnionPropertyInlinedFruitType1: +class ModelWithUnionPropertyInlinedBananas: """ Attributes: - bananas (Union[Unset, str]): + bananas (str | Unset): """ - bananas: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + bananas: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: bananas = self.bananas - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if bananas is not UNSET: @@ -30,19 +33,19 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) bananas = d.pop("bananas", UNSET) - model_with_union_property_inlined_fruit_type_1 = cls( + model_with_union_property_inlined_bananas = cls( bananas=bananas, ) - model_with_union_property_inlined_fruit_type_1.additional_properties = d - return model_with_union_property_inlined_fruit_type_1 + model_with_union_property_inlined_bananas.additional_properties = d + return 
model_with_union_property_inlined_bananas @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/none.py b/end_to_end_tests/golden-record/my_test_api_client/models/none.py index 3510497bf..26a7ba2ba 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/none.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/none.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,24 +13,24 @@ class None_: """ """ - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) none = cls() none.additional_properties = d return none @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/octet_stream_tests_octet_stream_post_response_200.py b/end_to_end_tests/golden-record/my_test_api_client/models/octet_stream_tests_octet_stream_post_response_200.py new file mode 100644 index 000000000..8d138937a --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/octet_stream_tests_octet_stream_post_response_200.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from collections.abc import Mapping +from io import BytesIO +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, File, FileTypes, Unset + +T = TypeVar("T", bound="OctetStreamTestsOctetStreamPostResponse200") + + +@_attrs_define +class OctetStreamTestsOctetStreamPostResponse200: + """ + Attributes: + data (File | Unset): + """ + + data: File | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + data: FileTypes | Unset = UNSET + if not isinstance(self.data, Unset): + data = self.data.to_tuple() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if data is not UNSET: + field_dict["data"] = data + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + _data = d.pop("data", UNSET) + data: File | Unset + if isinstance(_data, Unset): + data = UNSET + else: + data = File(payload=BytesIO(_data)) + + octet_stream_tests_octet_stream_post_response_200 = cls( + data=data, + ) + + octet_stream_tests_octet_stream_post_response_200.additional_properties = d + return octet_stream_tests_octet_stream_post_response_200 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def 
__getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/optional_body_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/optional_body_body.py new file mode 100644 index 000000000..a4665b7df --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/optional_body_body.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="OptionalBodyBody") + + +@_attrs_define +class OptionalBodyBody: + """ """ + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + optional_body_body = cls() + + optional_body_body.additional_properties = d + return optional_body_body + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_data_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_data_body.py index adc78cd6f..d83f8d7eb 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_data_body.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_data_body.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class PostBodiesMultipleDataBody: """ Attributes: - a (Union[Unset, str]): + a (str | Unset): """ - a: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a = self.a - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a = d.pop("a", UNSET) post_bodies_multiple_data_body = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return 
post_bodies_multiple_data_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_files_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_files_body.py index 1c61d3385..81d7db7a7 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_files_body.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_files_body.py @@ -1,8 +1,12 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field +from .. import types from ..types import UNSET, Unset T = TypeVar("T", bound="PostBodiesMultipleFilesBody") @@ -12,16 +16,16 @@ class PostBodiesMultipleFilesBody: """ Attributes: - a (Union[Unset, str]): + a (str | Unset): """ - a: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a = self.a - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a is not UNSET: @@ -29,22 +33,20 @@ def to_dict(self) -> Dict[str, Any]: return field_dict - def to_multipart(self) -> Dict[str, Any]: - a = self.a if isinstance(self.a, Unset) else (None, str(self.a).encode(), "text/plain") + def to_multipart(self) -> types.RequestFiles: + files: types.RequestFiles = [] - field_dict: Dict[str, Any] = {} - field_dict.update( - {key: (None, str(value).encode(), "text/plain") for key, value in self.additional_properties.items()} - ) - field_dict.update({}) - if a is not UNSET: - field_dict["a"] = a + if not isinstance(self.a, Unset): + files.append(("a", (None, str(self.a).encode(), "text/plain"))) - return field_dict + for prop_name, prop in self.additional_properties.items(): + files.append((prop_name, (None, str(prop).encode(), "text/plain"))) + + return files @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a = d.pop("a", UNSET) post_bodies_multiple_files_body = cls( @@ -55,7 +57,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return post_bodies_multiple_files_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_json_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_json_body.py index 88e5ec6f9..a5ec152e9 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_json_body.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_bodies_multiple_json_body.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import 
define as _attrs_define from attrs import field as _attrs_field @@ -12,16 +15,16 @@ class PostBodiesMultipleJsonBody: """ Attributes: - a (Union[Unset, str]): + a (str | Unset): """ - a: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a = self.a - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if a is not UNSET: @@ -30,8 +33,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a = d.pop("a", UNSET) post_bodies_multiple_json_body = cls( @@ -42,7 +45,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return post_bodies_multiple_json_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_form_data_inline_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_form_data_inline_body.py index 08a7bbc3a..5d7c3982b 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_form_data_inline_body.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_form_data_inline_body.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -13,19 +16,19 @@ class PostFormDataInlineBody: """ Attributes: a_required_field (str): - an_optional_field (Union[Unset, str]): + an_optional_field (str | Unset): """ a_required_field: str - an_optional_field: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + an_optional_field: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a_required_field = self.a_required_field an_optional_field = self.an_optional_field - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -38,8 +41,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a_required_field = d.pop("a_required_field") an_optional_field = d.pop("an_optional_field", UNSET) @@ -53,7 +56,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return post_form_data_inline_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_body.py index 
ed2f8efa1..2895d97d8 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_body.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_body.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,20 +15,20 @@ class PostNamingPropertyConflictWithImportBody: """ Attributes: - field (Union[Unset, str]): A python_name of field should not interfere with attrs field - define (Union[Unset, str]): A python_name of define should not interfere with attrs define + field (str | Unset): A python_name of field should not interfere with attrs field + define (str | Unset): A python_name of define should not interfere with attrs define """ - field: Union[Unset, str] = UNSET - define: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + field: str | Unset = UNSET + define: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: field = self.field define = self.define - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if field is not UNSET: @@ -36,8 +39,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) field = d.pop("Field", UNSET) define = d.pop("Define", UNSET) @@ -51,7 +54,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return post_naming_property_conflict_with_import_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_response_200.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_response_200.py index 9bdd79a02..c2a8ac7f3 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_response_200.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_naming_property_conflict_with_import_response_200.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,20 +15,20 @@ class PostNamingPropertyConflictWithImportResponse200: """ Attributes: - field (Union[Unset, str]): A python_name of field should not interfere with attrs field - define (Union[Unset, str]): A python_name of define should not interfere with attrs define + field (str | Unset): A python_name of field should not interfere with attrs field + define (str | Unset): A python_name of define should not interfere with attrs define """ - field: Union[Unset, str] = UNSET - define: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + field: 
str | Unset = UNSET + define: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: field = self.field define = self.define - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if field is not UNSET: @@ -36,8 +39,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) field = d.pop("Field", UNSET) define = d.pop("Define", UNSET) @@ -51,7 +54,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return post_naming_property_conflict_with_import_response_200 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200.py index 0b6a29243..41685d494 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast +from __future__ import annotations + +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,24 +19,24 @@ class PostResponsesUnionsSimpleBeforeComplexResponse200: """ Attributes: - a (Union['PostResponsesUnionsSimpleBeforeComplexResponse200AType1', str]): + a (PostResponsesUnionsSimpleBeforeComplexResponse200AType1 | str): """ - a: Union["PostResponsesUnionsSimpleBeforeComplexResponse200AType1", str] - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + a: PostResponsesUnionsSimpleBeforeComplexResponse200AType1 | str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: from ..models.post_responses_unions_simple_before_complex_response_200a_type_1 import ( PostResponsesUnionsSimpleBeforeComplexResponse200AType1, ) - a: Union[Dict[str, Any], str] + a: dict[str, Any] | str if isinstance(self.a, PostResponsesUnionsSimpleBeforeComplexResponse200AType1): a = self.a.to_dict() else: a = self.a - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -44,23 +47,23 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.post_responses_unions_simple_before_complex_response_200a_type_1 import ( PostResponsesUnionsSimpleBeforeComplexResponse200AType1, ) - d = src_dict.copy() + d = dict(src_dict) - def _parse_a(data: object) -> Union["PostResponsesUnionsSimpleBeforeComplexResponse200AType1", str]: + def _parse_a(data: object) -> 
PostResponsesUnionsSimpleBeforeComplexResponse200AType1 | str: try: if not isinstance(data, dict): raise TypeError() a_type_1 = PostResponsesUnionsSimpleBeforeComplexResponse200AType1.from_dict(data) return a_type_1 - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass - return cast(Union["PostResponsesUnionsSimpleBeforeComplexResponse200AType1", str], data) + return cast(PostResponsesUnionsSimpleBeforeComplexResponse200AType1 | str, data) a = _parse_a(d.pop("a")) @@ -72,7 +75,7 @@ def _parse_a(data: object) -> Union["PostResponsesUnionsSimpleBeforeComplexRespo return post_responses_unions_simple_before_complex_response_200 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200a_type_1.py b/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200a_type_1.py index 601d17cf8..ed608b31c 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200a_type_1.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/post_responses_unions_simple_before_complex_response_200a_type_1.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -10,24 +13,24 @@ class PostResponsesUnionsSimpleBeforeComplexResponse200AType1: """ """ - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - field_dict: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) post_responses_unions_simple_before_complex_response_200a_type_1 = cls() post_responses_unions_simple_before_complex_response_200a_type_1.additional_properties = d return post_responses_unions_simple_before_complex_response_200a_type_1 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_2xx.py b/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_2xx.py new file mode 100644 index 000000000..61e806ff9 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_2xx.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.status_code_patterns_response_2xx_status import StatusCodePatternsResponse2XXStatus +from ..types import UNSET, Unset + +T = TypeVar("T", bound="StatusCodePatternsResponse2XX") + + +@_attrs_define +class 
StatusCodePatternsResponse2XX: + """ + Attributes: + status (StatusCodePatternsResponse2XXStatus | Unset): + """ + + status: StatusCodePatternsResponse2XXStatus | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + status: str | Unset = UNSET + if not isinstance(self.status, Unset): + status = self.status.value + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if status is not UNSET: + field_dict["status"] = status + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + _status = d.pop("status", UNSET) + status: StatusCodePatternsResponse2XXStatus | Unset + if isinstance(_status, Unset): + status = UNSET + else: + status = StatusCodePatternsResponse2XXStatus(_status) + + status_code_patterns_response_2xx = cls( + status=status, + ) + + status_code_patterns_response_2xx.additional_properties = d + return status_code_patterns_response_2xx + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_2xx_status.py b/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_2xx_status.py new file mode 100644 index 000000000..0baaf6da8 --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_2xx_status.py @@ -0,0 +1,9 @@ +from enum import Enum + + +class StatusCodePatternsResponse2XXStatus(str, Enum): + FAILURE = "failure" + SUCCESS = "success" + + def __str__(self) -> str: + return str(self.value) diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_4xx.py b/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_4xx.py new file mode 100644 index 000000000..f62c4168a --- /dev/null +++ b/end_to_end_tests/golden-record/my_test_api_client/models/status_code_patterns_response_4xx.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="StatusCodePatternsResponse4XX") + + +@_attrs_define +class StatusCodePatternsResponse4XX: + """ + Attributes: + error (str | Unset): + """ + + error: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + error = self.error + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if error is not UNSET: + field_dict["error"] = error + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + error = d.pop("error", UNSET) + + status_code_patterns_response_4xx = cls( + error=error, + ) + + status_code_patterns_response_4xx.additional_properties = d + return status_code_patterns_response_4xx + 
+ @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_body.py b/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_body.py index 8c1843b41..73b054b24 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_body.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_body.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define @@ -11,15 +14,16 @@ class TestInlineObjectsBody: """ Attributes: - a_property (Union[Unset, str]): + a_property (str | Unset): """ - a_property: Union[Unset, str] = UNSET + a_property: str | Unset = UNSET - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a_property = self.a_property - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} + field_dict.update({}) if a_property is not UNSET: field_dict["a_property"] = a_property @@ -27,8 +31,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a_property = d.pop("a_property", UNSET) test_inline_objects_body = cls( diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_response_200.py b/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_response_200.py index 6a0ade77f..174a5957b 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_response_200.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/test_inline_objects_response_200.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define @@ -11,15 +14,16 @@ class TestInlineObjectsResponse200: """ Attributes: - a_property (Union[Unset, str]): + a_property (str | Unset): """ - a_property: Union[Unset, str] = UNSET + a_property: str | Unset = UNSET - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a_property = self.a_property - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} + field_dict.update({}) if a_property is not UNSET: field_dict["a_property"] = a_property @@ -27,8 +31,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) a_property = d.pop("a_property", UNSET) test_inline_objects_response_200 = cls( diff --git a/end_to_end_tests/golden-record/my_test_api_client/models/validation_error.py b/end_to_end_tests/golden-record/my_test_api_client/models/validation_error.py index 6ff5d4790..9bb9070d6 100644 --- 
a/end_to_end_tests/golden-record/my_test_api_client/models/validation_error.py +++ b/end_to_end_tests/golden-record/my_test_api_client/models/validation_error.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, cast +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast from attrs import define as _attrs_define @@ -9,46 +12,47 @@ class ValidationError: """ Attributes: - loc (List[str]): + loc (list[str]): msg (str): - type (str): + type_ (str): """ - loc: List[str] + loc: list[str] msg: str - type: str + type_: str - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: loc = self.loc msg = self.msg - type = self.type + type_ = self.type_ + + field_dict: dict[str, Any] = {} - field_dict: Dict[str, Any] = {} field_dict.update( { "loc": loc, "msg": msg, - "type": type, + "type": type_, } ) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() - loc = cast(List[str], d.pop("loc")) + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + loc = cast(list[str], d.pop("loc")) msg = d.pop("msg") - type = d.pop("type") + type_ = d.pop("type") validation_error = cls( loc=loc, msg=msg, - type=type, + type_=type_, ) return validation_error diff --git a/end_to_end_tests/golden-record/my_test_api_client/types.py b/end_to_end_tests/golden-record/my_test_api_client/types.py index 21fac106f..b64af0952 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/types.py +++ b/end_to_end_tests/golden-record/my_test_api_client/types.py @@ -1,7 +1,8 @@ """Contains some shared types for properties""" +from collections.abc import Mapping, MutableMapping from http import HTTPStatus -from typing import BinaryIO, Generic, Literal, MutableMapping, Optional, Tuple, TypeVar +from typing import IO, BinaryIO, Generic, Literal, TypeVar from attrs import define @@ -13,7 +14,15 @@ def __bool__(self) -> Literal[False]: UNSET: Unset = Unset() -FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]] +# The types that `httpx.Client(files=)` can accept, copied from that library. 
+FileContent = IO[bytes] | bytes | str +FileTypes = ( + # (filename, file (or bytes), content_type) + tuple[str | None, FileContent, str | None] + # (filename, file (or bytes), content_type, headers) + | tuple[str | None, FileContent, str | None, Mapping[str, str]] +) +RequestFiles = list[tuple[str, FileTypes]] @@ -21,10 +30,10 @@ class File: """Contains information for file uploads""" payload: BinaryIO - file_name: Optional[str] = None - mime_type: Optional[str] = None + file_name: str | None = None + mime_type: str | None = None - def to_tuple(self) -> FileJsonType: + def to_tuple(self) -> FileTypes: """Return a tuple representation that httpx will accept for multipart/form-data""" return self.file_name, self.payload, self.mime_type @@ -39,7 +48,7 @@ class Response(Generic[T]): status_code: HTTPStatus content: bytes headers: MutableMapping[str, str] - parsed: Optional[T] + parsed: T | None -__all__ = ["File", "Response", "FileJsonType", "Unset", "UNSET"] +__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/end_to_end_tests/golden-record/pyproject.toml b/end_to_end_tests/golden-record/pyproject.toml index 526beacf6..d3fd8e4d7 100644 --- a/end_to_end_tests/golden-record/pyproject.toml +++ b/end_to_end_tests/golden-record/pyproject.toml @@ -5,19 +5,18 @@ description = "A client library for accessing My Test API" authors = [] readme = "README.md" packages = [ - {include = "my_test_api_client"}, + { include = "my_test_api_client" }, ] -include = ["CHANGELOG.md", "my_test_api_client/py.typed"] - +include = ["my_test_api_client/py.typed"] [tool.poetry.dependencies] -python = "^3.8" -httpx = ">=0.20.0,<0.28.0" -attrs = ">=21.3.0" +python = "^3.10" +httpx = ">=0.23.0,<0.29.0" +attrs = ">=22.2.0" python-dateutil = "^2.8.0" [build-system] -requires = ["poetry-core>=1.0.0"] +requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = "poetry.core.masonry.api" [tool.ruff] diff --git a/end_to_end_tests/literal-enums-golden-record/.gitignore b/end_to_end_tests/literal-enums-golden-record/.gitignore new file mode 100644 index 000000000..79a2c3d73 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/.gitignore @@ -0,0 +1,23 @@ +__pycache__/ +build/ +dist/ +*.egg-info/ +.pytest_cache/ + +# pyenv +.python-version + +# Environments +.env +.venv + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# JetBrains +.idea/ + +/coverage.xml +/.coverage diff --git a/end_to_end_tests/literal-enums-golden-record/README.md b/end_to_end_tests/literal-enums-golden-record/README.md new file mode 100644 index 000000000..2c6268349 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/README.md @@ -0,0 +1,124 @@ +# my-enum-api-client +A client library for accessing My Enum API + +## Usage +First, create a client: + +```python +from my_enum_api_client import Client + +client = Client(base_url="https://api.example.com") +``` + +If the endpoints you're going to hit require authentication, use `AuthenticatedClient` instead: + +```python +from my_enum_api_client import AuthenticatedClient + +client = AuthenticatedClient(base_url="https://api.example.com", token="SuperSecretToken") +``` + +Now call your endpoint and use your models: + +```python +from my_enum_api_client.models import MyDataModel +from my_enum_api_client.api.my_tag import get_my_data_model +from my_enum_api_client.types import Response + +with client as client: + my_data: MyDataModel = 
get_my_data_model.sync(client=client) + # or if you need more info (e.g. status_code) + response: Response[MyDataModel] = get_my_data_model.sync_detailed(client=client) +``` + +Or do the same thing with an async version: + +```python +from my_enum_api_client.models import MyDataModel +from my_enum_api_client.api.my_tag import get_my_data_model +from my_enum_api_client.types import Response + +async with client as client: + my_data: MyDataModel = await get_my_data_model.asyncio(client=client) + response: Response[MyDataModel] = await get_my_data_model.asyncio_detailed(client=client) +``` + +By default, when you're calling an HTTPS API it will attempt to verify that SSL is working correctly. Using certificate verification is highly recommended most of the time, but sometimes you may need to authenticate to a server (especially an internal server) using a custom certificate bundle. + +```python +client = AuthenticatedClient( + base_url="https://internal_api.example.com", + token="SuperSecretToken", + verify_ssl="/path/to/certificate_bundle.pem", +) +``` + +You can also disable certificate validation altogether, but beware that **this is a security risk**. + +```python +client = AuthenticatedClient( + base_url="https://internal_api.example.com", + token="SuperSecretToken", + verify_ssl=False +) +``` + +Things to know: +1. Every path/method combo becomes a Python module with four functions: + 1. `sync`: Blocking request that returns parsed data (if successful) or `None` + 1. `sync_detailed`: Blocking request that always returns a `Request`, optionally with `parsed` set if the request was successful. + 1. `asyncio`: Like `sync` but async instead of blocking + 1. `asyncio_detailed`: Like `sync_detailed` but async instead of blocking + +1. All path/query params, and bodies become method arguments. +1. If your endpoint had any tags on it, the first tag will be used as a module name for the function (my_tag above) +1. Any endpoint which did not have a tag will be in `my_enum_api_client.api.default` + +## Advanced customizations + +There are more settings on the generated `Client` class which let you control more runtime behavior, check out the docstring on that class for more info. You can also customize the underlying `httpx.Client` or `httpx.AsyncClient` (depending on your use-case): + +```python +from my_enum_api_client import Client + +def log_request(request): + print(f"Request event hook: {request.method} {request.url} - Waiting for response") + +def log_response(response): + request = response.request + print(f"Response event hook: {request.method} {request.url} - Status {response.status_code}") + +client = Client( + base_url="https://api.example.com", + httpx_args={"event_hooks": {"request": [log_request], "response": [log_response]}}, +) + +# Or get the underlying httpx client to modify directly with client.get_httpx_client() or client.get_async_httpx_client() +``` + +You can even set the httpx client directly, but beware that this will override any existing settings (e.g., base_url): + +```python +import httpx +from my_enum_api_client import Client + +client = Client( + base_url="https://api.example.com", +) +# Note that base_url needs to be re-set, as would any shared cookies, headers, etc. 
+client.set_httpx_client(httpx.Client(base_url="https://api.example.com", proxies="http://localhost:8030")) +``` + +## Building / publishing this package +This project uses [Poetry](https://python-poetry.org/) to manage dependencies and packaging. Here are the basics: +1. Update the metadata in pyproject.toml (e.g. authors, version) +1. If you're using a private repository, configure it with Poetry + 1. `poetry config repositories.<your-repository-name> <url-to-your-repository>` + 1. `poetry config http-basic.<your-repository-name> <username> <password>` +1. Publish the client with `poetry publish --build -r <your-repository-name>` or, if for public PyPI, just `poetry publish --build` + +If you want to install this client into another project without publishing it (e.g. for development) then: +1. If that project **is using Poetry**, you can simply do `poetry add <path-to-this-client>` from that project +1. If that project is not using Poetry: + 1. Build a wheel with `poetry build -f wheel` + 1. Install that wheel from the other project `pip install <path-to-wheel>` diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/__init__.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/__init__.py new file mode 100644 index 000000000..5d1901164 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/__init__.py @@ -0,0 +1,8 @@ +"""A client library for accessing My Enum API""" + +from .client import AuthenticatedClient, Client + +__all__ = ( + "AuthenticatedClient", + "Client", +) diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/__init__.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/__init__.py new file mode 100644 index 000000000..81f9fa241 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/__init__.py @@ -0,0 +1 @@ +"""Contains methods for accessing the API""" diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/__init__.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/__init__.py new file mode 100644 index 000000000..2d7c0b23d --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/bool_enum_tests_bool_enum_post.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/bool_enum_tests_bool_enum_post.py new file mode 100644 index 000000000..4ed5ff6ee --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/bool_enum_tests_bool_enum_post.py @@ -0,0 +1,102 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...types import UNSET, Response + + +def _get_kwargs( + *, + bool_enum: bool, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["bool_enum"] = bool_enum + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/enum/bool", + "params": params, + } + + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: + return None + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + bool_enum: bool, +) -> Response[Any]: + """Bool Enum + + Args: + bool_enum (bool): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + bool_enum=bool_enum, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + bool_enum: bool, +) -> Response[Any]: + """Bool Enum + + Args: + bool_enum (bool): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + bool_enum=bool_enum, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/int_enum_tests_int_enum_post.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/int_enum_tests_int_enum_post.py new file mode 100644 index 000000000..bdec57bf1 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/enums/int_enum_tests_int_enum_post.py @@ -0,0 +1,104 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.an_int_enum import AnIntEnum +from ...types import UNSET, Response + + +def _get_kwargs( + *, + int_enum: AnIntEnum, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + json_int_enum: int = int_enum + params["int_enum"] = json_int_enum + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/enum/int", + "params": params, + } + + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Any | None: + if response.status_code == 200: + return None + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[Any]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + int_enum: AnIntEnum, +) -> Response[Any]: + """Int Enum + + Args: + int_enum (AnIntEnum): An enumeration. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + int_enum=int_enum, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + int_enum: AnIntEnum, +) -> Response[Any]: + """Int Enum + + Args: + int_enum (AnIntEnum): An enumeration. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Any] + """ + + kwargs = _get_kwargs( + int_enum=int_enum, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/__init__.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/__init__.py new file mode 100644 index 000000000..2d7c0b23d --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/get_user_list.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/get_user_list.py new file mode 100644 index 000000000..15b821528 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/get_user_list.py @@ -0,0 +1,254 @@ +from http import HTTPStatus +from typing import Any + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.a_model import AModel +from ...models.an_enum import AnEnum +from ...models.an_enum_with_null import AnEnumWithNull +from ...models.get_user_list_int_enum_header import GetUserListIntEnumHeader +from ...models.get_user_list_string_enum_header import ( + GetUserListStringEnumHeader, +) +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + int_enum_header: GetUserListIntEnumHeader | Unset = UNSET, + string_enum_header: GetUserListStringEnumHeader | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + if not isinstance(int_enum_header, Unset): + headers["Int-Enum-Header"] = str(int_enum_header) + + if not isinstance(string_enum_header, Unset): + headers["String-Enum-Header"] = str(string_enum_header) + + params: dict[str, Any] = {} + + json_an_enum_value = [] + for an_enum_value_item_data in an_enum_value: + an_enum_value_item: str = an_enum_value_item_data + json_an_enum_value.append(an_enum_value_item) + + params["an_enum_value"] = json_an_enum_value + + json_an_enum_value_with_null = [] + for an_enum_value_with_null_item_data in an_enum_value_with_null: + an_enum_value_with_null_item: None | str + if isinstance(an_enum_value_with_null_item_data, str): + an_enum_value_with_null_item = an_enum_value_with_null_item_data + else: + an_enum_value_with_null_item = an_enum_value_with_null_item_data + json_an_enum_value_with_null.append(an_enum_value_with_null_item) + + params["an_enum_value_with_null"] = json_an_enum_value_with_null + + json_an_enum_value_with_only_null = an_enum_value_with_only_null + + params["an_enum_value_with_only_null"] = json_an_enum_value_with_only_null + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/tests/", + "params": params, + } + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> list[AModel] | None: + if response.status_code == 200: + response_200 = [] + _response_200 = response.json() + for response_200_item_data in _response_200: + response_200_item = AModel.from_dict(response_200_item_data) + + response_200.append(response_200_item) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[list[AModel]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + int_enum_header: GetUserListIntEnumHeader | Unset = UNSET, + string_enum_header: GetUserListStringEnumHeader | Unset = UNSET, +) -> Response[list[AModel]]: + """Get List + + Get a list of things + + Args: + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + int_enum_header (GetUserListIntEnumHeader | Unset): + string_enum_header 
(GetUserListStringEnumHeader | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[list[AModel]] + """ + + kwargs = _get_kwargs( + an_enum_value=an_enum_value, + an_enum_value_with_null=an_enum_value_with_null, + an_enum_value_with_only_null=an_enum_value_with_only_null, + int_enum_header=int_enum_header, + string_enum_header=string_enum_header, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + int_enum_header: GetUserListIntEnumHeader | Unset = UNSET, + string_enum_header: GetUserListStringEnumHeader | Unset = UNSET, +) -> list[AModel] | None: + """Get List + + Get a list of things + + Args: + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + int_enum_header (GetUserListIntEnumHeader | Unset): + string_enum_header (GetUserListStringEnumHeader | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + list[AModel] + """ + + return sync_detailed( + client=client, + an_enum_value=an_enum_value, + an_enum_value_with_null=an_enum_value_with_null, + an_enum_value_with_only_null=an_enum_value_with_only_null, + int_enum_header=int_enum_header, + string_enum_header=string_enum_header, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + int_enum_header: GetUserListIntEnumHeader | Unset = UNSET, + string_enum_header: GetUserListStringEnumHeader | Unset = UNSET, +) -> Response[list[AModel]]: + """Get List + + Get a list of things + + Args: + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + int_enum_header (GetUserListIntEnumHeader | Unset): + string_enum_header (GetUserListStringEnumHeader | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[list[AModel]] + """ + + kwargs = _get_kwargs( + an_enum_value=an_enum_value, + an_enum_value_with_null=an_enum_value_with_null, + an_enum_value_with_only_null=an_enum_value_with_only_null, + int_enum_header=int_enum_header, + string_enum_header=string_enum_header, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, + an_enum_value: list[AnEnum], + an_enum_value_with_null: list[AnEnumWithNull | None], + an_enum_value_with_only_null: list[None], + int_enum_header: GetUserListIntEnumHeader | Unset = UNSET, + string_enum_header: GetUserListStringEnumHeader | Unset = UNSET, +) -> list[AModel] | None: + """Get List + + Get a list of things + + Args: + an_enum_value (list[AnEnum]): + an_enum_value_with_null (list[AnEnumWithNull | None]): + an_enum_value_with_only_null (list[None]): + int_enum_header (GetUserListIntEnumHeader | Unset): + string_enum_header (GetUserListStringEnumHeader | Unset): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + list[AModel] + """ + + return ( + await asyncio_detailed( + client=client, + an_enum_value=an_enum_value, + an_enum_value_with_null=an_enum_value_with_null, + an_enum_value_with_only_null=an_enum_value_with_only_null, + int_enum_header=int_enum_header, + string_enum_header=string_enum_header, + ) + ).parsed diff --git a/end_to_end_tests/golden-record/my_test_api_client/api/tests/upload_multiple_files_tests_upload_post.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/post_user_list.py similarity index 53% rename from end_to_end_tests/golden-record/my_test_api_client/api/tests/upload_multiple_files_tests_upload_post.py rename to end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/post_user_list.py index 6cfcfaa57..920c35f78 100644 --- a/end_to_end_tests/golden-record/my_test_api_client/api/tests/upload_multiple_files_tests_upload_post.py +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/api/tests/post_user_list.py @@ -1,56 +1,51 @@ from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union +from typing import Any import httpx from ... 
import errors from ...client import AuthenticatedClient, Client -from ...models.http_validation_error import HTTPValidationError -from ...types import File, Response +from ...models.a_model import AModel +from ...models.post_user_list_body import PostUserListBody +from ...types import UNSET, Response, Unset def _get_kwargs( *, - body: List[File], -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + body: PostUserListBody | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", - "url": "/https/github.com/tests/upload/multiple", + "url": "/https/github.com/tests/", } - _body = [] - for body_item_data in body: - body_item = body_item_data.to_tuple() - - _body.append(body_item) - - _kwargs["files"] = _body + if not isinstance(body, Unset): + _kwargs["files"] = body.to_multipart() _kwargs["headers"] = headers return _kwargs -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, HTTPValidationError]]: - if response.status_code == HTTPStatus.OK: - response_200 = response.json() +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> list[AModel] | None: + if response.status_code == 200: + response_200 = [] + _response_200 = response.json() + for response_200_item_data in _response_200: + response_200_item = AModel.from_dict(response_200_item_data) + + response_200.append(response_200_item) + return response_200 - if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY: - response_422 = HTTPValidationError.from_dict(response.json()) - return response_422 if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: return None -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, HTTPValidationError]]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[list[AModel]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -61,22 +56,22 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - body: List[File], -) -> Response[Union[Any, HTTPValidationError]]: - """Upload multiple files + client: AuthenticatedClient | Client, + body: PostUserListBody | Unset = UNSET, +) -> Response[list[AModel]]: + """Post List - Upload several files in the same request + Post a list of things Args: - body (List[File]): + body (PostUserListBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[list[AModel]] """ kwargs = _get_kwargs( @@ -92,22 +87,22 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], - body: List[File], -) -> Optional[Union[Any, HTTPValidationError]]: - """Upload multiple files + client: AuthenticatedClient | Client, + body: PostUserListBody | Unset = UNSET, +) -> list[AModel] | None: + """Post List - Upload several files in the same request + Post a list of things Args: - body (List[File]): + body (PostUserListBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + list[AModel] """ return sync_detailed( @@ -118,22 +113,22 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - body: List[File], -) -> Response[Union[Any, HTTPValidationError]]: - """Upload multiple files + client: AuthenticatedClient | Client, + body: PostUserListBody | Unset = UNSET, +) -> Response[list[AModel]]: + """Post List - Upload several files in the same request + Post a list of things Args: - body (List[File]): + body (PostUserListBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[Any, HTTPValidationError]] + Response[list[AModel]] """ kwargs = _get_kwargs( @@ -147,22 +142,22 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], - body: List[File], -) -> Optional[Union[Any, HTTPValidationError]]: - """Upload multiple files + client: AuthenticatedClient | Client, + body: PostUserListBody | Unset = UNSET, +) -> list[AModel] | None: + """Post List - Upload several files in the same request + Post a list of things Args: - body (List[File]): + body (PostUserListBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[Any, HTTPValidationError] + list[AModel] """ return ( diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/client.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/client.py new file mode 100644 index 000000000..1b7055ab8 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/client.py @@ -0,0 +1,268 @@ +import ssl +from typing import Any + +import httpx +from attrs import define, evolve, field + + +@define +class Client: + """A class for keeping track of data related to the API + + The following are accepted as keyword arguments and will be used to construct httpx Clients internally: + + ``base_url``: The base URL for the API, all requests are made to a relative path to this URL + + ``cookies``: A dictionary of cookies to be sent with every request + + ``headers``: A dictionary of headers to be sent with every request + + ``timeout``: The maximum amount of a time a request can take. API functions will raise + httpx.TimeoutException if this is exceeded. + + ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, + but can be set to False for testing purposes. + + ``follow_redirects``: Whether or not to follow redirects. Default value is False. + + ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. + + + Attributes: + raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a + status code that was not documented in the source OpenAPI document. Can also be provided as a keyword + argument to the constructor. 
+ """ + + raise_on_unexpected_status: bool = field(default=False, kw_only=True) + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) + + def with_headers(self, headers: dict[str, str]) -> "Client": + """Get a new client matching this one with additional headers""" + if self._client is not None: + self._client.headers.update(headers) + if self._async_client is not None: + self._async_client.headers.update(headers) + return evolve(self, headers={**self._headers, **headers}) + + def with_cookies(self, cookies: dict[str, str]) -> "Client": + """Get a new client matching this one with additional cookies""" + if self._client is not None: + self._client.cookies.update(cookies) + if self._async_client is not None: + self._async_client.cookies.update(cookies) + return evolve(self, cookies={**self._cookies, **cookies}) + + def with_timeout(self, timeout: httpx.Timeout) -> "Client": + """Get a new client matching this one with a new timeout configuration""" + if self._client is not None: + self._client.timeout = timeout + if self._async_client is not None: + self._async_client.timeout = timeout + return evolve(self, timeout=timeout) + + def set_httpx_client(self, client: httpx.Client) -> "Client": + """Manually set the underlying httpx.Client + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. + """ + self._client = client + return self + + def get_httpx_client(self) -> httpx.Client: + """Get the underlying httpx.Client, constructing a new one if not previously set""" + if self._client is None: + self._client = httpx.Client( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._client + + def __enter__(self) -> "Client": + """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" + self.get_httpx_client().__enter__() + return self + + def __exit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for internal httpx.Client (see httpx docs)""" + self.get_httpx_client().__exit__(*args, **kwargs) + + def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": + """Manually set the underlying httpx.AsyncClient + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
+ """ + self._async_client = async_client + return self + + def get_async_httpx_client(self) -> httpx.AsyncClient: + """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" + if self._async_client is None: + self._async_client = httpx.AsyncClient( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._async_client + + async def __aenter__(self) -> "Client": + """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" + await self.get_async_httpx_client().__aenter__() + return self + + async def __aexit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" + await self.get_async_httpx_client().__aexit__(*args, **kwargs) + + +@define +class AuthenticatedClient: + """A Client which has been authenticated for use on secured endpoints + + The following are accepted as keyword arguments and will be used to construct httpx Clients internally: + + ``base_url``: The base URL for the API, all requests are made to a relative path to this URL + + ``cookies``: A dictionary of cookies to be sent with every request + + ``headers``: A dictionary of headers to be sent with every request + + ``timeout``: The maximum amount of a time a request can take. API functions will raise + httpx.TimeoutException if this is exceeded. + + ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, + but can be set to False for testing purposes. + + ``follow_redirects``: Whether or not to follow redirects. Default value is False. + + ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. + + + Attributes: + raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a + status code that was not documented in the source OpenAPI document. Can also be provided as a keyword + argument to the constructor. 
+ token: The token to use for authentication + prefix: The prefix to use for the Authorization header + auth_header_name: The name of the Authorization header + """ + + raise_on_unexpected_status: bool = field(default=False, kw_only=True) + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) + + token: str + prefix: str = "Bearer" + auth_header_name: str = "Authorization" + + def with_headers(self, headers: dict[str, str]) -> "AuthenticatedClient": + """Get a new client matching this one with additional headers""" + if self._client is not None: + self._client.headers.update(headers) + if self._async_client is not None: + self._async_client.headers.update(headers) + return evolve(self, headers={**self._headers, **headers}) + + def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": + """Get a new client matching this one with additional cookies""" + if self._client is not None: + self._client.cookies.update(cookies) + if self._async_client is not None: + self._async_client.cookies.update(cookies) + return evolve(self, cookies={**self._cookies, **cookies}) + + def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": + """Get a new client matching this one with a new timeout configuration""" + if self._client is not None: + self._client.timeout = timeout + if self._async_client is not None: + self._async_client.timeout = timeout + return evolve(self, timeout=timeout) + + def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient": + """Manually set the underlying httpx.Client + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. + """ + self._client = client + return self + + def get_httpx_client(self) -> httpx.Client: + """Get the underlying httpx.Client, constructing a new one if not previously set""" + if self._client is None: + self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token + self._client = httpx.Client( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._client + + def __enter__(self) -> "AuthenticatedClient": + """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" + self.get_httpx_client().__enter__() + return self + + def __exit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for internal httpx.Client (see httpx docs)""" + self.get_httpx_client().__exit__(*args, **kwargs) + + def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient": + """Manually set the underlying httpx.AsyncClient + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
+ """ + self._async_client = async_client + return self + + def get_async_httpx_client(self) -> httpx.AsyncClient: + """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" + if self._async_client is None: + self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token + self._async_client = httpx.AsyncClient( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._async_client + + async def __aenter__(self) -> "AuthenticatedClient": + """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" + await self.get_async_httpx_client().__aenter__() + return self + + async def __aexit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" + await self.get_async_httpx_client().__aexit__(*args, **kwargs) diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/errors.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/errors.py new file mode 100644 index 000000000..5f92e76ac --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/errors.py @@ -0,0 +1,16 @@ +"""Contains shared errors types that can be raised from API functions""" + + +class UnexpectedStatus(Exception): + """Raised by api functions when the response status an undocumented status and Client.raise_on_unexpected_status is True""" + + def __init__(self, status_code: int, content: bytes): + self.status_code = status_code + self.content = content + + super().__init__( + f"Unexpected status code: {status_code}\n\nResponse content:\n{content.decode(errors='ignore')}" + ) + + +__all__ = ["UnexpectedStatus"] diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/__init__.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/__init__.py new file mode 100644 index 000000000..2bdeafad7 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/__init__.py @@ -0,0 +1,23 @@ +"""Contains all the data models used in inputs/outputs""" + +from .a_model import AModel +from .an_all_of_enum import AnAllOfEnum +from .an_enum import AnEnum +from .an_enum_with_null import AnEnumWithNull +from .an_int_enum import AnIntEnum +from .different_enum import DifferentEnum +from .get_user_list_int_enum_header import GetUserListIntEnumHeader +from .get_user_list_string_enum_header import GetUserListStringEnumHeader +from .post_user_list_body import PostUserListBody + +__all__ = ( + "AModel", + "AnAllOfEnum", + "AnEnum", + "AnEnumWithNull", + "AnIntEnum", + "DifferentEnum", + "GetUserListIntEnumHeader", + "GetUserListStringEnumHeader", + "PostUserListBody", +) diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/a_model.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/a_model.py new file mode 100644 index 000000000..890f7507f --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/a_model.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define + +from ..models.an_all_of_enum import AnAllOfEnum, check_an_all_of_enum +from ..models.an_enum import AnEnum, check_an_enum +from 
..models.different_enum import DifferentEnum, check_different_enum +from ..types import UNSET, Unset + +T = TypeVar("T", bound="AModel") + + +@_attrs_define +class AModel: + """A Model for testing all the ways enums can be used + + Attributes: + an_enum_value (AnEnum): For testing Enums in all the ways they can be used + an_allof_enum_with_overridden_default (AnAllOfEnum): Default: 'overridden_default'. + any_value (Any | Unset): + an_optional_allof_enum (AnAllOfEnum | Unset): + nested_list_of_enums (list[list[DifferentEnum]] | Unset): + """ + + an_enum_value: AnEnum + an_allof_enum_with_overridden_default: AnAllOfEnum = "overridden_default" + any_value: Any | Unset = UNSET + an_optional_allof_enum: AnAllOfEnum | Unset = UNSET + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + + def to_dict(self) -> dict[str, Any]: + an_enum_value: str = self.an_enum_value + + an_allof_enum_with_overridden_default: str = self.an_allof_enum_with_overridden_default + + any_value = self.any_value + + an_optional_allof_enum: str | Unset = UNSET + if not isinstance(self.an_optional_allof_enum, Unset): + an_optional_allof_enum = self.an_optional_allof_enum + + nested_list_of_enums: list[list[str]] | Unset = UNSET + if not isinstance(self.nested_list_of_enums, Unset): + nested_list_of_enums = [] + for nested_list_of_enums_item_data in self.nested_list_of_enums: + nested_list_of_enums_item = [] + for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data: + nested_list_of_enums_item_item: str = nested_list_of_enums_item_item_data + nested_list_of_enums_item.append(nested_list_of_enums_item_item) + + nested_list_of_enums.append(nested_list_of_enums_item) + + field_dict: dict[str, Any] = {} + + field_dict.update( + { + "an_enum_value": an_enum_value, + "an_allof_enum_with_overridden_default": an_allof_enum_with_overridden_default, + } + ) + if any_value is not UNSET: + field_dict["any_value"] = any_value + if an_optional_allof_enum is not UNSET: + field_dict["an_optional_allof_enum"] = an_optional_allof_enum + if nested_list_of_enums is not UNSET: + field_dict["nested_list_of_enums"] = nested_list_of_enums + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + an_enum_value = check_an_enum(d.pop("an_enum_value")) + + an_allof_enum_with_overridden_default = check_an_all_of_enum(d.pop("an_allof_enum_with_overridden_default")) + + any_value = d.pop("any_value", UNSET) + + _an_optional_allof_enum = d.pop("an_optional_allof_enum", UNSET) + an_optional_allof_enum: AnAllOfEnum | Unset + if isinstance(_an_optional_allof_enum, Unset): + an_optional_allof_enum = UNSET + else: + an_optional_allof_enum = check_an_all_of_enum(_an_optional_allof_enum) + + _nested_list_of_enums = d.pop("nested_list_of_enums", UNSET) + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + if _nested_list_of_enums is not UNSET: + nested_list_of_enums = [] + for nested_list_of_enums_item_data in _nested_list_of_enums: + nested_list_of_enums_item = [] + _nested_list_of_enums_item = nested_list_of_enums_item_data + for nested_list_of_enums_item_item_data in _nested_list_of_enums_item: + nested_list_of_enums_item_item = check_different_enum(nested_list_of_enums_item_item_data) + + nested_list_of_enums_item.append(nested_list_of_enums_item_item) + + nested_list_of_enums.append(nested_list_of_enums_item) + + a_model = cls( + an_enum_value=an_enum_value, + an_allof_enum_with_overridden_default=an_allof_enum_with_overridden_default, + 
any_value=any_value, + an_optional_allof_enum=an_optional_allof_enum, + nested_list_of_enums=nested_list_of_enums, + ) + + return a_model diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_all_of_enum.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_all_of_enum.py new file mode 100644 index 000000000..3455e04d0 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_all_of_enum.py @@ -0,0 +1,16 @@ +from typing import Literal, cast + +AnAllOfEnum = Literal["a_default", "bar", "foo", "overridden_default"] + +AN_ALL_OF_ENUM_VALUES: set[AnAllOfEnum] = { + "a_default", + "bar", + "foo", + "overridden_default", +} + + +def check_an_all_of_enum(value: str) -> AnAllOfEnum: + if value in AN_ALL_OF_ENUM_VALUES: + return cast(AnAllOfEnum, value) + raise TypeError(f"Unexpected value {value!r}. Expected one of {AN_ALL_OF_ENUM_VALUES!r}") diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_enum.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_enum.py new file mode 100644 index 000000000..27b5c45f9 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_enum.py @@ -0,0 +1,14 @@ +from typing import Literal, cast + +AnEnum = Literal["FIRST_VALUE", "SECOND_VALUE"] + +AN_ENUM_VALUES: set[AnEnum] = { + "FIRST_VALUE", + "SECOND_VALUE", +} + + +def check_an_enum(value: str) -> AnEnum: + if value in AN_ENUM_VALUES: + return cast(AnEnum, value) + raise TypeError(f"Unexpected value {value!r}. Expected one of {AN_ENUM_VALUES!r}") diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_enum_with_null.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_enum_with_null.py new file mode 100644 index 000000000..4203876de --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_enum_with_null.py @@ -0,0 +1,14 @@ +from typing import Literal, cast + +AnEnumWithNull = Literal["FIRST_VALUE", "SECOND_VALUE"] + +AN_ENUM_WITH_NULL_VALUES: set[AnEnumWithNull] = { + "FIRST_VALUE", + "SECOND_VALUE", +} + + +def check_an_enum_with_null(value: str) -> AnEnumWithNull: + if value in AN_ENUM_WITH_NULL_VALUES: + return cast(AnEnumWithNull, value) + raise TypeError(f"Unexpected value {value!r}. Expected one of {AN_ENUM_WITH_NULL_VALUES!r}") diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_int_enum.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_int_enum.py new file mode 100644 index 000000000..9d0abd942 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/an_int_enum.py @@ -0,0 +1,15 @@ +from typing import Literal, cast + +AnIntEnum = Literal[-1, 1, 2] + +AN_INT_ENUM_VALUES: set[AnIntEnum] = { + -1, + 1, + 2, +} + + +def check_an_int_enum(value: int) -> AnIntEnum: + if value in AN_INT_ENUM_VALUES: + return cast(AnIntEnum, value) + raise TypeError(f"Unexpected value {value!r}. 
Expected one of {AN_INT_ENUM_VALUES!r}") diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/different_enum.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/different_enum.py new file mode 100644 index 000000000..e672a9821 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/different_enum.py @@ -0,0 +1,14 @@ +from typing import Literal, cast + +DifferentEnum = Literal["DIFFERENT", "OTHER"] + +DIFFERENT_ENUM_VALUES: set[DifferentEnum] = { + "DIFFERENT", + "OTHER", +} + + +def check_different_enum(value: str) -> DifferentEnum: + if value in DIFFERENT_ENUM_VALUES: + return cast(DifferentEnum, value) + raise TypeError(f"Unexpected value {value!r}. Expected one of {DIFFERENT_ENUM_VALUES!r}") diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/get_user_list_int_enum_header.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/get_user_list_int_enum_header.py new file mode 100644 index 000000000..845d6c2a0 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/get_user_list_int_enum_header.py @@ -0,0 +1,15 @@ +from typing import Literal, cast + +GetUserListIntEnumHeader = Literal[1, 2, 3] + +GET_USER_LIST_INT_ENUM_HEADER_VALUES: set[GetUserListIntEnumHeader] = { + 1, + 2, + 3, +} + + +def check_get_user_list_int_enum_header(value: int) -> GetUserListIntEnumHeader: + if value in GET_USER_LIST_INT_ENUM_HEADER_VALUES: + return cast(GetUserListIntEnumHeader, value) + raise TypeError(f"Unexpected value {value!r}. Expected one of {GET_USER_LIST_INT_ENUM_HEADER_VALUES!r}") diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/get_user_list_string_enum_header.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/get_user_list_string_enum_header.py new file mode 100644 index 000000000..55dbbad62 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/get_user_list_string_enum_header.py @@ -0,0 +1,15 @@ +from typing import Literal, cast + +GetUserListStringEnumHeader = Literal["one", "three", "two"] + +GET_USER_LIST_STRING_ENUM_HEADER_VALUES: set[GetUserListStringEnumHeader] = { + "one", + "three", + "two", +} + + +def check_get_user_list_string_enum_header(value: str) -> GetUserListStringEnumHeader: + if value in GET_USER_LIST_STRING_ENUM_HEADER_VALUES: + return cast(GetUserListStringEnumHeader, value) + raise TypeError(f"Unexpected value {value!r}. Expected one of {GET_USER_LIST_STRING_ENUM_HEADER_VALUES!r}") diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/post_user_list_body.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/post_user_list_body.py new file mode 100644 index 000000000..7e8e93950 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/models/post_user_list_body.py @@ -0,0 +1,249 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from .. 
import types +from ..models.an_all_of_enum import AnAllOfEnum, check_an_all_of_enum +from ..models.an_enum import AnEnum, check_an_enum +from ..models.an_enum_with_null import AnEnumWithNull, check_an_enum_with_null +from ..models.different_enum import DifferentEnum, check_different_enum +from ..types import UNSET, Unset + +T = TypeVar("T", bound="PostUserListBody") + + +@_attrs_define +class PostUserListBody: + """ + Attributes: + an_enum_value (list[AnEnum] | Unset): + an_enum_value_with_null (list[AnEnumWithNull | None] | Unset): + an_enum_value_with_only_null (list[None] | Unset): + an_allof_enum_with_overridden_default (AnAllOfEnum | Unset): Default: 'overridden_default'. + an_optional_allof_enum (AnAllOfEnum | Unset): + nested_list_of_enums (list[list[DifferentEnum]] | Unset): + """ + + an_enum_value: list[AnEnum] | Unset = UNSET + an_enum_value_with_null: list[AnEnumWithNull | None] | Unset = UNSET + an_enum_value_with_only_null: list[None] | Unset = UNSET + an_allof_enum_with_overridden_default: AnAllOfEnum | Unset = "overridden_default" + an_optional_allof_enum: AnAllOfEnum | Unset = UNSET + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + an_enum_value: list[str] | Unset = UNSET + if not isinstance(self.an_enum_value, Unset): + an_enum_value = [] + for an_enum_value_item_data in self.an_enum_value: + an_enum_value_item: str = an_enum_value_item_data + an_enum_value.append(an_enum_value_item) + + an_enum_value_with_null: list[None | str] | Unset = UNSET + if not isinstance(self.an_enum_value_with_null, Unset): + an_enum_value_with_null = [] + for an_enum_value_with_null_item_data in self.an_enum_value_with_null: + an_enum_value_with_null_item: None | str + if isinstance(an_enum_value_with_null_item_data, str): + an_enum_value_with_null_item = an_enum_value_with_null_item_data + else: + an_enum_value_with_null_item = an_enum_value_with_null_item_data + an_enum_value_with_null.append(an_enum_value_with_null_item) + + an_enum_value_with_only_null: list[None] | Unset = UNSET + if not isinstance(self.an_enum_value_with_only_null, Unset): + an_enum_value_with_only_null = self.an_enum_value_with_only_null + + an_allof_enum_with_overridden_default: str | Unset = UNSET + if not isinstance(self.an_allof_enum_with_overridden_default, Unset): + an_allof_enum_with_overridden_default = self.an_allof_enum_with_overridden_default + + an_optional_allof_enum: str | Unset = UNSET + if not isinstance(self.an_optional_allof_enum, Unset): + an_optional_allof_enum = self.an_optional_allof_enum + + nested_list_of_enums: list[list[str]] | Unset = UNSET + if not isinstance(self.nested_list_of_enums, Unset): + nested_list_of_enums = [] + for nested_list_of_enums_item_data in self.nested_list_of_enums: + nested_list_of_enums_item = [] + for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data: + nested_list_of_enums_item_item: str = nested_list_of_enums_item_item_data + nested_list_of_enums_item.append(nested_list_of_enums_item_item) + + nested_list_of_enums.append(nested_list_of_enums_item) + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if an_enum_value is not UNSET: + field_dict["an_enum_value"] = an_enum_value + if an_enum_value_with_null is not UNSET: + field_dict["an_enum_value_with_null"] = an_enum_value_with_null + if an_enum_value_with_only_null is not UNSET: + 
field_dict["an_enum_value_with_only_null"] = an_enum_value_with_only_null + if an_allof_enum_with_overridden_default is not UNSET: + field_dict["an_allof_enum_with_overridden_default"] = an_allof_enum_with_overridden_default + if an_optional_allof_enum is not UNSET: + field_dict["an_optional_allof_enum"] = an_optional_allof_enum + if nested_list_of_enums is not UNSET: + field_dict["nested_list_of_enums"] = nested_list_of_enums + + return field_dict + + def to_multipart(self) -> types.RequestFiles: + files: types.RequestFiles = [] + + if not isinstance(self.an_enum_value, Unset): + for an_enum_value_item_element in self.an_enum_value: + files.append(("an_enum_value", (None, str(an_enum_value_item_element).encode(), "text/plain"))) + + if not isinstance(self.an_enum_value_with_null, Unset): + for an_enum_value_with_null_item_element in self.an_enum_value_with_null: + if an_enum_value_with_null_item_element is None: + files.append( + ( + "an_enum_value_with_null", + (None, str(an_enum_value_with_null_item_element).encode(), "text/plain"), + ) + ) + else: + files.append( + ( + "an_enum_value_with_null", + (None, str(an_enum_value_with_null_item_element).encode(), "text/plain"), + ) + ) + + if not isinstance(self.an_enum_value_with_only_null, Unset): + for an_enum_value_with_only_null_item_element in self.an_enum_value_with_only_null: + files.append( + ( + "an_enum_value_with_only_null", + (None, str(an_enum_value_with_only_null_item_element).encode(), "text/plain"), + ) + ) + + if not isinstance(self.an_allof_enum_with_overridden_default, Unset): + files.append( + ( + "an_allof_enum_with_overridden_default", + (None, str(self.an_allof_enum_with_overridden_default).encode(), "text/plain"), + ) + ) + + if not isinstance(self.an_optional_allof_enum, Unset): + files.append(("an_optional_allof_enum", (None, str(self.an_optional_allof_enum).encode(), "text/plain"))) + + if not isinstance(self.nested_list_of_enums, Unset): + for nested_list_of_enums_item_element in self.nested_list_of_enums: + for nested_list_of_enums_item_item_element in nested_list_of_enums_item_element: + files.append( + ( + "nested_list_of_enums", + (None, str(nested_list_of_enums_item_item_element).encode(), "text/plain"), + ) + ) + + for prop_name, prop in self.additional_properties.items(): + files.append((prop_name, (None, str(prop).encode(), "text/plain"))) + + return files + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + _an_enum_value = d.pop("an_enum_value", UNSET) + an_enum_value: list[AnEnum] | Unset = UNSET + if _an_enum_value is not UNSET: + an_enum_value = [] + for an_enum_value_item_data in _an_enum_value: + an_enum_value_item = check_an_enum(an_enum_value_item_data) + + an_enum_value.append(an_enum_value_item) + + _an_enum_value_with_null = d.pop("an_enum_value_with_null", UNSET) + an_enum_value_with_null: list[AnEnumWithNull | None] | Unset = UNSET + if _an_enum_value_with_null is not UNSET: + an_enum_value_with_null = [] + for an_enum_value_with_null_item_data in _an_enum_value_with_null: + + def _parse_an_enum_value_with_null_item(data: object) -> AnEnumWithNull | None: + if data is None: + return data + try: + if not isinstance(data, str): + raise TypeError() + componentsschemas_an_enum_with_null_type_1 = check_an_enum_with_null(data) + + return componentsschemas_an_enum_with_null_type_1 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(AnEnumWithNull | None, data) + + an_enum_value_with_null_item = 
_parse_an_enum_value_with_null_item(an_enum_value_with_null_item_data) + + an_enum_value_with_null.append(an_enum_value_with_null_item) + + an_enum_value_with_only_null = cast(list[None], d.pop("an_enum_value_with_only_null", UNSET)) + + _an_allof_enum_with_overridden_default = d.pop("an_allof_enum_with_overridden_default", UNSET) + an_allof_enum_with_overridden_default: AnAllOfEnum | Unset + if isinstance(_an_allof_enum_with_overridden_default, Unset): + an_allof_enum_with_overridden_default = UNSET + else: + an_allof_enum_with_overridden_default = check_an_all_of_enum(_an_allof_enum_with_overridden_default) + + _an_optional_allof_enum = d.pop("an_optional_allof_enum", UNSET) + an_optional_allof_enum: AnAllOfEnum | Unset + if isinstance(_an_optional_allof_enum, Unset): + an_optional_allof_enum = UNSET + else: + an_optional_allof_enum = check_an_all_of_enum(_an_optional_allof_enum) + + _nested_list_of_enums = d.pop("nested_list_of_enums", UNSET) + nested_list_of_enums: list[list[DifferentEnum]] | Unset = UNSET + if _nested_list_of_enums is not UNSET: + nested_list_of_enums = [] + for nested_list_of_enums_item_data in _nested_list_of_enums: + nested_list_of_enums_item = [] + _nested_list_of_enums_item = nested_list_of_enums_item_data + for nested_list_of_enums_item_item_data in _nested_list_of_enums_item: + nested_list_of_enums_item_item = check_different_enum(nested_list_of_enums_item_item_data) + + nested_list_of_enums_item.append(nested_list_of_enums_item_item) + + nested_list_of_enums.append(nested_list_of_enums_item) + + post_user_list_body = cls( + an_enum_value=an_enum_value, + an_enum_value_with_null=an_enum_value_with_null, + an_enum_value_with_only_null=an_enum_value_with_only_null, + an_allof_enum_with_overridden_default=an_allof_enum_with_overridden_default, + an_optional_allof_enum=an_optional_allof_enum, + nested_list_of_enums=nested_list_of_enums, + ) + + post_user_list_body.additional_properties = d + return post_user_list_body + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/py.typed b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/py.typed new file mode 100644 index 000000000..1aad32711 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 \ No newline at end of file diff --git a/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/types.py b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/types.py new file mode 100644 index 000000000..b64af0952 --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/my_enum_api_client/types.py @@ -0,0 +1,54 @@ +"""Contains some shared types for properties""" + +from collections.abc import Mapping, MutableMapping +from http import HTTPStatus +from typing import IO, BinaryIO, Generic, Literal, TypeVar + +from attrs import define + + +class Unset: + def __bool__(self) -> Literal[False]: + return False + + +UNSET: Unset = Unset() + +# The types that `httpx.Client(files=)` can accept, copied from that library. 
+FileContent = IO[bytes] | bytes | str +FileTypes = ( + # (filename, file (or bytes), content_type) + tuple[str | None, FileContent, str | None] + # (filename, file (or bytes), content_type, headers) + | tuple[str | None, FileContent, str | None, Mapping[str, str]] +) +RequestFiles = list[tuple[str, FileTypes]] + + +@define +class File: + """Contains information for file uploads""" + + payload: BinaryIO + file_name: str | None = None + mime_type: str | None = None + + def to_tuple(self) -> FileTypes: + """Return a tuple representation that httpx will accept for multipart/form-data""" + return self.file_name, self.payload, self.mime_type + + +T = TypeVar("T") + + +@define +class Response(Generic[T]): + """A response from an endpoint""" + + status_code: HTTPStatus + content: bytes + headers: MutableMapping[str, str] + parsed: T | None + + +__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/end_to_end_tests/literal-enums-golden-record/pyproject.toml b/end_to_end_tests/literal-enums-golden-record/pyproject.toml new file mode 100644 index 000000000..a83d5f1ad --- /dev/null +++ b/end_to_end_tests/literal-enums-golden-record/pyproject.toml @@ -0,0 +1,26 @@ +[tool.poetry] +name = "my-enum-api-client" +version = "0.1.0" +description = "A client library for accessing My Enum API" +authors = [] +readme = "README.md" +packages = [ + { include = "my_enum_api_client" }, +] +include = ["my_enum_api_client/py.typed"] + +[tool.poetry.dependencies] +python = "^3.10" +httpx = ">=0.23.0,<0.29.0" +attrs = ">=22.2.0" +python-dateutil = "^2.8.0" + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +select = ["F", "I", "UP"] diff --git a/end_to_end_tests/literal_enums.config.yml b/end_to_end_tests/literal_enums.config.yml new file mode 100644 index 000000000..120eae0a7 --- /dev/null +++ b/end_to_end_tests/literal_enums.config.yml @@ -0,0 +1 @@ +literal_enums: true diff --git a/end_to_end_tests/metadata_snapshots/pdm.pyproject.toml b/end_to_end_tests/metadata_snapshots/pdm.pyproject.toml index fddcea97f..d5fb9e988 100644 --- a/end_to_end_tests/metadata_snapshots/pdm.pyproject.toml +++ b/end_to_end_tests/metadata_snapshots/pdm.pyproject.toml @@ -4,10 +4,10 @@ version = "0.1.0" description = "A client library for accessing Test 3.1 Features" authors = [] readme = "README.md" -requires-python = ">=3.8,<4.0" +requires-python = ">=3.10" dependencies = [ - "httpx>=0.20.0,<0.28.0", - "attrs>=21.3.0", + "httpx>=0.23.0,<0.29.0", + "attrs>=22.2.0", "python-dateutil>=2.8.0", ] diff --git a/end_to_end_tests/metadata_snapshots/poetry.pyproject.toml b/end_to_end_tests/metadata_snapshots/poetry.pyproject.toml index f9a1becf8..202b9d360 100644 --- a/end_to_end_tests/metadata_snapshots/poetry.pyproject.toml +++ b/end_to_end_tests/metadata_snapshots/poetry.pyproject.toml @@ -5,19 +5,18 @@ description = "A client library for accessing Test 3.1 Features" authors = [] readme = "README.md" packages = [ - {include = "test_3_1_features_client"}, + { include = "test_3_1_features_client" }, ] -include = ["CHANGELOG.md", "test_3_1_features_client/py.typed"] - +include = ["test_3_1_features_client/py.typed"] [tool.poetry.dependencies] -python = "^3.8" -httpx = ">=0.20.0,<0.28.0" -attrs = ">=21.3.0" +python = "^3.10" +httpx = ">=0.23.0,<0.29.0" +attrs = ">=22.2.0" python-dateutil = "^2.8.0" [build-system] -requires = ["poetry-core>=1.0.0"] +requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = 
"poetry.core.masonry.api" [tool.ruff] diff --git a/end_to_end_tests/metadata_snapshots/setup.py b/end_to_end_tests/metadata_snapshots/setup.py index 6350b8c4c..7ef00915b 100644 --- a/end_to_end_tests/metadata_snapshots/setup.py +++ b/end_to_end_tests/metadata_snapshots/setup.py @@ -12,7 +12,7 @@ long_description=long_description, long_description_content_type="text/markdown", packages=find_packages(), - python_requires=">=3.8, <4", - install_requires=["httpx >= 0.20.0, < 0.28.0", "attrs >= 21.3.0", "python-dateutil >= 2.8.0, < 3"], + python_requires=">=3.10, <4", + install_requires=["httpx >= 0.23.0, < 0.29.0", "attrs >= 22.2.0", "python-dateutil >= 2.8.0, < 3"], package_data={"test_3_1_features_client": ["py.typed"]}, ) diff --git a/end_to_end_tests/metadata_snapshots/uv.pyproject.toml b/end_to_end_tests/metadata_snapshots/uv.pyproject.toml new file mode 100644 index 000000000..65d85d2fc --- /dev/null +++ b/end_to_end_tests/metadata_snapshots/uv.pyproject.toml @@ -0,0 +1,26 @@ +[project] +name = "test-3-1-features-client" +version = "0.1.0" +description = "A client library for accessing Test 3.1 Features" +authors = [] +requires-python = ">=3.10" +readme = "README.md" +dependencies = [ + "httpx>=0.23.0,<0.29.0", + "attrs>=22.2.0", + "python-dateutil>=2.8.0,<3", +] + +[tool.uv.build-backend] +module-name = "test_3_1_features_client" +module-root = "" + +[build-system] +requires = ["uv_build>=0.9.0,<0.10.0"] +build-backend = "uv_build" + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +select = ["F", "I", "UP"] diff --git a/end_to_end_tests/openapi_3.1_enums.yaml b/end_to_end_tests/openapi_3.1_enums.yaml new file mode 100644 index 000000000..b77d4ff74 --- /dev/null +++ b/end_to_end_tests/openapi_3.1_enums.yaml @@ -0,0 +1,226 @@ +openapi: 3.1.0 +info: + title: My Enum API + description: An API for testing enum handling in openapi-python-client + version: 0.1.0 +paths: + /tests/: + get: + tags: + - tests + summary: Get List + description: 'Get a list of things ' + operationId: getUserList + parameters: + - required: true + schema: + title: An Enum Value + type: array + items: + $ref: '#/components/schemas/AnEnum' + name: an_enum_value + in: query + - required: true + schema: + title: An Enum Value With Null And String Values + type: array + items: + $ref: '#/components/schemas/AnEnumWithNull' + name: an_enum_value_with_null + in: query + - required: true + schema: + title: An Enum Value With Only Null Values + type: array + items: + $ref: '#/components/schemas/AnEnumWithOnlyNull' + name: an_enum_value_with_only_null + in: query + - in: header + name: Int-Enum-Header + required: false + schema: + type: integer + enum: + - 1 + - 2 + - 3 + - in: header + name: String-Enum-Header + required: false + schema: + type: string + enum: + - one + - two + - three + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Get List Tests Get + type: array + items: + $ref: '#/components/schemas/AModel' + post: + tags: + - tests + summary: Post List + description: 'Post a list of things ' + operationId: postUserList + requestBody: + content: + multipart/form-data: + schema: + type: object + properties: + an_enum_value: + title: An Enum Value + type: array + items: + $ref: '#/components/schemas/AnEnum' + an_enum_value_with_null: + title: An Enum Value With Null And String Values + type: array + items: + $ref: '#/components/schemas/AnEnumWithNull' + an_enum_value_with_only_null: + title: An Enum Value With Only Null Values + type: array + items: + 
$ref: '#/components/schemas/AnEnumWithOnlyNull' + an_allof_enum_with_overridden_default: + title: An AllOf Enum With Overridden Default + allOf: + - $ref: '#/components/schemas/AnAllOfEnum' + default: overridden_default + an_optional_allof_enum: + title: An Optional AllOf Enum + $ref: '#/components/schemas/AnAllOfEnum' + nested_list_of_enums: + title: Nested List Of Enums + type: array + items: + type: array + items: + $ref: '#/components/schemas/DifferentEnum' + default: [] + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Get List Tests Get + type: array + items: + $ref: '#/components/schemas/AModel' + /enum/int: + post: + tags: + - enums + summary: Int Enum + operationId: int_enum_tests_int_enum_post + parameters: + - required: true + schema: + $ref: '#/components/schemas/AnIntEnum' + name: int_enum + in: query + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + /enum/bool: + post: + tags: + - enums + summary: Bool Enum + operationId: bool_enum_tests_bool_enum_post + parameters: + - required: true + schema: + type: boolean + enum: + - true + - false + name: bool_enum + in: query + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} +components: + schemas: + AModel: + title: AModel + required: + - an_enum_value + - an_allof_enum_with_overridden_default + type: object + properties: + any_value: {} + an_enum_value: + $ref: '#/components/schemas/AnEnum' + an_allof_enum_with_overridden_default: + allOf: + - $ref: '#/components/schemas/AnAllOfEnum' + default: overridden_default + an_optional_allof_enum: + $ref: '#/components/schemas/AnAllOfEnum' + nested_list_of_enums: + title: Nested List Of Enums + type: array + items: + type: array + items: + $ref: '#/components/schemas/DifferentEnum' + default: [] + description: 'A Model for testing all the ways enums can be used ' + additionalProperties: false + AnEnum: + title: AnEnum + enum: + - FIRST_VALUE + - SECOND_VALUE + description: 'For testing Enums in all the ways they can be used ' + AnEnumWithNull: + title: AnEnumWithNull + enum: + - FIRST_VALUE + - SECOND_VALUE + - null + description: 'For testing Enums with mixed string / null values ' + AnEnumWithOnlyNull: + title: AnEnumWithOnlyNull + enum: + - null + description: 'For testing Enums with only null values ' + AnAllOfEnum: + title: AnAllOfEnum + enum: + - foo + - bar + - a_default + - overridden_default + default: a_default + AnIntEnum: + title: AnIntEnum + enum: + - -1 + - 1 + - 2 + type: integer + description: An enumeration. + DifferentEnum: + title: DifferentEnum + enum: + - DIFFERENT + - OTHER + description: An enumeration. 
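As a rough illustration of how the literal-enums golden record above is meant to be consumed (a sketch only, not generated output; the package name my_enum_api_client and the model/helper names come from the golden record files shown earlier, while the usage itself is hypothetical):

from my_enum_api_client.models import AModel, PostUserListBody
from my_enum_api_client.models.an_enum import check_an_enum

# check_an_enum narrows a plain string to the AnEnum Literal type and
# raises TypeError for anything outside AN_ENUM_VALUES.
first = check_an_enum("FIRST_VALUE")

# With literal_enums enabled, the generated body model stores plain literal
# strings directly; there is no Enum class involved.
body = PostUserListBody(an_enum_value=[first])
assert body.to_dict() == {"an_enum_value": ["FIRST_VALUE"]}

# from_dict re-validates enum members through the same check_* helpers.
model = AModel.from_dict(
    {
        "an_enum_value": "SECOND_VALUE",
        "an_allof_enum_with_overridden_default": "overridden_default",
    }
)
assert model.an_enum_value == "SECOND_VALUE"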
diff --git a/end_to_end_tests/regen_golden_record.py b/end_to_end_tests/regen_golden_record.py index 0bffe132a..e01a84884 100644 --- a/end_to_end_tests/regen_golden_record.py +++ b/end_to_end_tests/regen_golden_record.py @@ -1,8 +1,6 @@ """ Regenerate golden-record """ import filecmp -import os import shutil -import tempfile from pathlib import Path from typer.testing import CliRunner @@ -10,67 +8,89 @@ from openapi_python_client.cli import app -def regen_golden_record(): +def _regenerate( + *, + spec_file_name: str, + output_dir: str = "my-test-api-client", + golden_record_dir: str | None = None, + config_file_name: str = "config.yml", + extra_args: list[str] | None = None +) -> None: + end_to_end_tests_base_path = Path(__file__).parent + project_base_path = end_to_end_tests_base_path.parent runner = CliRunner() - openapi_path = Path(__file__).parent / "baseline_openapi_3.0.json" - - gr_path = Path(__file__).parent / "golden-record" - output_path = Path.cwd() / "my-test-api-client" - config_path = Path(__file__).parent / "config.yml" + openapi_path = end_to_end_tests_base_path / spec_file_name - shutil.rmtree(gr_path, ignore_errors=True) + output_path = project_base_path / output_dir shutil.rmtree(output_path, ignore_errors=True) - result = runner.invoke( - app, ["generate", f"--config={config_path}", f"--path={openapi_path}"] - ) + args = ["generate", f"--path={openapi_path}"] + if config_file_name: + config_path = end_to_end_tests_base_path / config_file_name + args.append(f"--config={config_path}") + if extra_args: + args.extend(extra_args) + print(f"Using {spec_file_name}{f' and {config_file_name}' if config_file_name else ''}") + + result = runner.invoke(app, args) if result.stdout: print(result.stdout) if result.exception: - raise result.exception - output_path.rename(gr_path) + raise Exception(f"{result.exception} {result.stderr}") + if golden_record_dir: + gr_path = end_to_end_tests_base_path / golden_record_dir + shutil.rmtree(gr_path, ignore_errors=True) + output_path.rename(gr_path) -def regen_golden_record_3_1_features(): - runner = CliRunner() - openapi_path = Path(__file__).parent / "3.1_specific.openapi.yaml" +def regen_golden_record(): + _regenerate( + spec_file_name="baseline_openapi_3.0.json", + golden_record_dir="golden-record", + ) - gr_path = Path(__file__).parent / "test-3-1-golden-record" - output_path = Path.cwd() / "test-3-1-features-client" - shutil.rmtree(gr_path, ignore_errors=True) - shutil.rmtree(output_path, ignore_errors=True) +def regen_golden_record_3_1_features(): + _regenerate( + spec_file_name="3.1_specific.openapi.yaml", + output_dir="test-3-1-features-client", + golden_record_dir="test-3-1-golden-record", + ) - result = runner.invoke(app, ["generate", f"--path={openapi_path}"]) - if result.stdout: - print(result.stdout) - if result.exception: - raise result.exception - output_path.rename(gr_path) +def regen_literal_enums_golden_record(): + _regenerate( + spec_file_name="openapi_3.1_enums.yaml", + output_dir="my-enum-api-client", + golden_record_dir="literal-enums-golden-record", + config_file_name="literal_enums.config.yml", + ) def regen_metadata_snapshots(): - runner = CliRunner() - openapi_path = Path(__file__).parent / "3.1_specific.openapi.yaml" output_path = Path.cwd() / "test-3-1-features-client" snapshots_dir = Path(__file__).parent / "metadata_snapshots" - for (meta, file, rename_to) in (("setup", "setup.py", "setup.py"), ("pdm", "pyproject.toml", "pdm.pyproject.toml"), ("poetry", "pyproject.toml", "poetry.pyproject.toml")): - 
shutil.rmtree(output_path, ignore_errors=True) - result = runner.invoke(app, ["generate", f"--path={openapi_path}", f"--meta={meta}"]) - - if result.stdout: - print(result.stdout) - if result.exception: - raise result.exception - + for (meta, file, rename_to) in (("setup", "setup.py", "setup.py"), ("pdm", "pyproject.toml", "pdm.pyproject.toml"), ("poetry", "pyproject.toml", "poetry.pyproject.toml"), ("uv", "pyproject.toml", "uv.pyproject.toml")): + _regenerate( + spec_file_name="3.1_specific.openapi.yaml", + output_dir="test-3-1-features-client", + extra_args=[f"--meta={meta}"], + ) (output_path / file).rename(snapshots_dir / rename_to) shutil.rmtree(output_path, ignore_errors=True) +def regen_docstrings_on_attributes_golden_record(): + _regenerate( + spec_file_name="docstrings_on_attributes.yml", + golden_record_dir="docstrings-on-attributes-golden-record", + config_file_name="docstrings_on_attributes.config.yml", + ) + + def regen_custom_template_golden_record(): runner = CliRunner() openapi_path = Path(__file__).parent / "baseline_openapi_3.0.json" @@ -123,4 +143,6 @@ def regen_custom_template_golden_record(): regen_golden_record() regen_golden_record_3_1_features() regen_metadata_snapshots() + regen_docstrings_on_attributes_golden_record() regen_custom_template_golden_record() + regen_literal_enums_golden_record() diff --git a/end_to_end_tests/test-3-1-golden-record/pyproject.toml b/end_to_end_tests/test-3-1-golden-record/pyproject.toml index f9a1becf8..202b9d360 100644 --- a/end_to_end_tests/test-3-1-golden-record/pyproject.toml +++ b/end_to_end_tests/test-3-1-golden-record/pyproject.toml @@ -5,19 +5,18 @@ description = "A client library for accessing Test 3.1 Features" authors = [] readme = "README.md" packages = [ - {include = "test_3_1_features_client"}, + { include = "test_3_1_features_client" }, ] -include = ["CHANGELOG.md", "test_3_1_features_client/py.typed"] - +include = ["test_3_1_features_client/py.typed"] [tool.poetry.dependencies] -python = "^3.8" -httpx = ">=0.20.0,<0.28.0" -attrs = ">=21.3.0" +python = "^3.10" +httpx = ">=0.23.0,<0.29.0" +attrs = ">=22.2.0" python-dateutil = "^2.8.0" [build-system] -requires = ["poetry-core>=1.0.0"] +requires = ["poetry-core>=2.0.0,<3.0.0"] build-backend = "poetry.core.masonry.api" [tool.ruff] diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/__init__.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/__init__.py +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/post_const_path.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/post_const_path.py index 3f864b3dc..bf3472121 100644 --- a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/post_const_path.py +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/const/post_const_path.py @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, Literal, Optional, Union, cast +from typing import Any, Literal, cast +from urllib.parse import quote import httpx @@ -14,11 +15,11 @@ def _get_kwargs( *, body: PostConstPathBody, required_query: Literal["this always goes in the query"], - optional_query: Union[Literal["this 
sometimes goes in the query"], Unset] = UNSET, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + optional_query: Literal["this sometimes goes in the query"] | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} - params: Dict[str, Any] = {} + params: dict[str, Any] = {} params["required query"] = required_query @@ -26,15 +27,16 @@ def _get_kwargs( params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", - "url": f"/const/{path}", + "url": "/https/github.com/const/{path}".format( + path=quote(str(path), safe=""), + ), "params": params, } - _body = body.to_dict() + _kwargs["json"] = body.to_dict() - _kwargs["json"] = _body headers["Content-Type"] = "application/json" _kwargs["headers"] = headers @@ -42,11 +44,16 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Literal["Why have a fixed response? I dunno"]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Literal["Why have a fixed response? I dunno"] | None: + if response.status_code == 200: response_200 = cast(Literal["Why have a fixed response? I dunno"], response.json()) + if response_200 != "Why have a fixed response? I dunno": + raise ValueError( + f"response_200 must match const 'Why have a fixed response? I dunno', got '{response_200}'" + ) return response_200 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -54,7 +61,7 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response + *, client: AuthenticatedClient | Client, response: httpx.Response ) -> Response[Literal["Why have a fixed response? I dunno"]]: return Response( status_code=HTTPStatus(response.status_code), @@ -67,16 +74,16 @@ def _build_response( def sync_detailed( path: Literal["this goes in the path"], *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PostConstPathBody, required_query: Literal["this always goes in the query"], - optional_query: Union[Literal["this sometimes goes in the query"], Unset] = UNSET, + optional_query: Literal["this sometimes goes in the query"] | Unset = UNSET, ) -> Response[Literal["Why have a fixed response? I dunno"]]: """ Args: path (Literal['this goes in the path']): required_query (Literal['this always goes in the query']): - optional_query (Union[Literal['this sometimes goes in the query'], Unset]): + optional_query (Literal['this sometimes goes in the query'] | Unset): body (PostConstPathBody): Raises: @@ -104,16 +111,16 @@ def sync_detailed( def sync( path: Literal["this goes in the path"], *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PostConstPathBody, required_query: Literal["this always goes in the query"], - optional_query: Union[Literal["this sometimes goes in the query"], Unset] = UNSET, -) -> Optional[Literal["Why have a fixed response? I dunno"]]: + optional_query: Literal["this sometimes goes in the query"] | Unset = UNSET, +) -> Literal["Why have a fixed response? 
I dunno"] | None: """ Args: path (Literal['this goes in the path']): required_query (Literal['this always goes in the query']): - optional_query (Union[Literal['this sometimes goes in the query'], Unset]): + optional_query (Literal['this sometimes goes in the query'] | Unset): body (PostConstPathBody): Raises: @@ -136,16 +143,16 @@ def sync( async def asyncio_detailed( path: Literal["this goes in the path"], *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PostConstPathBody, required_query: Literal["this always goes in the query"], - optional_query: Union[Literal["this sometimes goes in the query"], Unset] = UNSET, + optional_query: Literal["this sometimes goes in the query"] | Unset = UNSET, ) -> Response[Literal["Why have a fixed response? I dunno"]]: """ Args: path (Literal['this goes in the path']): required_query (Literal['this always goes in the query']): - optional_query (Union[Literal['this sometimes goes in the query'], Unset]): + optional_query (Literal['this sometimes goes in the query'] | Unset): body (PostConstPathBody): Raises: @@ -171,16 +178,16 @@ async def asyncio_detailed( async def asyncio( path: Literal["this goes in the path"], *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, body: PostConstPathBody, required_query: Literal["this always goes in the query"], - optional_query: Union[Literal["this sometimes goes in the query"], Unset] = UNSET, -) -> Optional[Literal["Why have a fixed response? I dunno"]]: + optional_query: Literal["this sometimes goes in the query"] | Unset = UNSET, +) -> Literal["Why have a fixed response? I dunno"] | None: """ Args: path (Literal['this goes in the path']): required_query (Literal['this always goes in the query']): - optional_query (Union[Literal['this sometimes goes in the query'], Unset]): + optional_query (Literal['this sometimes goes in the query'] | Unset): body (PostConstPathBody): Raises: diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/prefix_items/__init__.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/prefix_items/__init__.py new file mode 100644 index 000000000..2d7c0b23d --- /dev/null +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/prefix_items/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/prefix_items/post_prefix_items.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/prefix_items/post_prefix_items.py new file mode 100644 index 000000000..5b114873e --- /dev/null +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/api/prefix_items/post_prefix_items.py @@ -0,0 +1,150 @@ +from http import HTTPStatus +from typing import Any, cast + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.post_prefix_items_body import PostPrefixItemsBody +from ...types import Response + + +def _get_kwargs( + *, + body: PostPrefixItemsBody, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": "/https/github.com/prefixItems", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> str | None: + if response.status_code == 200: + response_200 = cast(str, response.json()) + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[str]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: AuthenticatedClient | Client, + body: PostPrefixItemsBody, +) -> Response[str]: + """ + Args: + body (PostPrefixItemsBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[str] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: AuthenticatedClient | Client, + body: PostPrefixItemsBody, +) -> str | None: + """ + Args: + body (PostPrefixItemsBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + str + """ + + return sync_detailed( + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + *, + client: AuthenticatedClient | Client, + body: PostPrefixItemsBody, +) -> Response[str]: + """ + Args: + body (PostPrefixItemsBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[str] + """ + + kwargs = _get_kwargs( + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: AuthenticatedClient | Client, + body: PostPrefixItemsBody, +) -> str | None: + """ + Args: + body (PostPrefixItemsBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + str + """ + + return ( + await asyncio_detailed( + client=client, + body=body, + ) + ).parsed diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/client.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/client.py index 74b476ca8..1b7055ab8 100644 --- a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/client.py +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/client.py @@ -1,5 +1,5 @@ import ssl -from typing import Any, Dict, Optional, Union +from typing import Any import httpx from attrs import define, evolve, field @@ -35,17 +35,17 @@ class Client: """ raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str - _cookies: Dict[str, str] = field(factory=dict, kw_only=True) - _headers: Dict[str, str] = field(factory=dict, kw_only=True) - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True) - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True) - _follow_redirects: bool = field(default=False, kw_only=True) - _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True) - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) - - def with_headers(self, headers: Dict[str, str]) -> "Client": + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) + + def with_headers(self, headers: dict[str, str]) -> "Client": """Get a new client matching this one with additional headers""" if self._client is not None: self._client.headers.update(headers) @@ -53,7 +53,7 @@ def with_headers(self, headers: Dict[str, str]) -> "Client": self._async_client.headers.update(headers) return evolve(self, headers={**self._headers, **headers}) - def with_cookies(self, cookies: Dict[str, str]) -> "Client": + def with_cookies(self, cookies: dict[str, str]) -> "Client": """Get a new client matching this one with additional cookies""" if self._client is not None: self._client.cookies.update(cookies) @@ -62,7 +62,7 @@ def with_cookies(self, cookies: Dict[str, str]) -> "Client": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "Client": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if self._async_client is not None: @@ -70,7 +70,7 @@ def with_timeout(self, timeout: httpx.Timeout) -> "Client": return evolve(self, timeout=timeout) def set_httpx_client(self, client: httpx.Client) -> "Client": - """Manually the underlying httpx.Client + """Manually set the underlying httpx.Client **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
""" @@ -101,7 +101,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ @@ -165,21 +165,21 @@ class AuthenticatedClient: """ raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str - _cookies: Dict[str, str] = field(factory=dict, kw_only=True) - _headers: Dict[str, str] = field(factory=dict, kw_only=True) - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True) - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True) - _follow_redirects: bool = field(default=False, kw_only=True) - _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True) - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) token: str prefix: str = "Bearer" auth_header_name: str = "Authorization" - def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient": + def with_headers(self, headers: dict[str, str]) -> "AuthenticatedClient": """Get a new client matching this one with additional headers""" if self._client is not None: self._client.headers.update(headers) @@ -187,7 +187,7 @@ def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient": self._async_client.headers.update(headers) return evolve(self, headers={**self._headers, **headers}) - def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient": + def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": """Get a new client matching this one with additional cookies""" if self._client is not None: self._client.cookies.update(cookies) @@ -196,7 +196,7 @@ def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if self._async_client is not None: @@ -204,7 +204,7 @@ def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": return evolve(self, timeout=timeout) def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient": - """Manually the underlying httpx.Client + """Manually set the underlying httpx.Client **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
""" @@ -236,7 +236,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/__init__.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/__init__.py index f923a5c37..aeafedd08 100644 --- a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/__init__.py +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/__init__.py @@ -1,5 +1,9 @@ """Contains all the data models used in inputs/outputs""" from .post_const_path_body import PostConstPathBody +from .post_prefix_items_body import PostPrefixItemsBody -__all__ = ("PostConstPathBody",) +__all__ = ( + "PostConstPathBody", + "PostPrefixItemsBody", +) diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/post_const_path_body.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/post_const_path_body.py index 387e693e0..f59894f08 100644 --- a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/post_const_path_body.py +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/post_const_path_body.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Literal, Type, TypeVar, Union, cast +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, Literal, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -13,24 +16,24 @@ class PostConstPathBody: """ Attributes: required (Literal['this always goes in the body']): - nullable (Union[Literal['this or null goes in the body'], None]): - optional (Union[Literal['this sometimes goes in the body'], Unset]): + nullable (Literal['this or null goes in the body'] | None): + optional (Literal['this sometimes goes in the body'] | Unset): """ required: Literal["this always goes in the body"] - nullable: Union[Literal["this or null goes in the body"], None] - optional: Union[Literal["this sometimes goes in the body"], Unset] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + nullable: Literal["this or null goes in the body"] | None + optional: Literal["this sometimes goes in the body"] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: required = self.required - nullable: Union[Literal["this or null goes in the body"], None] + nullable: Literal["this or null goes in the body"] | None nullable = self.nullable optional = self.optional - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -44,18 +47,28 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() - required = d.pop("required") + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + required = cast(Literal["this always goes in the body"], d.pop("required")) + if required != "this always goes in the body": + raise 
ValueError(f"required must match const 'this always goes in the body', got '{required}'") - def _parse_nullable(data: object) -> Union[Literal["this or null goes in the body"], None]: + def _parse_nullable(data: object) -> Literal["this or null goes in the body"] | None: if data is None: return data - return cast(Union[Literal["this or null goes in the body"], None], data) + nullable_type_1 = cast(Literal["this or null goes in the body"], data) + if nullable_type_1 != "this or null goes in the body": + raise ValueError( + f"nullable_type_1 must match const 'this or null goes in the body', got '{nullable_type_1}'" + ) + return nullable_type_1 + return cast(Literal["this or null goes in the body"] | None, data) nullable = _parse_nullable(d.pop("nullable")) - optional = d.pop("optional", UNSET) + optional = cast(Literal["this sometimes goes in the body"] | Unset, d.pop("optional", UNSET)) + if optional != "this sometimes goes in the body" and not isinstance(optional, Unset): + raise ValueError(f"optional must match const 'this sometimes goes in the body', got '{optional}'") post_const_path_body = cls( required=required, @@ -67,7 +80,7 @@ def _parse_nullable(data: object) -> Union[Literal["this or null goes in the bod return post_const_path_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/post_prefix_items_body.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/post_prefix_items_body.py new file mode 100644 index 000000000..b074bb717 --- /dev/null +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/models/post_prefix_items_body.py @@ -0,0 +1,110 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, Literal, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="PostPrefixItemsBody") + + +@_attrs_define +class PostPrefixItemsBody: + """ + Attributes: + prefix_items_and_items (list[float | Literal['prefix'] | str] | Unset): + prefix_items_only (list[float | str] | Unset): + """ + + prefix_items_and_items: list[float | Literal["prefix"] | str] | Unset = UNSET + prefix_items_only: list[float | str] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + prefix_items_and_items: list[float | Literal["prefix"] | str] | Unset = UNSET + if not isinstance(self.prefix_items_and_items, Unset): + prefix_items_and_items = [] + for prefix_items_and_items_item_data in self.prefix_items_and_items: + prefix_items_and_items_item: float | Literal["prefix"] | str + prefix_items_and_items_item = prefix_items_and_items_item_data + prefix_items_and_items.append(prefix_items_and_items_item) + + prefix_items_only: list[float | str] | Unset = UNSET + if not isinstance(self.prefix_items_only, Unset): + prefix_items_only = [] + for prefix_items_only_item_data in self.prefix_items_only: + prefix_items_only_item: float | str + prefix_items_only_item = prefix_items_only_item_data + prefix_items_only.append(prefix_items_only_item) + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if prefix_items_and_items is not UNSET: + field_dict["prefixItemsAndItems"] = 
prefix_items_and_items + if prefix_items_only is not UNSET: + field_dict["prefixItemsOnly"] = prefix_items_only + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + _prefix_items_and_items = d.pop("prefixItemsAndItems", UNSET) + prefix_items_and_items: list[float | Literal["prefix"] | str] | Unset = UNSET + if _prefix_items_and_items is not UNSET: + prefix_items_and_items = [] + for prefix_items_and_items_item_data in _prefix_items_and_items: + + def _parse_prefix_items_and_items_item(data: object) -> float | Literal["prefix"] | str: + prefix_items_and_items_item_type_0 = cast(Literal["prefix"], data) + if prefix_items_and_items_item_type_0 != "prefix": + raise ValueError( + f"prefixItemsAndItems_item_type_0 must match const 'prefix', got '{prefix_items_and_items_item_type_0}'" + ) + return prefix_items_and_items_item_type_0 + return cast(float | Literal["prefix"] | str, data) + + prefix_items_and_items_item = _parse_prefix_items_and_items_item(prefix_items_and_items_item_data) + + prefix_items_and_items.append(prefix_items_and_items_item) + + _prefix_items_only = d.pop("prefixItemsOnly", UNSET) + prefix_items_only: list[float | str] | Unset = UNSET + if _prefix_items_only is not UNSET: + prefix_items_only = [] + for prefix_items_only_item_data in _prefix_items_only: + + def _parse_prefix_items_only_item(data: object) -> float | str: + return cast(float | str, data) + + prefix_items_only_item = _parse_prefix_items_only_item(prefix_items_only_item_data) + + prefix_items_only.append(prefix_items_only_item) + + post_prefix_items_body = cls( + prefix_items_and_items=prefix_items_and_items, + prefix_items_only=prefix_items_only, + ) + + post_prefix_items_body.additional_properties = d + return post_prefix_items_body + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/types.py b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/types.py index 21fac106f..b64af0952 100644 --- a/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/types.py +++ b/end_to_end_tests/test-3-1-golden-record/test_3_1_features_client/types.py @@ -1,7 +1,8 @@ """Contains some shared types for properties""" +from collections.abc import Mapping, MutableMapping from http import HTTPStatus -from typing import BinaryIO, Generic, Literal, MutableMapping, Optional, Tuple, TypeVar +from typing import IO, BinaryIO, Generic, Literal, TypeVar from attrs import define @@ -13,7 +14,15 @@ def __bool__(self) -> Literal[False]: UNSET: Unset = Unset() -FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]] +# The types that `httpx.Client(files=)` can accept, copied from that library. 
+FileContent = IO[bytes] | bytes | str +FileTypes = ( + # (filename, file (or bytes), content_type) + tuple[str | None, FileContent, str | None] + # (filename, file (or bytes), content_type, headers) + | tuple[str | None, FileContent, str | None, Mapping[str, str]] +) +RequestFiles = list[tuple[str, FileTypes]] @define @@ -21,10 +30,10 @@ class File: """Contains information for file uploads""" payload: BinaryIO - file_name: Optional[str] = None - mime_type: Optional[str] = None + file_name: str | None = None + mime_type: str | None = None - def to_tuple(self) -> FileJsonType: + def to_tuple(self) -> FileTypes: """Return a tuple representation that httpx will accept for multipart/form-data""" return self.file_name, self.payload, self.mime_type @@ -39,7 +48,7 @@ class Response(Generic[T]): status_code: HTTPStatus content: bytes headers: MutableMapping[str, str] - parsed: Optional[T] + parsed: T | None -__all__ = ["File", "Response", "FileJsonType", "Unset", "UNSET"] +__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/end_to_end_tests/test_custom_templates/api_init.py.jinja b/end_to_end_tests/test_custom_templates/api_init.py.jinja index 0c5720bf2..78e473147 100644 --- a/end_to_end_tests/test_custom_templates/api_init.py.jinja +++ b/end_to_end_tests/test_custom_templates/api_init.py.jinja @@ -1,6 +1,5 @@ """ Contains methods for accessing the API """ -from typing import Type {% for tag in endpoint_collections_by_tag.keys() %} from .{{ tag }} import {{ class_name(tag) }}Endpoints {% endfor %} @@ -8,6 +7,6 @@ from .{{ tag }} import {{ class_name(tag) }}Endpoints class {{ class_name(package_name) }}Api: {% for tag in endpoint_collections_by_tag.keys() %} @classmethod - def {{ tag }}(cls) -> Type[{{ class_name(tag) }}Endpoints]: + def {{ tag }}(cls) -> type[{{ class_name(tag) }}Endpoints]: return {{ class_name(tag) }}Endpoints {% endfor %} diff --git a/end_to_end_tests/test_custom_templates/models_init.py.jinja b/end_to_end_tests/test_custom_templates/models_init.py.jinja new file mode 100644 index 000000000..8b0a55aee --- /dev/null +++ b/end_to_end_tests/test_custom_templates/models_init.py.jinja @@ -0,0 +1,33 @@ + +# Testing that we can access model-related information via Jinja variables. + +# To avoid having to update this file in the golden record every time the test specs are changed, +# we won't include all the classes in this output - we'll just look for one of them. 
+ +# Using "alls" +{% for name in alls %} +{% if name == "AModel" %} +# {{ name }} +{% endif %} +{% endfor %} + +# Using "imports" +{% for import in imports %} +{% if import.endswith("import AModel") %} +# {{ import }} +{% endif %} +{% endfor %} + +# Using "openapi.models" +{% for model in openapi.models %} +{% if model.class_info.name == "AModel" %} +# {{ model.class_info.name }} ({{ model.class_info.module_name }}) +{% endif %} +{% endfor %} + +# Using "openapi.enums" +{% for enum in openapi.enums %} +{% if enum.class_info.name == "AnEnum" %} +# {{ enum.class_info.name }} ({{ enum.class_info.module_name }}) +{% endif %} +{% endfor %} diff --git a/end_to_end_tests/test_end_to_end.py b/end_to_end_tests/test_end_to_end.py index 9087beca3..93a4695ac 100644 --- a/end_to_end_tests/test_end_to_end.py +++ b/end_to_end_tests/test_end_to_end.py @@ -1,21 +1,23 @@ import shutil from filecmp import cmpfiles, dircmp from pathlib import Path -from typing import Dict, List, Optional, Set import pytest from click.testing import Result from typer.testing import CliRunner +from end_to_end_tests.generated_client import ( + _run_command, generate_client, generate_client_from_inline_spec, +) from openapi_python_client.cli import app def _compare_directories( record: Path, test_subject: Path, - expected_differences: Dict[Path, str], - expected_missing: Optional[Set[str]] = None, - ignore: List[str] = None, + expected_differences: dict[Path, str] | None = None, + expected_missing: set[str] | None = None, + ignore: list[str] = None, depth=0, ): """ @@ -78,56 +80,31 @@ def _compare_directories( def run_e2e_test( openapi_document: str, - extra_args: List[str], - expected_differences: Optional[Dict[Path, str]] = None, + extra_args: list[str], + expected_differences: dict[Path, str] | None = None, golden_record_path: str = "golden-record", output_path: str = "my-test-api-client", - expected_missing: Optional[Set[str]] = None, + expected_missing: set[str] | None = None, + specify_output_path_explicitly: bool = True, ) -> Result: - output_path = Path.cwd() / output_path - shutil.rmtree(output_path, ignore_errors=True) - result = generate(extra_args, openapi_document) - gr_path = Path(__file__).parent / golden_record_path - - expected_differences = expected_differences or {} - # Use absolute paths for expected differences for easier comparisons - expected_differences = { - output_path.joinpath(key): value for key, value in expected_differences.items() - } - _compare_directories( - gr_path, output_path, expected_differences=expected_differences, expected_missing=expected_missing - ) - - import mypy.api - - out, err, status = mypy.api.run([str(output_path), "--strict"]) - assert status == 0, f"Type checking client failed: {out}" - - shutil.rmtree(output_path) - return result - + with generate_client(openapi_document, extra_args, output_path, specify_output_path_explicitly=specify_output_path_explicitly) as g: + gr_path = Path(__file__).parent / golden_record_path + + expected_differences = expected_differences or {} + # Use absolute paths for expected differences for easier comparisons + expected_differences = { + g.output_path.joinpath(key): value for key, value in expected_differences.items() + } + _compare_directories( + gr_path, g.output_path, expected_differences=expected_differences, expected_missing=expected_missing + ) -def generate(extra_args: Optional[List[str]], openapi_document: str) -> Result: - """Generate a client from an OpenAPI document and return the path to the generated code""" - 
_run_command("generate", extra_args, openapi_document) + import mypy.api + out, err, status = mypy.api.run([str(g.output_path), "--strict"]) + assert status == 0, f"Type checking client failed: {out}" -def _run_command(command: str, extra_args: Optional[List[str]] = None, openapi_document: Optional[str] = None, url: Optional[str] = None, config_path: Optional[Path] = None) -> Result: - """Generate a client from an OpenAPI document and return the path to the generated code""" - runner = CliRunner() - if openapi_document is not None: - openapi_path = Path(__file__).parent / openapi_document - source_arg = f"--path={openapi_path}" - else: - source_arg = f"--url={url}" - config_path = config_path or (Path(__file__).parent / "config.yml") - args = [command, f"--config={config_path}", source_arg] - if extra_args: - args.extend(extra_args) - result = runner.invoke(app, args) - if result.exit_code != 0: - raise Exception(result.stdout) - return result + return g.generator_result def test_baseline_end_to_end_3_0(): @@ -148,27 +125,38 @@ def test_3_1_specific_features(): ) +def test_literal_enums_end_to_end(): + config_path = Path(__file__).parent / "literal_enums.config.yml" + run_e2e_test( + "openapi_3.1_enums.yaml", + [f"--config={config_path}"], + {}, + "literal-enums-golden-record", + "my-enum-api-client" + ) + + @pytest.mark.parametrize( "meta,generated_file,expected_file", ( ("setup", "setup.py", "setup.py"), ("pdm", "pyproject.toml", "pdm.pyproject.toml"), ("poetry", "pyproject.toml", "poetry.pyproject.toml"), + ("uv", "pyproject.toml", "uv.pyproject.toml"), ) ) -def test_meta(meta: str, generated_file: Optional[str], expected_file: Optional[str]): - output_path = Path.cwd() / "test-3-1-features-client" - shutil.rmtree(output_path, ignore_errors=True) - generate([f"--meta={meta}"], "3.1_specific.openapi.yaml") - - if generated_file and expected_file: - assert (output_path / generated_file).exists() - assert ( - (output_path / generated_file).read_text() == - (Path(__file__).parent / "metadata_snapshots" / expected_file).read_text() - ) - - shutil.rmtree(output_path) +def test_meta(meta: str, generated_file: str | None, expected_file: str | None): + with generate_client( + "3.1_specific.openapi.yaml", + extra_args=[f"--meta={meta}"], + output_path="test-3-1-features-client", + ) as g: + if generated_file and expected_file: + assert (g.output_path / generated_file).exists() + assert ( + (g.output_path / generated_file).read_text() == + (Path(__file__).parent / "metadata_snapshots" / expected_file).read_text() + ) def test_none_meta(): @@ -178,6 +166,17 @@ def test_none_meta(): golden_record_path="test-3-1-golden-record/test_3_1_features_client", output_path="test_3_1_features_client", expected_missing={"py.typed"}, + specify_output_path_explicitly=False, + ) + + +def test_docstrings_on_attributes(): + config_path = Path(__file__).parent / "docstrings_on_attributes.config.yml" + run_e2e_test( + "docstrings_on_attributes.yml", + [f"--config={config_path}"], + {}, + "docstrings-on-attributes-golden-record", ) @@ -186,6 +185,7 @@ def test_custom_templates(): {} ) # key: path relative to generated directory, value: expected generated content api_dir = Path("my_test_api_client").joinpath("api") + models_dir = Path("my_test_api_client").joinpath("models") golden_tpls_root_dir = Path(__file__).parent.joinpath( "custom-templates-golden-record" ) @@ -193,6 +193,7 @@ def test_custom_templates(): expected_difference_paths = [ Path("README.md"), api_dir.joinpath("__init__.py"), + 
models_dir.joinpath("__init__.py"), ] for expected_difference_path in expected_difference_paths: @@ -215,74 +216,75 @@ def test_custom_templates(): ) -@pytest.mark.parametrize( - "command", ("generate", "update") -) -def test_bad_url(command: str): +def test_bad_url(): runner = CliRunner() - result = runner.invoke(app, [command, "--url=not_a_url"]) + result = runner.invoke(app, ["generate", "--url=not_a_url"]) assert result.exit_code == 1 - assert "Could not get OpenAPI document from provided URL" in result.stdout + assert "Could not get OpenAPI document from provided URL" in result.stderr + + +ERROR_DOCUMENTS = [path for path in Path(__file__).parent.joinpath("documents_with_errors").iterdir() if path.is_file()] + + +@pytest.mark.parametrize("document", ERROR_DOCUMENTS, ids=[path.stem for path in ERROR_DOCUMENTS]) +def test_documents_with_errors(snapshot, document): + with generate_client( + document, + extra_args=["--fail-on-warning"], + output_path="test-documents-with-errors", + raise_on_error=False, + ) as g: + result = g.generator_result + assert result.exit_code == 1 + output = (result.stdout + result.stderr).replace(str(g.output_path), "/test-documents-with-errors") + assert output == snapshot def test_custom_post_hooks(): - shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) - runner = CliRunner() - openapi_document = Path(__file__).parent / "baseline_openapi_3.0.json" config_path = Path(__file__).parent / "custom_post_hooks.config.yml" - result = runner.invoke(app, ["generate", f"--path={openapi_document}", f"--config={config_path}"]) - assert result.exit_code == 1 - assert "this should fail" in result.stdout - shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) + with generate_client( + "baseline_openapi_3.0.json", + [f"--config={config_path}"], + raise_on_error=False, + ) as g: + assert g.generator_result.exit_code == 1 + assert "this should fail" in g.generator_result.stderr def test_generate_dir_already_exists(): project_dir = Path.cwd() / "my-test-api-client" if not project_dir.exists(): project_dir.mkdir() - runner = CliRunner() - openapi_document = Path(__file__).parent / "baseline_openapi_3.0.json" - result = runner.invoke(app, ["generate", f"--path={openapi_document}"]) - assert result.exit_code == 1 - assert "Directory already exists" in result.stdout - shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) - - -def test_update_dir_not_found(): - project_dir = Path.cwd() / "my-test-api-client" - shutil.rmtree(project_dir, ignore_errors=True) - runner = CliRunner() - openapi_document = Path(__file__).parent / "baseline_openapi_3.0.json" - result = runner.invoke(app, ["update", f"--path={openapi_document}"]) - assert result.exit_code == 1 - assert str(project_dir) in result.stdout - - -@pytest.mark.parametrize( - ("file_name", "content", "expected_error"), - ( - ("invalid_openapi.yaml", "not a valid openapi document", "Failed to parse OpenAPI document"), - ("invalid_json.json", "Invalid JSON", "Invalid JSON"), - ("invalid_yaml.yaml", "{", "Invalid YAML"), - ), - ids=("invalid_openapi", "invalid_json", "invalid_yaml") -) -def test_invalid_openapi_document(file_name, content, expected_error): - runner = CliRunner() - openapi_document = Path.cwd() / file_name - openapi_document.write_text(content) - result = runner.invoke(app, ["generate", f"--path={openapi_document}"]) - assert result.exit_code == 1 - assert expected_error in result.stdout - openapi_document.unlink() + try: + runner = CliRunner() + openapi_document = 
Path(__file__).parent / "baseline_openapi_3.0.json" + result = runner.invoke(app, ["generate", f"--path={openapi_document}"]) + assert result.exit_code == 1 + assert "Directory already exists" in result.stderr + finally: + shutil.rmtree(Path.cwd() / "my-test-api-client", ignore_errors=True) def test_update_integration_tests(): - url = "https://raw.githubusercontent.com/openapi-generators/openapi-test-server/main/openapi.json" + url = "https://raw.githubusercontent.com/openapi-generators/openapi-test-server/refs/tags/v0.2.1/openapi.yaml" source_path = Path(__file__).parent.parent / "integration-tests" - project_path = Path.cwd() / "integration-tests" - if source_path != project_path: # Just in case someone runs this from root dir - shutil.copytree(source_path, project_path) - config_path = project_path / "config.yaml" - _run_command("update", url=url, config_path=config_path) - _compare_directories(source_path, project_path, expected_differences={}) + temp_dir = Path.cwd() / "test_update_integration_tests" + shutil.rmtree(temp_dir, ignore_errors=True) + + try: + shutil.copytree(source_path, temp_dir) + config_path = source_path / "config.yaml" + _run_command( + "generate", + extra_args=["--overwrite", "--meta=pdm", f"--output-path={temp_dir}"], + url=url, + config_path=config_path + ) + _compare_directories(source_path, temp_dir, ignore=["pyproject.toml"]) + import mypy.api + + out, err, status = mypy.api.run([str(temp_dir), "--strict"]) + assert status == 0, f"Type checking client failed: {out=} {err=}" + + finally: + shutil.rmtree(temp_dir) diff --git a/integration-tests/.gitignore b/integration-tests/.gitignore index ed29cb977..79a2c3d73 100644 --- a/integration-tests/.gitignore +++ b/integration-tests/.gitignore @@ -20,4 +20,4 @@ dmypy.json .idea/ /coverage.xml -/.coverage \ No newline at end of file +/.coverage diff --git a/integration-tests/config.yaml b/integration-tests/config.yaml index 80153f799..8b6e35763 100644 --- a/integration-tests/config.yaml +++ b/integration-tests/config.yaml @@ -1,5 +1,4 @@ project_name_override: integration-tests post_hooks: - ruff check . --fix - - ruff format . - - mypy . --strict \ No newline at end of file + - ruff format . 
\ No newline at end of file diff --git a/integration-tests/integration_tests/api/body/__init__.py b/integration-tests/integration_tests/api/body/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/integration-tests/integration_tests/api/body/__init__.py +++ b/integration-tests/integration_tests/api/body/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/integration-tests/integration_tests/api/body/post_body_multipart.py b/integration-tests/integration_tests/api/body/post_body_multipart.py index c64b4c4c2..58c217231 100644 --- a/integration-tests/integration_tests/api/body/post_body_multipart.py +++ b/integration-tests/integration_tests/api/body/post_body_multipart.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -8,39 +8,40 @@ from ...models.post_body_multipart_body import PostBodyMultipartBody from ...models.post_body_multipart_response_200 import PostBodyMultipartResponse200 from ...models.public_error import PublicError -from ...types import Response +from ...types import UNSET, Response, Unset def _get_kwargs( *, - body: PostBodyMultipartBody, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} + body: PostBodyMultipartBody | Unset = UNSET, +) -> dict[str, Any]: + headers: dict[str, Any] = {} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/body/multipart", } - _body = body.to_multipart() - - _kwargs["files"] = _body + if not isinstance(body, Unset): + _kwargs["files"] = body.to_multipart() _kwargs["headers"] = headers return _kwargs def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[PostBodyMultipartResponse200, PublicError]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> PostBodyMultipartResponse200 | PublicError | None: + if response.status_code == 200: response_200 = PostBodyMultipartResponse200.from_dict(response.json()) return response_200 - if response.status_code == HTTPStatus.BAD_REQUEST: + + if response.status_code == 400: response_400 = PublicError.from_dict(response.json()) return response_400 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -48,8 +49,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[PostBodyMultipartResponse200, PublicError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[PostBodyMultipartResponse200 | PublicError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -60,19 +61,19 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], - body: PostBodyMultipartBody, -) -> Response[Union[PostBodyMultipartResponse200, PublicError]]: + client: AuthenticatedClient | Client, + body: PostBodyMultipartBody | Unset = UNSET, +) -> Response[PostBodyMultipartResponse200 | PublicError]: """ Args: - body (PostBodyMultipartBody): + body (PostBodyMultipartBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[PostBodyMultipartResponse200, PublicError]] + Response[PostBodyMultipartResponse200 | PublicError] """ kwargs = _get_kwargs( @@ -88,19 +89,19 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], - body: PostBodyMultipartBody, -) -> Optional[Union[PostBodyMultipartResponse200, PublicError]]: + client: AuthenticatedClient | Client, + body: PostBodyMultipartBody | Unset = UNSET, +) -> PostBodyMultipartResponse200 | PublicError | None: """ Args: - body (PostBodyMultipartBody): + body (PostBodyMultipartBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[PostBodyMultipartResponse200, PublicError] + PostBodyMultipartResponse200 | PublicError """ return sync_detailed( @@ -111,19 +112,19 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], - body: PostBodyMultipartBody, -) -> Response[Union[PostBodyMultipartResponse200, PublicError]]: + client: AuthenticatedClient | Client, + body: PostBodyMultipartBody | Unset = UNSET, +) -> Response[PostBodyMultipartResponse200 | PublicError]: """ Args: - body (PostBodyMultipartBody): + body (PostBodyMultipartBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[PostBodyMultipartResponse200, PublicError]] + Response[PostBodyMultipartResponse200 | PublicError] """ kwargs = _get_kwargs( @@ -137,19 +138,19 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], - body: PostBodyMultipartBody, -) -> Optional[Union[PostBodyMultipartResponse200, PublicError]]: + client: AuthenticatedClient | Client, + body: PostBodyMultipartBody | Unset = UNSET, +) -> PostBodyMultipartResponse200 | PublicError | None: """ Args: - body (PostBodyMultipartBody): + body (PostBodyMultipartBody | Unset): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[PostBodyMultipartResponse200, PublicError] + PostBodyMultipartResponse200 | PublicError """ return ( diff --git a/integration-tests/integration_tests/api/parameters/__init__.py b/integration-tests/integration_tests/api/parameters/__init__.py index e69de29bb..2d7c0b23d 100644 --- a/integration-tests/integration_tests/api/parameters/__init__.py +++ b/integration-tests/integration_tests/api/parameters/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/integration-tests/integration_tests/api/parameters/post_parameters_header.py b/integration-tests/integration_tests/api/parameters/post_parameters_header.py index 784eaf37f..190b7efe0 100644 --- a/integration-tests/integration_tests/api/parameters/post_parameters_header.py +++ b/integration-tests/integration_tests/api/parameters/post_parameters_header.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Any, Dict, Optional, Union +from typing import Any import httpx @@ -16,8 +16,8 @@ def _get_kwargs( string_header: str, number_header: float, integer_header: int, -) -> Dict[str, Any]: - headers: Dict[str, Any] = {} +) -> dict[str, Any]: + headers: dict[str, Any] = {} headers["Boolean-Header"] = "true" if boolean_header else "false" headers["String-Header"] = string_header @@ -26,7 +26,7 @@ def _get_kwargs( headers["Integer-Header"] = str(integer_header) - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "post", "url": "/https/github.com/parameters/header", } @@ -36,16 +36,18 @@ def _get_kwargs( def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[PostParametersHeaderResponse200, PublicError]]: - if response.status_code == HTTPStatus.OK: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> PostParametersHeaderResponse200 | PublicError | None: + if response.status_code == 200: response_200 = PostParametersHeaderResponse200.from_dict(response.json()) return response_200 - if response.status_code == HTTPStatus.BAD_REQUEST: + + if response.status_code == 400: response_400 = PublicError.from_dict(response.json()) return response_400 + if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, response.content) else: @@ -53,8 +55,8 @@ def _parse_response( def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[PostParametersHeaderResponse200, PublicError]]: + *, client: AuthenticatedClient | Client, response: httpx.Response +) -> Response[PostParametersHeaderResponse200 | PublicError]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -65,12 +67,12 @@ def _build_response( def sync_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, boolean_header: bool, string_header: str, number_header: float, integer_header: int, -) -> Response[Union[PostParametersHeaderResponse200, PublicError]]: +) -> Response[PostParametersHeaderResponse200 | PublicError]: """ Args: boolean_header (bool): @@ -83,7 +85,7 @@ def sync_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Response[Union[PostParametersHeaderResponse200, PublicError]] + Response[PostParametersHeaderResponse200 | PublicError] """ kwargs = _get_kwargs( @@ -102,12 +104,12 @@ def sync_detailed( def sync( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, boolean_header: bool, string_header: str, number_header: float, integer_header: int, -) -> Optional[Union[PostParametersHeaderResponse200, PublicError]]: +) -> PostParametersHeaderResponse200 | PublicError | None: """ Args: boolean_header (bool): @@ -120,7 +122,7 @@ def sync( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Union[PostParametersHeaderResponse200, PublicError] + PostParametersHeaderResponse200 | PublicError """ return sync_detailed( @@ -134,12 +136,12 @@ def sync( async def asyncio_detailed( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, boolean_header: bool, string_header: str, number_header: float, integer_header: int, -) -> Response[Union[PostParametersHeaderResponse200, PublicError]]: +) -> Response[PostParametersHeaderResponse200 | PublicError]: """ Args: boolean_header (bool): @@ -152,7 +154,7 @@ async def asyncio_detailed( httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: - Response[Union[PostParametersHeaderResponse200, PublicError]] + Response[PostParametersHeaderResponse200 | PublicError] """ kwargs = _get_kwargs( @@ -169,12 +171,12 @@ async def asyncio_detailed( async def asyncio( *, - client: Union[AuthenticatedClient, Client], + client: AuthenticatedClient | Client, boolean_header: bool, string_header: str, number_header: float, integer_header: int, -) -> Optional[Union[PostParametersHeaderResponse200, PublicError]]: +) -> PostParametersHeaderResponse200 | PublicError | None: """ Args: boolean_header (bool): @@ -187,7 +189,7 @@ async def asyncio( httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: - Union[PostParametersHeaderResponse200, PublicError] + PostParametersHeaderResponse200 | PublicError """ return ( diff --git a/integration-tests/integration_tests/client.py b/integration-tests/integration_tests/client.py index 74b476ca8..1b7055ab8 100644 --- a/integration-tests/integration_tests/client.py +++ b/integration-tests/integration_tests/client.py @@ -1,5 +1,5 @@ import ssl -from typing import Any, Dict, Optional, Union +from typing import Any import httpx from attrs import define, evolve, field @@ -35,17 +35,17 @@ class Client: """ raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str - _cookies: Dict[str, str] = field(factory=dict, kw_only=True) - _headers: Dict[str, str] = field(factory=dict, kw_only=True) - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True) - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True) - _follow_redirects: bool = field(default=False, kw_only=True) - _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True) - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) - - def with_headers(self, headers: Dict[str, str]) -> "Client": + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) + + def with_headers(self, headers: dict[str, str]) -> "Client": """Get a new client matching this one with additional headers""" if self._client is not None: self._client.headers.update(headers) @@ -53,7 +53,7 @@ def with_headers(self, headers: Dict[str, str]) -> "Client": self._async_client.headers.update(headers) return evolve(self, headers={**self._headers, **headers}) - def with_cookies(self, cookies: Dict[str, str]) -> "Client": + def with_cookies(self, cookies: dict[str, str]) -> "Client": """Get a new client matching this one with additional cookies""" if self._client is not None: self._client.cookies.update(cookies) @@ -62,7 +62,7 @@ def with_cookies(self, cookies: Dict[str, str]) -> "Client": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "Client": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if self._async_client is not None: @@ -70,7 +70,7 @@ def with_timeout(self, timeout: httpx.Timeout) -> "Client": return evolve(self, timeout=timeout) def set_httpx_client(self, client: httpx.Client) -> "Client": - """Manually the underlying httpx.Client + """Manually set the underlying httpx.Client **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
""" @@ -101,7 +101,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ @@ -165,21 +165,21 @@ class AuthenticatedClient: """ raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str - _cookies: Dict[str, str] = field(factory=dict, kw_only=True) - _headers: Dict[str, str] = field(factory=dict, kw_only=True) - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True) - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True) - _follow_redirects: bool = field(default=False, kw_only=True) - _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True) - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) token: str prefix: str = "Bearer" auth_header_name: str = "Authorization" - def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient": + def with_headers(self, headers: dict[str, str]) -> "AuthenticatedClient": """Get a new client matching this one with additional headers""" if self._client is not None: self._client.headers.update(headers) @@ -187,7 +187,7 @@ def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient": self._async_client.headers.update(headers) return evolve(self, headers={**self._headers, **headers}) - def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient": + def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": """Get a new client matching this one with additional cookies""" if self._client is not None: self._client.cookies.update(cookies) @@ -196,7 +196,7 @@ def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient": return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if self._async_client is not None: @@ -204,7 +204,7 @@ def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": return evolve(self, timeout=timeout) def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient": - """Manually the underlying httpx.Client + """Manually set the underlying httpx.Client **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
""" @@ -236,7 +236,7 @@ def __exit__(self, *args: Any, **kwargs: Any) -> None: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ diff --git a/integration-tests/integration_tests/models/__init__.py b/integration-tests/integration_tests/models/__init__.py index 275cf6faa..257cfe9fa 100644 --- a/integration-tests/integration_tests/models/__init__.py +++ b/integration-tests/integration_tests/models/__init__.py @@ -1,5 +1,7 @@ """Contains all the data models used in inputs/outputs""" +from .an_object import AnObject +from .file import File from .post_body_multipart_body import PostBodyMultipartBody from .post_body_multipart_response_200 import PostBodyMultipartResponse200 from .post_parameters_header_response_200 import PostParametersHeaderResponse200 @@ -7,6 +9,8 @@ from .public_error import PublicError __all__ = ( + "AnObject", + "File", "PostBodyMultipartBody", "PostBodyMultipartResponse200", "PostParametersHeaderResponse200", diff --git a/integration-tests/integration_tests/models/an_object.py b/integration-tests/integration_tests/models/an_object.py new file mode 100644 index 000000000..228568b8c --- /dev/null +++ b/integration-tests/integration_tests/models/an_object.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="AnObject") + + +@_attrs_define +class AnObject: + """ + Attributes: + an_int (int): + a_float (float): + """ + + an_int: int + a_float: float + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + an_int = self.an_int + + a_float = self.a_float + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "an_int": an_int, + "a_float": a_float, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + an_int = d.pop("an_int") + + a_float = d.pop("a_float") + + an_object = cls( + an_int=an_int, + a_float=a_float, + ) + + an_object.additional_properties = d + return an_object + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/integration-tests/integration_tests/models/file.py b/integration-tests/integration_tests/models/file.py new file mode 100644 index 000000000..2380e8737 --- /dev/null +++ b/integration-tests/integration_tests/models/file.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="File") + + +@_attrs_define +class File: + """ + Attributes: + data (str | Unset): 
Echo of content of the 'file' input parameter from the form. + name (str | Unset): The name of the file uploaded. + content_type (str | Unset): The content type of the file uploaded. + """ + + data: str | Unset = UNSET + name: str | Unset = UNSET + content_type: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + data = self.data + + name = self.name + + content_type = self.content_type + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if data is not UNSET: + field_dict["data"] = data + if name is not UNSET: + field_dict["name"] = name + if content_type is not UNSET: + field_dict["content_type"] = content_type + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + data = d.pop("data", UNSET) + + name = d.pop("name", UNSET) + + content_type = d.pop("content_type", UNSET) + + file = cls( + data=data, + name=name, + content_type=content_type, + ) + + file.additional_properties = d + return file + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/integration-tests/integration_tests/models/post_body_multipart_body.py b/integration-tests/integration_tests/models/post_body_multipart_body.py index de9992232..09f437501 100644 --- a/integration-tests/integration_tests/models/post_body_multipart_body.py +++ b/integration-tests/integration_tests/models/post_body_multipart_body.py @@ -1,10 +1,21 @@ +from __future__ import annotations + +import datetime +import json +from collections.abc import Mapping from io import BytesIO -from typing import Any, Dict, List, Type, TypeVar, Union +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from .. import types +from ..types import File + +if TYPE_CHECKING: + from ..models.an_object import AnObject -from ..types import UNSET, File, Unset T = TypeVar("T", bound="PostBodyMultipartBody") @@ -14,84 +25,118 @@ class PostBodyMultipartBody: """ Attributes: a_string (str): - file (File): For the sake of this test, include a file name and content type. The payload should also be valid - UTF-8. 
- description (Union[Unset, str]): + files (list[File]): + description (str): + objects (list[AnObject]): + times (list[datetime.datetime]): """ a_string: str - file: File - description: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + files: list[File] + description: str + objects: list[AnObject] + times: list[datetime.datetime] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a_string = self.a_string - file = self.file.to_tuple() + files = [] + for files_item_data in self.files: + files_item = files_item_data.to_tuple() + + files.append(files_item) description = self.description - field_dict: Dict[str, Any] = {} + objects = [] + for objects_item_data in self.objects: + objects_item = objects_item_data.to_dict() + objects.append(objects_item) + + times = [] + for times_item_data in self.times: + times_item = times_item_data.isoformat() + times.append(times_item) + + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { "a_string": a_string, - "file": file, + "files": files, + "description": description, + "objects": objects, + "times": times, } ) - if description is not UNSET: - field_dict["description"] = description return field_dict - def to_multipart(self) -> Dict[str, Any]: - a_string = ( - self.a_string if isinstance(self.a_string, Unset) else (None, str(self.a_string).encode(), "text/plain") - ) + def to_multipart(self) -> types.RequestFiles: + files: types.RequestFiles = [] - file = self.file.to_tuple() + files.append(("a_string", (None, str(self.a_string).encode(), "text/plain"))) - description = ( - self.description - if isinstance(self.description, Unset) - else (None, str(self.description).encode(), "text/plain") - ) + for files_item_element in self.files: + files.append(("files", files_item_element.to_tuple())) - field_dict: Dict[str, Any] = {} - field_dict.update( - {key: (None, str(value).encode(), "text/plain") for key, value in self.additional_properties.items()} - ) - field_dict.update( - { - "a_string": a_string, - "file": file, - } - ) - if description is not UNSET: - field_dict["description"] = description + files.append(("description", (None, str(self.description).encode(), "text/plain"))) - return field_dict + for objects_item_element in self.objects: + files.append(("objects", (None, json.dumps(objects_item_element.to_dict()).encode(), "application/json"))) + + for times_item_element in self.times: + files.append(("times", (None, times_item_element.isoformat().encode(), "text/plain"))) + + for prop_name, prop in self.additional_properties.items(): + files.append((prop_name, (None, str(prop).encode(), "text/plain"))) + + return files @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.an_object import AnObject + + d = dict(src_dict) a_string = d.pop("a_string") - file = File(payload=BytesIO(d.pop("file"))) + files = [] + _files = d.pop("files") + for files_item_data in _files: + files_item = File(payload=BytesIO(files_item_data)) + + files.append(files_item) + + description = d.pop("description") + + objects = [] + _objects = d.pop("objects") + for objects_item_data in _objects: + objects_item = AnObject.from_dict(objects_item_data) + + objects.append(objects_item) + + times = [] + _times = d.pop("times") + for times_item_data in 
_times: + times_item = isoparse(times_item_data) - description = d.pop("description", UNSET) + times.append(times_item) post_body_multipart_body = cls( a_string=a_string, - file=file, + files=files, description=description, + objects=objects, + times=times, ) post_body_multipart_body.additional_properties = d return post_body_multipart_body @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/integration-tests/integration_tests/models/post_body_multipart_response_200.py b/integration-tests/integration_tests/models/post_body_multipart_response_200.py index 79359ec41..20ba3eebd 100644 --- a/integration-tests/integration_tests/models/post_body_multipart_response_200.py +++ b/integration-tests/integration_tests/models/post_body_multipart_response_200.py @@ -1,7 +1,17 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +import datetime +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field +from dateutil.parser import isoparse + +if TYPE_CHECKING: + from ..models.an_object import AnObject + from ..models.file import File + T = TypeVar("T", bound="PostBodyMultipartResponse200") @@ -11,70 +21,97 @@ class PostBodyMultipartResponse200: """ Attributes: a_string (str): Echo of the 'a_string' input parameter from the form. - file_data (str): Echo of content of the 'file' input parameter from the form. description (str): Echo of the 'description' input parameter from the form. - file_name (str): The name of the file uploaded. - file_content_type (str): The content type of the file uploaded. 
+ files (list[File]): + times (list[datetime.datetime]): + objects (list[AnObject]): """ a_string: str - file_data: str description: str - file_name: str - file_content_type: str - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + files: list[File] + times: list[datetime.datetime] + objects: list[AnObject] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: a_string = self.a_string - file_data = self.file_data - description = self.description - file_name = self.file_name + files = [] + for files_item_data in self.files: + files_item = files_item_data.to_dict() + files.append(files_item) + + times = [] + for times_item_data in self.times: + times_item = times_item_data.isoformat() + times.append(times_item) - file_content_type = self.file_content_type + objects = [] + for objects_item_data in self.objects: + objects_item = objects_item_data.to_dict() + objects.append(objects_item) - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { "a_string": a_string, - "file_data": file_data, "description": description, - "file_name": file_name, - "file_content_type": file_content_type, + "files": files, + "times": times, + "objects": objects, } ) return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() - a_string = d.pop("a_string") + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.an_object import AnObject + from ..models.file import File - file_data = d.pop("file_data") + d = dict(src_dict) + a_string = d.pop("a_string") description = d.pop("description") - file_name = d.pop("file_name") + files = [] + _files = d.pop("files") + for files_item_data in _files: + files_item = File.from_dict(files_item_data) + + files.append(files_item) + + times = [] + _times = d.pop("times") + for times_item_data in _times: + times_item = isoparse(times_item_data) + + times.append(times_item) + + objects = [] + _objects = d.pop("objects") + for objects_item_data in _objects: + objects_item = AnObject.from_dict(objects_item_data) - file_content_type = d.pop("file_content_type") + objects.append(objects_item) post_body_multipart_response_200 = cls( a_string=a_string, - file_data=file_data, description=description, - file_name=file_name, - file_content_type=file_content_type, + files=files, + times=times, + objects=objects, ) post_body_multipart_response_200.additional_properties = d return post_body_multipart_response_200 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/integration-tests/integration_tests/models/post_parameters_header_response_200.py b/integration-tests/integration_tests/models/post_parameters_header_response_200.py index 03e688ba1..d5baeabbd 100644 --- a/integration-tests/integration_tests/models/post_parameters_header_response_200.py +++ b/integration-tests/integration_tests/models/post_parameters_header_response_200.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -20,9 +23,9 @@ class PostParametersHeaderResponse200: string: str number: float 
integer: int - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: boolean = self.boolean string = self.string @@ -31,7 +34,7 @@ def to_dict(self) -> Dict[str, Any]: integer = self.integer - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { @@ -45,8 +48,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) boolean = d.pop("boolean") string = d.pop("string") @@ -66,7 +69,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return post_parameters_header_response_200 @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/integration-tests/integration_tests/models/problem.py b/integration-tests/integration_tests/models/problem.py index bde5b6d37..a7f270874 100644 --- a/integration-tests/integration_tests/models/problem.py +++ b/integration-tests/integration_tests/models/problem.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Type, TypeVar, Union +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -12,20 +15,20 @@ class Problem: """ Attributes: - parameter_name (Union[Unset, str]): - description (Union[Unset, str]): + parameter_name (str | Unset): + description (str | Unset): """ - parameter_name: Union[Unset, str] = UNSET - description: Union[Unset, str] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + parameter_name: str | Unset = UNSET + description: str | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: parameter_name = self.parameter_name description = self.description - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if parameter_name is not UNSET: @@ -36,8 +39,8 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - d = src_dict.copy() + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) parameter_name = d.pop("parameter_name", UNSET) description = d.pop("description", UNSET) @@ -51,7 +54,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return problem @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/integration-tests/integration_tests/models/public_error.py b/integration-tests/integration_tests/models/public_error.py index 993bd8ad3..40c711bed 100644 --- a/integration-tests/integration_tests/models/public_error.py +++ b/integration-tests/integration_tests/models/public_error.py @@ -1,4 +1,7 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast +from __future__ import annotations + +from 
collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field @@ -16,39 +19,39 @@ class PublicError: """ Attributes: - errors (Union[Unset, List[str]]): - extra_parameters (Union[Unset, List[str]]): - invalid_parameters (Union[Unset, List['Problem']]): - missing_parameters (Union[Unset, List[str]]): + errors (list[str] | Unset): + extra_parameters (list[str] | Unset): + invalid_parameters (list[Problem] | Unset): + missing_parameters (list[str] | Unset): """ - errors: Union[Unset, List[str]] = UNSET - extra_parameters: Union[Unset, List[str]] = UNSET - invalid_parameters: Union[Unset, List["Problem"]] = UNSET - missing_parameters: Union[Unset, List[str]] = UNSET - additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + errors: list[str] | Unset = UNSET + extra_parameters: list[str] | Unset = UNSET + invalid_parameters: list[Problem] | Unset = UNSET + missing_parameters: list[str] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - def to_dict(self) -> Dict[str, Any]: - errors: Union[Unset, List[str]] = UNSET + def to_dict(self) -> dict[str, Any]: + errors: list[str] | Unset = UNSET if not isinstance(self.errors, Unset): errors = self.errors - extra_parameters: Union[Unset, List[str]] = UNSET + extra_parameters: list[str] | Unset = UNSET if not isinstance(self.extra_parameters, Unset): extra_parameters = self.extra_parameters - invalid_parameters: Union[Unset, List[Dict[str, Any]]] = UNSET + invalid_parameters: list[dict[str, Any]] | Unset = UNSET if not isinstance(self.invalid_parameters, Unset): invalid_parameters = [] for invalid_parameters_item_data in self.invalid_parameters: invalid_parameters_item = invalid_parameters_item_data.to_dict() invalid_parameters.append(invalid_parameters_item) - missing_parameters: Union[Unset, List[str]] = UNSET + missing_parameters: list[str] | Unset = UNSET if not isinstance(self.missing_parameters, Unset): missing_parameters = self.missing_parameters - field_dict: Dict[str, Any] = {} + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) if errors is not UNSET: @@ -63,22 +66,24 @@ def to_dict(self) -> Dict[str, Any]: return field_dict @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.problem import Problem - d = src_dict.copy() - errors = cast(List[str], d.pop("errors", UNSET)) + d = dict(src_dict) + errors = cast(list[str], d.pop("errors", UNSET)) - extra_parameters = cast(List[str], d.pop("extra_parameters", UNSET)) + extra_parameters = cast(list[str], d.pop("extra_parameters", UNSET)) - invalid_parameters = [] _invalid_parameters = d.pop("invalid_parameters", UNSET) - for invalid_parameters_item_data in _invalid_parameters or []: - invalid_parameters_item = Problem.from_dict(invalid_parameters_item_data) + invalid_parameters: list[Problem] | Unset = UNSET + if _invalid_parameters is not UNSET: + invalid_parameters = [] + for invalid_parameters_item_data in _invalid_parameters: + invalid_parameters_item = Problem.from_dict(invalid_parameters_item_data) - invalid_parameters.append(invalid_parameters_item) + invalid_parameters.append(invalid_parameters_item) - missing_parameters = cast(List[str], d.pop("missing_parameters", UNSET)) + missing_parameters = cast(list[str], d.pop("missing_parameters", UNSET)) public_error = cls( 
errors=errors, @@ -91,7 +96,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: return public_error @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> Any: diff --git a/integration-tests/integration_tests/types.py b/integration-tests/integration_tests/types.py index 21fac106f..b64af0952 100644 --- a/integration-tests/integration_tests/types.py +++ b/integration-tests/integration_tests/types.py @@ -1,7 +1,8 @@ """Contains some shared types for properties""" +from collections.abc import Mapping, MutableMapping from http import HTTPStatus -from typing import BinaryIO, Generic, Literal, MutableMapping, Optional, Tuple, TypeVar +from typing import IO, BinaryIO, Generic, Literal, TypeVar from attrs import define @@ -13,7 +14,15 @@ def __bool__(self) -> Literal[False]: UNSET: Unset = Unset() -FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]] +# The types that `httpx.Client(files=)` can accept, copied from that library. +FileContent = IO[bytes] | bytes | str +FileTypes = ( + # (filename, file (or bytes), content_type) + tuple[str | None, FileContent, str | None] + # (filename, file (or bytes), content_type, headers) + | tuple[str | None, FileContent, str | None, Mapping[str, str]] +) +RequestFiles = list[tuple[str, FileTypes]] @define @@ -21,10 +30,10 @@ class File: """Contains information for file uploads""" payload: BinaryIO - file_name: Optional[str] = None - mime_type: Optional[str] = None + file_name: str | None = None + mime_type: str | None = None - def to_tuple(self) -> FileJsonType: + def to_tuple(self) -> FileTypes: """Return a tuple representation that httpx will accept for multipart/form-data""" return self.file_name, self.payload, self.mime_type @@ -39,7 +48,7 @@ class Response(Generic[T]): status_code: HTTPStatus content: bytes headers: MutableMapping[str, str] - parsed: Optional[T] + parsed: T | None -__all__ = ["File", "Response", "FileJsonType", "Unset", "UNSET"] +__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/integration-tests/pdm.lock b/integration-tests/pdm.lock index 6d9908e6a..6eab58f9c 100644 --- a/integration-tests/pdm.lock +++ b/integration-tests/pdm.lock @@ -3,47 +3,61 @@ [metadata] groups = ["default", "dev"] -strategy = ["cross_platform", "inherit_metadata"] -lock_version = "4.4.1" -content_hash = "sha256:21f2d31fc91486810f21163e5ce7d73ebd8265f44bbef79d817d14c61d97c34a" +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:9404cbb9c293ee893b3e763b97fc5db2cb27fc7911dabedc0c3306a965a69a44" + +[[metadata.targets]] +requires_python = "~=3.10" [[package]] name = "anyio" -version = "4.2.0" -requires_python = ">=3.8" -summary = "High level compatibility layer for multiple asynchronous event loop implementations" +version = "4.12.0" +requires_python = ">=3.9" +summary = "High-level concurrency and networking framework on top of asyncio or Trio" groups = ["default"] dependencies = [ "exceptiongroup>=1.0.2; python_version < \"3.11\"", "idna>=2.8", - "sniffio>=1.1", - "typing-extensions>=4.1; python_version < \"3.11\"", + "typing-extensions>=4.5; python_version < \"3.13\"", ] files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.12.0-py3-none-any.whl", 
hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb"}, + {file = "anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0"}, ] [[package]] name = "attrs" -version = "23.2.0" -requires_python = ">=3.7" +version = "25.4.0" +requires_python = ">=3.9" summary = "Classes Without Boilerplate" groups = ["default"] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +requires_python = "<3.11,>=3.8" +summary = "Backport of asyncio.Runner, a context manager that controls event loop life cycle." +groups = ["dev"] +marker = "python_version < \"3.11\"" +files = [ + {file = "backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"}, + {file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"}, ] [[package]] name = "certifi" -version = "2023.11.17" -requires_python = ">=3.6" +version = "2026.1.4" +requires_python = ">=3.7" summary = "Python package for providing Mozilla's CA Bundle." groups = ["default"] files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, ] [[package]] @@ -60,45 +74,48 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.3.1" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" groups = ["default", "dev"] marker = "python_version < \"3.11\"" +dependencies = [ + "typing-extensions>=4.6.0; python_version < \"3.13\"", +] files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, ] [[package]] name = "h11" -version = "0.14.0" -requires_python = ">=3.7" +version = "0.16.0" +requires_python = ">=3.8" summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" groups = ["default"] files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = 
"sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.9" requires_python = ">=3.8" summary = "A minimal low-level HTTP client." groups = ["default"] dependencies = [ "certifi", - "h11<0.15,>=0.13", + "h11>=0.16", ] files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [[package]] name = "httpx" -version = "0.27.0" +version = "0.28.1" requires_python = ">=3.8" summary = "The next generation HTTP client." groups = ["default"] @@ -107,140 +124,247 @@ dependencies = [ "certifi", "httpcore==1.*", "idna", - "sniffio", ] files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [[package]] name = "idna" -version = "3.6" -requires_python = ">=3.5" +version = "3.11" +requires_python = ">=3.8" summary = "Internationalized Domain Names in Applications (IDNA)" groups = ["default"] files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [[package]] name = "iniconfig" -version = "2.0.0" -requires_python = ">=3.7" +version = "2.3.0" +requires_python = ">=3.10" summary = "brain-dead simple config-ini parsing" groups = ["dev"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "librt" +version = "0.7.7" +requires_python = ">=3.9" +summary = "Mypyc runtime library" +groups = ["dev"] +marker = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "librt-0.7.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4836c5645f40fbdc275e5670819bde5ab5f2e882290d304e3c6ddab1576a6d0"}, + {file = "librt-0.7.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae8aec43117a645a31e5f60e9e3a0797492e747823b9bda6972d521b436b4e8"}, + {file = 
"librt-0.7.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:aea05f701ccd2a76b34f0daf47ca5068176ff553510b614770c90d76ac88df06"}, + {file = "librt-0.7.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b16ccaeff0ed4355dfb76fe1ea7a5d6d03b5ad27f295f77ee0557bc20a72495"}, + {file = "librt-0.7.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48c7e150c095d5e3cea7452347ba26094be905d6099d24f9319a8b475fcd3e0"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4dcee2f921a8632636d1c37f1bbdb8841d15666d119aa61e5399c5268e7ce02e"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14ef0f4ac3728ffd85bfc58e2f2f48fb4ef4fa871876f13a73a7381d10a9f77c"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e4ab69fa37f8090f2d971a5d2bc606c7401170dbdae083c393d6cbf439cb45b8"}, + {file = "librt-0.7.7-cp310-cp310-win32.whl", hash = "sha256:4bf3cc46d553693382d2abf5f5bd493d71bb0f50a7c0beab18aa13a5545c8900"}, + {file = "librt-0.7.7-cp310-cp310-win_amd64.whl", hash = "sha256:f0c8fe5aeadd8a0e5b0598f8a6ee3533135ca50fd3f20f130f9d72baf5c6ac58"}, + {file = "librt-0.7.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a487b71fbf8a9edb72a8c7a456dda0184642d99cd007bc819c0b7ab93676a8ee"}, + {file = "librt-0.7.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4d4efb218264ecf0f8516196c9e2d1a0679d9fb3bb15df1155a35220062eba8"}, + {file = "librt-0.7.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b8bb331aad734b059c4b450cd0a225652f16889e286b2345af5e2c3c625c3d85"}, + {file = "librt-0.7.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:467dbd7443bda08338fc8ad701ed38cef48194017554f4c798b0a237904b3f99"}, + {file = "librt-0.7.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50d1d1ee813d2d1a3baf2873634ba506b263032418d16287c92ec1cc9c1a00cb"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7e5070cf3ec92d98f57574da0224f8c73faf1ddd6d8afa0b8c9f6e86997bc74"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bdb9f3d865b2dafe7f9ad7f30ef563c80d0ddd2fdc8cc9b8e4f242f475e34d75"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8185c8497d45164e256376f9da5aed2bb26ff636c798c9dabe313b90e9f25b28"}, + {file = "librt-0.7.7-cp311-cp311-win32.whl", hash = "sha256:44d63ce643f34a903f09ff7ca355aae019a3730c7afd6a3c037d569beeb5d151"}, + {file = "librt-0.7.7-cp311-cp311-win_amd64.whl", hash = "sha256:7d13cc340b3b82134f8038a2bfe7137093693dcad8ba5773da18f95ad6b77a8a"}, + {file = "librt-0.7.7-cp311-cp311-win_arm64.whl", hash = "sha256:983de36b5a83fe9222f4f7dcd071f9b1ac6f3f17c0af0238dadfb8229588f890"}, + {file = "librt-0.7.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a85a1fc4ed11ea0eb0a632459ce004a2d14afc085a50ae3463cd3dfe1ce43fc"}, + {file = "librt-0.7.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c87654e29a35938baead1c4559858f346f4a2a7588574a14d784f300ffba0efd"}, + {file = "librt-0.7.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c9faaebb1c6212c20afd8043cd6ed9de0a47d77f91a6b5b48f4e46ed470703fe"}, + {file = "librt-0.7.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1908c3e5a5ef86b23391448b47759298f87f997c3bd153a770828f58c2bb4630"}, + {file = "librt-0.7.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbc4900e95a98fc0729523be9d93a8fedebb026f32ed9ffc08acd82e3e181503"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7ea4e1fbd253e5c68ea0fe63d08577f9d288a73f17d82f652ebc61fa48d878d"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ef7699b7a5a244b1119f85c5bbc13f152cd38240cbb2baa19b769433bae98e50"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:955c62571de0b181d9e9e0a0303c8bc90d47670a5eff54cf71bf5da61d1899cf"}, + {file = "librt-0.7.7-cp312-cp312-win32.whl", hash = "sha256:1bcd79be209313b270b0e1a51c67ae1af28adad0e0c7e84c3ad4b5cb57aaa75b"}, + {file = "librt-0.7.7-cp312-cp312-win_amd64.whl", hash = "sha256:4353ee891a1834567e0302d4bd5e60f531912179578c36f3d0430f8c5e16b456"}, + {file = "librt-0.7.7-cp312-cp312-win_arm64.whl", hash = "sha256:a76f1d679beccccdf8c1958e732a1dfcd6e749f8821ee59d7bec009ac308c029"}, + {file = "librt-0.7.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f4a0b0a3c86ba9193a8e23bb18f100d647bf192390ae195d84dfa0a10fb6244"}, + {file = "librt-0.7.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5335890fea9f9e6c4fdf8683061b9ccdcbe47c6dc03ab8e9b68c10acf78be78d"}, + {file = "librt-0.7.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b4346b1225be26def3ccc6c965751c74868f0578cbcba293c8ae9168483d811"}, + {file = "librt-0.7.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a10b8eebdaca6e9fdbaf88b5aefc0e324b763a5f40b1266532590d5afb268a4c"}, + {file = "librt-0.7.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:067be973d90d9e319e6eb4ee2a9b9307f0ecd648b8a9002fa237289a4a07a9e7"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:23d2299ed007812cccc1ecef018db7d922733382561230de1f3954db28433977"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6b6f8ea465524aa4c7420c7cc4ca7d46fe00981de8debc67b1cc2e9957bb5b9d"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8df32a99cc46eb0ee90afd9ada113ae2cafe7e8d673686cf03ec53e49635439"}, + {file = "librt-0.7.7-cp313-cp313-win32.whl", hash = "sha256:86f86b3b785487c7760247bcdac0b11aa8bf13245a13ed05206286135877564b"}, + {file = "librt-0.7.7-cp313-cp313-win_amd64.whl", hash = "sha256:4862cb2c702b1f905c0503b72d9d4daf65a7fdf5a9e84560e563471e57a56949"}, + {file = "librt-0.7.7-cp313-cp313-win_arm64.whl", hash = "sha256:0996c83b1cb43c00e8c87835a284f9057bc647abd42b5871e5f941d30010c832"}, + {file = "librt-0.7.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:23daa1ab0512bafdd677eb1bfc9611d8ffbe2e328895671e64cb34166bc1b8c8"}, + {file = "librt-0.7.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:558a9e5a6f3cc1e20b3168fb1dc802d0d8fa40731f6e9932dcc52bbcfbd37111"}, + {file = "librt-0.7.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2567cb48dc03e5b246927ab35cbb343376e24501260a9b5e30b8e255dca0d1d2"}, + {file = "librt-0.7.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6066c638cdf85ff92fc6f932d2d73c93a0e03492cdfa8778e6d58c489a3d7259"}, + {file = "librt-0.7.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:a609849aca463074c17de9cda173c276eb8fee9e441053529e7b9e249dc8b8ee"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:add4e0a000858fe9bb39ed55f31085506a5c38363e6eb4a1e5943a10c2bfc3d1"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a3bfe73a32bd0bdb9a87d586b05a23c0a1729205d79df66dee65bb2e40d671ba"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0ecce0544d3db91a40f8b57ae26928c02130a997b540f908cefd4d279d6c5848"}, + {file = "librt-0.7.7-cp314-cp314-win32.whl", hash = "sha256:8f7a74cf3a80f0c3b0ec75b0c650b2f0a894a2cec57ef75f6f72c1e82cdac61d"}, + {file = "librt-0.7.7-cp314-cp314-win_amd64.whl", hash = "sha256:3d1fe2e8df3268dd6734dba33ededae72ad5c3a859b9577bc00b715759c5aaab"}, + {file = "librt-0.7.7-cp314-cp314-win_arm64.whl", hash = "sha256:2987cf827011907d3dfd109f1be0d61e173d68b1270107bb0e89f2fca7f2ed6b"}, + {file = "librt-0.7.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8e92c8de62b40bfce91d5e12c6e8b15434da268979b1af1a6589463549d491e6"}, + {file = "librt-0.7.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f683dcd49e2494a7535e30f779aa1ad6e3732a019d80abe1309ea91ccd3230e3"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b15e5d17812d4d629ff576699954f74e2cc24a02a4fc401882dd94f81daba45"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c084841b879c4d9b9fa34e5d5263994f21aea7fd9c6add29194dbb41a6210536"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c8fb9966f84737115513fecbaf257f9553d067a7dd45a69c2c7e5339e6a8dc"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9b5fb1ecb2c35362eab2dbd354fd1efa5a8440d3e73a68be11921042a0edc0ff"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:d1454899909d63cc9199a89fcc4f81bdd9004aef577d4ffc022e600c412d57f3"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7ef28f2e7a016b29792fe0a2dd04dec75725b32a1264e390c366103f834a9c3a"}, + {file = "librt-0.7.7-cp314-cp314t-win32.whl", hash = "sha256:5e419e0db70991b6ba037b70c1d5bbe92b20ddf82f31ad01d77a347ed9781398"}, + {file = "librt-0.7.7-cp314-cp314t-win_amd64.whl", hash = "sha256:d6b7d93657332c817b8d674ef6bf1ab7796b4f7ce05e420fd45bd258a72ac804"}, + {file = "librt-0.7.7-cp314-cp314t-win_arm64.whl", hash = "sha256:142c2cd91794b79fd0ce113bd658993b7ede0fe93057668c2f98a45ca00b7e91"}, + {file = "librt-0.7.7.tar.gz", hash = "sha256:81d957b069fed1890953c3b9c3895c7689960f233eea9a1d9607f71ce7f00b2c"}, ] [[package]] name = "mypy" -version = "1.9.0" -requires_python = ">=3.8" +version = "1.19.1" +requires_python = ">=3.9" summary = "Optional static typing for Python" groups = ["dev"] dependencies = [ + "librt>=0.6.2; platform_python_implementation != \"PyPy\"", "mypy-extensions>=1.0.0", + "pathspec>=0.9.0", "tomli>=1.1.0; python_version < \"3.11\"", - "typing-extensions>=4.1.0", + "typing-extensions>=4.6.0", ] files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74"}, + {file = "mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1"}, + {file = "mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331"}, + {file = "mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925"}, + {file = "mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8"}, + {file = "mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a"}, + {file = "mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef"}, + {file = "mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75"}, + {file = "mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045"}, + {file = "mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957"}, + {file = "mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f"}, + {file = "mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247"}, + {file = "mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba"}, ] [[package]] name = "mypy-extensions" -version = "1.0.0" -requires_python = ">=3.5" +version = "1.1.0" +requires_python = ">=3.8" summary = "Type system extensions for programs checked with the mypy type checker." groups = ["dev"] files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] name = "packaging" -version = "23.2" -requires_python = ">=3.7" +version = "25.0" +requires_python = ">=3.8" summary = "Core utilities for Python packages" groups = ["dev"] files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] -name = "pluggy" -version = "1.4.0" +name = "pathspec" +version = "0.12.1" requires_python = ">=3.8" +summary = "Utility library for gitignore style pattern matching of file paths." 
+groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +requires_python = ">=3.9" summary = "plugin and hook calling mechanisms for python" groups = ["dev"] files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [[package]] -name = "pytest" -version = "8.1.1" +name = "pygments" +version = "2.19.2" requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." +groups = ["dev"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[[package]] +name = "pytest" +version = "9.0.2" +requires_python = ">=3.10" summary = "pytest: simple powerful testing with Python" groups = ["dev"] dependencies = [ - "colorama; sys_platform == \"win32\"", - "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", - "iniconfig", - "packaging", - "pluggy<2.0,>=1.4", + "colorama>=0.4; sys_platform == \"win32\"", + "exceptiongroup>=1; python_version < \"3.11\"", + "iniconfig>=1.0.1", + "packaging>=22", + "pluggy<2,>=1.5", + "pygments>=2.7.2", "tomli>=1; python_version < \"3.11\"", ] files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b"}, + {file = "pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11"}, ] [[package]] name = "pytest-asyncio" -version = "0.23.5.post1" -requires_python = ">=3.8" +version = "1.3.0" +requires_python = ">=3.10" summary = "Pytest support for asyncio" groups = ["dev"] dependencies = [ - "pytest<9,>=7.0.0", + "backports-asyncio-runner<2,>=1.1; python_version < \"3.11\"", + "pytest<10,>=8.2", + "typing-extensions>=4.12; python_version < \"3.13\"", ] files = [ - {file = "pytest-asyncio-0.23.5.post1.tar.gz", hash = "sha256:b9a8806bea78c21276bc34321bbf234ba1b2ea5b30d9f0ce0f2dea45e4685813"}, - {file = "pytest_asyncio-0.23.5.post1-py3-none-any.whl", hash = "sha256:30f54d27774e79ac409778889880242b0403d09cabd65b727ce90fe92dd5d80e"}, + {file = "pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5"}, + {file = "pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5"}, ] [[package]] @@ -259,45 +383,85 @@ files = [ [[package]] name = "six" -version = "1.16.0" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.17.0" +requires_python = 
"!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Python 2 and 3 compatibility utilities" groups = ["default"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] -name = "sniffio" -version = "1.3.0" -requires_python = ">=3.7" -summary = "Sniff out which async library your code is running under" -groups = ["default"] +name = "tomli" +version = "2.3.0" +requires_python = ">=3.8" +summary = "A lil' TOML parser" +groups = ["dev"] +marker = "python_version < \"3.11\"" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = 
"sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, ] [[package]] -name = "tomli" -version = "2.0.1" -requires_python = ">=3.7" -summary = "A lil' TOML parser" +name = "types-python-dateutil" +version = "2.9.0.20251115" +requires_python = ">=3.9" +summary = "Typing stubs for python-dateutil" groups = ["dev"] -marker = "python_version < \"3.11\"" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "types_python_dateutil-2.9.0.20251115-py3-none-any.whl", hash = "sha256:9cf9c1c582019753b8639a081deefd7e044b9fa36bd8217f565c6c4e36ee0624"}, + {file = "types_python_dateutil-2.9.0.20251115.tar.gz", hash = "sha256:8a47f2c3920f52a994056b8786309b43143faa5a64d4cbb2722d6addabdf1a58"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" -requires_python = ">=3.8" -summary = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.15.0" +requires_python = ">=3.9" +summary = "Backported and Experimental Type Hints for Python 3.9+" groups = ["default", "dev"] files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] diff --git a/integration-tests/pdm.minimal.lock b/integration-tests/pdm.minimal.lock new file mode 100644 index 000000000..18e20097e --- /dev/null +++ b/integration-tests/pdm.minimal.lock @@ -0,0 +1,381 @@ +# This file is @generated by PDM. +# It is not intended for manual editing. 
+ +[metadata] +groups = ["default", "dev"] +strategy = ["direct_minimal_versions", "inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:9404cbb9c293ee893b3e763b97fc5db2cb27fc7911dabedc0c3306a965a69a44" + +[[metadata.targets]] +requires_python = "~=3.10" + +[[package]] +name = "anyio" +version = "3.7.1" +requires_python = ">=3.7" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["default"] +dependencies = [ + "exceptiongroup; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, +] + +[[package]] +name = "attrs" +version = "22.2.0" +requires_python = ">=3.6" +summary = "Classes Without Boilerplate" +groups = ["default"] +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +requires_python = ">=3.7" +summary = "Python package for providing Mozilla's CA Bundle." +groups = ["default"] +files = [ + {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, + {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." +groups = ["dev"] +marker = "sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["default", "dev"] +marker = "python_version < \"3.11\"" +dependencies = [ + "typing-extensions>=4.6.0; python_version < \"3.13\"", +] +files = [ + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, +] + +[[package]] +name = "h11" +version = "0.12.0" +requires_python = ">=3.6" +summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +groups = ["default"] +files = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] + +[[package]] +name = "httpcore" +version = "0.15.0" +requires_python = ">=3.7" +summary = "A minimal low-level HTTP client." 
+groups = ["default"] +dependencies = [ + "anyio==3.*", + "certifi", + "h11<0.13,>=0.11", + "sniffio==1.*", +] +files = [ + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, +] + +[[package]] +name = "httpx" +version = "0.23.0" +requires_python = ">=3.7" +summary = "The next generation HTTP client." +groups = ["default"] +dependencies = [ + "certifi", + "httpcore<0.16.0,>=0.15.0", + "rfc3986[idna2008]<2,>=1.3", + "sniffio", +] +files = [ + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, +] + +[[package]] +name = "idna" +version = "3.11" +requires_python = ">=3.8" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +requires_python = ">=3.10" +summary = "brain-dead simple config-ini parsing" +groups = ["dev"] +files = [ + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "mypy" +version = "1.13.0" +requires_python = ">=3.8" +summary = "Optional static typing for Python" +groups = ["dev"] +dependencies = [ + "mypy-extensions>=1.0.0", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +requires_python = ">=3.8" +summary = "Type system extensions for programs checked with the mypy type checker." 
+groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "packaging" +version = "25.0" +requires_python = ">=3.8" +summary = "Core utilities for Python packages" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +requires_python = ">=3.9" +summary = "plugin and hook calling mechanisms for python" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[[package]] +name = "pytest" +version = "8.0.1" +requires_python = ">=3.8" +summary = "pytest: simple powerful testing with Python" +groups = ["dev"] +dependencies = [ + "colorama; sys_platform == \"win32\"", + "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", + "iniconfig", + "packaging", + "pluggy<2.0,>=1.3.0", + "tomli>=1.0.0; python_version < \"3.11\"", +] +files = [ + {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, + {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, +] + +[[package]] +name = "pytest-asyncio" +version = "0.23.5" +requires_python = ">=3.8" +summary = "Pytest support for asyncio" +groups = ["dev"] +dependencies = [ + "pytest<9,>=7.0.0", +] +files = [ + {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, + {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, +] + +[[package]] +name = "python-dateutil" +version = "2.8.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Extensions to the standard Python datetime module" +groups = ["default"] +dependencies = [ + "six>=1.5", +] +files = [ + {file = "python-dateutil-2.8.0.tar.gz", hash = "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"}, + {file = "python_dateutil-2.8.0-py2.py3-none-any.whl", hash = "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb"}, +] + +[[package]] +name = "rfc3986" +version = "1.5.0" +summary = "Validating URI References per RFC 3986" +groups = ["default"] +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[[package]] +name = "rfc3986" +version = "1.5.0" +extras = ["idna2008"] +summary = "Validating URI References per RFC 3986" +groups = ["default"] +dependencies = [ + "idna", + "rfc3986==1.5.0", +] +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = 
"sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[[package]] +name = "six" +version = "1.17.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Python 2 and 3 compatibility utilities" +groups = ["default"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +groups = ["default"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "tomli" +version = "2.3.0" +requires_python = ">=3.8" +summary = "A lil' TOML parser" +groups = ["dev"] +marker = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = 
"sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240315" +requires_python = ">=3.8" +summary = "Typing stubs for python-dateutil" +groups = ["dev"] +files = [ + {file = "types-python-dateutil-2.9.0.20240315.tar.gz", hash = "sha256:c1f6310088eb9585da1b9f811765b989ed2e2cdd4203c1a367e944b666507e4e"}, + {file = "types_python_dateutil-2.9.0.20240315-py3-none-any.whl", hash = "sha256:78aa9124f360df90bb6e85eb1a4d06e75425445bf5ecb13774cb0adef7ff3956"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +requires_python = ">=3.9" +summary = "Backported and Experimental Type Hints for Python 3.9+" +groups = ["default", "dev"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] diff --git a/integration-tests/pyproject.toml b/integration-tests/pyproject.toml index 67b0d7f52..b95a0aef7 100644 --- a/integration-tests/pyproject.toml +++ b/integration-tests/pyproject.toml @@ -1,32 +1,36 @@ [project] name = "integration-tests" -version = "0.0.1" +version = "0.1.0" description = "A client library for accessing OpenAPI Test Server" authors = [] readme = "README.md" +requires-python = ">=3.10,<4.0" dependencies = [ - "httpx>=0.20.0,<0.28.0", - "attrs>=21.3.0", + "httpx>=0.23.0,<0.29.0", + "attrs>=22.2.0", "python-dateutil>=2.8.0", ] -requires-python = ">=3.8,<4.0" [tool.pdm] distribution = true [tool.pdm.dev-dependencies] dev = [ - "pytest", - "mypy", + "pytest>8", + "mypy>=1.13", "pytest-asyncio>=0.23.5", + "types-python-dateutil>=2.9", ] [build-system] requires = ["pdm-backend"] build-backend = "pdm.backend" - + [tool.ruff] line-length = 120 [tool.ruff.lint] select = ["F", "I"] + +[tool.mypy] +# Just to get mypy to _not_ look at the parent directory's config diff --git a/integration-tests/tests/test_api/test_body/test_post_body_multipart.py b/integration-tests/tests/test_api/test_body/test_post_body_multipart.py index 54498ebc5..d6c211daf 100644 --- a/integration-tests/tests/test_api/test_body/test_post_body_multipart.py +++ b/integration-tests/tests/test_api/test_body/test_post_body_multipart.py @@ -1,3 +1,4 @@ +from datetime import datetime, timedelta, timezone from io import BytesIO from typing import Any @@ -5,49 +6,67 @@ from integration_tests.api.body import post_body_multipart from integration_tests.client import Client +from integration_tests.models import AnObject, PublicError from integration_tests.models.post_body_multipart_body import PostBodyMultipartBody from integration_tests.models.post_body_multipart_response_200 import PostBodyMultipartResponse200 -from integration_tests.types import File +from integration_tests.types import File, Response + +body = PostBodyMultipartBody( + a_string="a test string", + description="super descriptive thing", + files=[ + File( + payload=BytesIO(b"some file content"), + file_name="cool_stuff.txt", + mime_type="application/openapi-python-client", + ), + File( 
+ payload=BytesIO(b"more file content"), + file_name=None, + mime_type=None, + ), + ], + times=[datetime.now(timezone.utc) - timedelta(days=1), datetime.now(timezone.utc)], + objects=[ + AnObject( + an_int=1, + a_float=2.3, + ), + AnObject( + an_int=4, + a_float=5.6, + ), + ], +) -def test(client: Client) -> None: - a_string = "a test string" - payload = b"some file content" - file_name = "cool_stuff.txt" - mime_type = "application/openapi-python-client" - description = "super descriptive thing" +def check_response(response: Response[PostBodyMultipartResponse200 | PublicError]) -> None: + content = response.parsed + if not isinstance(content, PostBodyMultipartResponse200): + raise AssertionError(f"Received status {response.status_code} from test server with payload: {content!r}") + + assert content.a_string == body.a_string + assert content.description == body.description + assert content.times == body.times + assert content.objects == body.objects + assert len(content.files) == len(body.files) + for i, file in enumerate(content.files): + body.files[i].payload.seek(0) + assert file.data == body.files[i].payload.read().decode() + assert file.name == body.files[i].file_name + assert file.content_type == body.files[i].mime_type + +def test(client: Client) -> None: response = post_body_multipart.sync_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) - content = response.parsed - if not isinstance(content, PostBodyMultipartResponse200): - raise AssertionError(f"Received status {response.status_code} from test server with payload: {content!r}") - - assert content.a_string == a_string - assert content.file_name == file_name - assert content.file_content_type == mime_type - assert content.file_data.encode() == payload - assert content.description == description + check_response(response) def test_custom_hooks() -> None: - a_string = "a test string" - payload = b"some file content" - file_name = "cool_stuff.txt" - mime_type = "application/openapi-python-client" - description = "super descriptive thing" - request_hook_called = False response_hook_called = False @@ -65,15 +84,7 @@ def log_response(*_: Any, **__: Any) -> None: post_body_multipart.sync_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) assert request_hook_called @@ -81,149 +92,51 @@ def log_response(*_: Any, **__: Any) -> None: def test_context_manager(client: Client) -> None: - a_string = "a test string" - payload = b"some file content" - file_name = "cool_stuff.txt" - mime_type = "application/openapi-python-client" - description = "super descriptive thing" - with client as client: post_body_multipart.sync_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) response = post_body_multipart.sync_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) with pytest.raises(RuntimeError): post_body_multipart.sync_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - 
payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) - content = response.parsed - if not isinstance(content, PostBodyMultipartResponse200): - raise AssertionError(f"Received status {response.status_code} from test server with payload: {content!r}") - - assert content.a_string == a_string - assert content.file_name == file_name - assert content.file_content_type == mime_type - assert content.file_data.encode() == payload - assert content.description == description + check_response(response) @pytest.mark.asyncio async def test_async(client: Client) -> None: - a_string = "a test string" - payload = b"some file content" - file_name = "cool_stuff.txt" - mime_type = "application/openapi-python-client" - description = "super descriptive thing" - response = await post_body_multipart.asyncio_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) - content = response.parsed - if not isinstance(content, PostBodyMultipartResponse200): - raise AssertionError(f"Received status {response.status_code} from test server with payload: {content!r}") - - assert content.a_string == a_string - assert content.file_name == file_name - assert content.file_content_type == mime_type - assert content.file_data.encode() == payload - assert content.description == description + check_response(response) @pytest.mark.asyncio async def test_async_context_manager(client: Client) -> None: - a_string = "a test string" - payload = b"some file content" - file_name = "cool_stuff.txt" - mime_type = "application/openapi-python-client" - description = "super descriptive thing" - async with client as client: await post_body_multipart.asyncio_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) response = await post_body_multipart.asyncio_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) with pytest.raises(RuntimeError): await post_body_multipart.asyncio_detailed( client=client, - body=PostBodyMultipartBody( - a_string=a_string, - file=File( - payload=BytesIO(payload), - file_name=file_name, - mime_type=mime_type, - ), - description=description, - ), + body=body, ) - content = response.parsed - if not isinstance(content, PostBodyMultipartResponse200): - raise AssertionError(f"Received status {response.status_code} from test server with payload: {content!r}") - - assert content.a_string == a_string - assert content.file_name == file_name - assert content.file_content_type == mime_type - assert content.file_data.encode() == payload - assert content.description == description + check_response(response) diff --git a/knope.toml b/knope.toml index 6042399c2..7859e93b5 100644 --- a/knope.toml +++ b/knope.toml @@ -1,48 +1,13 @@ [package] versioned_files = ["pyproject.toml"] changelog = "CHANGELOG.md" - -[[workflows]] -name = "prepare-release" - -[[workflows.steps]] -type = "Command" -command = "git switch -c release" - -[[workflows.steps]] -type = "PrepareRelease" - -[[workflows.steps]] -type = "Command" -command = "git commit -m \"chore: prepare release $version\" && git push --force 
--set-upstream origin release" - -[workflows.steps.variables] -"$version" = "Version" - -[[workflows.steps]] -type = "CreatePullRequest" -base = "main" - -[workflows.steps.title] -template = "chore: prepare release $version" -variables = { "$version" = "Version" } - -[workflows.steps.body] -template = "This PR was created by Knope. Merging it will create a new release\n\n$changelog" -variables = { "$changelog" = "ChangelogEntry" } - -[[workflows]] -name = "release" - -[[workflows.steps]] -type = "Release" - -[[workflows]] -name = "document-change" - -[[workflows.steps]] -type = "CreateChangeFile" +extra_changelog_sections = [ + { name = "Notes", types = ["note"] } +] [github] owner = "openapi-generators" repo = "openapi-python-client" + +[bot.releases] +enabled = true diff --git a/openapi_python_client/__init__.py b/openapi_python_client/__init__.py index 23d972eac..52d34b4fc 100644 --- a/openapi_python_client/__init__.py +++ b/openapi_python_client/__init__.py @@ -4,21 +4,24 @@ import mimetypes import shutil import subprocess +from collections.abc import Sequence from importlib.metadata import version from pathlib import Path from subprocess import CalledProcessError -from typing import Any, Dict, List, Optional, Sequence, Union +from typing import Any import httpcore import httpx -import yaml from jinja2 import BaseLoader, ChoiceLoader, Environment, FileSystemLoader, PackageLoader +from ruamel.yaml import YAML +from ruamel.yaml.error import YAMLError from openapi_python_client import utils from .config import Config, MetaType from .parser import GeneratorData, import_string_from_class from .parser.errors import ErrorLevel, GeneratorError +from .parser.properties import LiteralEnumProperty __version__ = version(__package__) @@ -39,7 +42,7 @@ def __init__( *, openapi: GeneratorData, config: Config, - custom_template_path: Optional[Path] = None, + custom_template_path: Path | None = None, ) -> None: self.openapi: GeneratorData = openapi self.config = config @@ -64,12 +67,22 @@ def __init__( ) self.project_name: str = config.project_name_override or f"{utils.kebab_case(openapi.title).lower()}-client" - self.project_dir: Path = Path.cwd() - if config.meta_type != MetaType.NONE: - self.project_dir /= self.project_name - self.package_name: str = config.package_name_override or self.project_name.replace("-", "_") - self.package_dir: Path = self.project_dir / self.package_name + self.project_dir: Path # Where the generated code will be placed + self.package_dir: Path # Where the generated Python module will be placed (same as project_dir if no meta) + + if config.output_path is not None: + self.project_dir = config.output_path + elif config.meta_type == MetaType.NONE: + self.project_dir = Path.cwd() / self.package_name + else: + self.project_dir = Path.cwd() / self.project_name + + if config.meta_type == MetaType.NONE: + self.package_dir = self.project_dir + else: + self.package_dir = self.project_dir / self.package_name + self.package_description: str = utils.remove_string_escapes( f"A client library for accessing {self.openapi.title}" ) @@ -77,6 +90,7 @@ def __init__( self.env.filters.update(TEMPLATE_FILTERS) self.env.globals.update( + config=config, utils=utils, python_identifier=lambda x: utils.PythonIdentifier(x, config.field_prefix), class_name=lambda x: utils.ClassName(x, config.field_prefix), @@ -89,19 +103,17 @@ def __init__( openapi=self.openapi, endpoint_collections_by_tag=self.openapi.endpoint_collections_by_tag, ) - self.errors: List[GeneratorError] = [] + self.errors: 
list[GeneratorError] = [] def build(self) -> Sequence[GeneratorError]: """Create the project from templates""" - if self.config.meta_type == MetaType.NONE: - print(f"Generating {self.package_name}") - else: - print(f"Generating {self.project_name}") - try: - self.project_dir.mkdir() - except FileExistsError: - return [GeneratorError(detail="Directory already exists. Delete it or use the update command.")] + print(f"Generating {self.project_dir}") + try: + self.project_dir.mkdir() + except FileExistsError: + if not self.config.overwrite: + return [GeneratorError(detail="Directory already exists. Delete it or use the --overwrite option.")] self._create_package() self._build_metadata() self._build_models() @@ -109,19 +121,6 @@ def build(self) -> Sequence[GeneratorError]: self._run_post_hooks() return self._get_errors() - def update(self) -> Sequence[GeneratorError]: - """Update an existing project""" - - if not self.package_dir.is_dir(): - return [GeneratorError(detail=f"Directory {self.package_dir} not found")] - print(f"Updating {self.package_name}") - shutil.rmtree(self.package_dir) - self._create_package() - self._build_models() - self._build_api() - self._run_post_hooks() - return self._get_errors() - def _run_post_hooks(self) -> None: for command in self.config.post_hooks: self._run_command(command) @@ -137,7 +136,7 @@ def _run_command(self, cmd: str) -> None: ) return try: - cwd = self.package_dir if self.config.meta_type == MetaType.NONE else self.project_dir + cwd = self.project_dir subprocess.run(cmd, cwd=cwd, shell=True, capture_output=True, check=True) except CalledProcessError as err: self.errors.append( @@ -148,8 +147,8 @@ def _run_command(self, cmd: str) -> None: ) ) - def _get_errors(self) -> List[GeneratorError]: - errors: List[GeneratorError] = [] + def _get_errors(self) -> list[GeneratorError]: + errors: list[GeneratorError] = [] for collection in self.openapi.endpoint_collections_by_tag.values(): errors.extend(collection.parse_errors) errors.extend(self.openapi.errors) @@ -157,7 +156,8 @@ def _get_errors(self) -> List[GeneratorError]: return errors def _create_package(self) -> None: - self.package_dir.mkdir() + if self.package_dir != self.project_dir: + self.package_dir.mkdir(exist_ok=True) # Package __init__.py package_init = self.package_dir / "__init__.py" @@ -184,7 +184,7 @@ def _build_metadata(self) -> None: readme = self.project_dir / "README.md" readme_template = self.env.get_template("README.md.jinja") readme.write_text( - readme_template.render(poetry=self.config.meta_type == MetaType.POETRY), + readme_template.render(meta=self.config.meta_type), encoding=self.config.file_encoding, ) @@ -213,6 +213,7 @@ def _build_setup_py(self) -> None: def _build_models(self) -> None: # Generate models models_dir = self.package_dir / "models" + shutil.rmtree(models_dir, ignore_errors=True) models_dir.mkdir() models_init = models_dir / "__init__.py" imports = [] @@ -228,9 +229,12 @@ def _build_models(self) -> None: # Generate enums str_enum_template = self.env.get_template("str_enum.py.jinja") int_enum_template = self.env.get_template("int_enum.py.jinja") + literal_enum_template = self.env.get_template("literal_enum.py.jinja") for enum in self.openapi.enums: module_path = models_dir / f"{enum.class_info.module_name}.py" - if enum.value_type is int: + if isinstance(enum, LiteralEnumProperty): + module_path.write_text(literal_enum_template.render(enum=enum), encoding=self.config.file_encoding) + elif enum.value_type is int: 
module_path.write_text(int_enum_template.render(enum=enum), encoding=self.config.file_encoding) else: module_path.write_text(str_enum_template.render(enum=enum), encoding=self.config.file_encoding) @@ -255,6 +259,7 @@ def _build_api(self) -> None: # Generate endpoints api_dir = self.package_dir / "api" + shutil.rmtree(api_dir, ignore_errors=True) api_dir.mkdir() api_init_path = api_dir / "__init__.py" api_init_template = self.env.get_template("api_init.py.jinja") @@ -287,8 +292,8 @@ def _build_api(self) -> None: def _get_project_for_url_or_path( config: Config, - custom_template_path: Optional[Path] = None, -) -> Union[Project, GeneratorError]: + custom_template_path: Path | None = None, +) -> Project | GeneratorError: data_dict = _get_document(source=config.document_source, timeout=config.http_timeout) if isinstance(data_dict, GeneratorError): return data_dict @@ -302,10 +307,10 @@ def _get_project_for_url_or_path( ) -def create_new_client( +def generate( *, config: Config, - custom_template_path: Optional[Path] = None, + custom_template_path: Path | None = None, ) -> Sequence[GeneratorError]: """ Generate the client library @@ -322,27 +327,7 @@ def create_new_client( return project.build() -def update_existing_client( - *, - config: Config, - custom_template_path: Optional[Path] = None, -) -> Sequence[GeneratorError]: - """ - Update an existing client library - - Returns: - A list containing any errors encountered when generating. - """ - project = _get_project_for_url_or_path( - custom_template_path=custom_template_path, - config=config, - ) - if isinstance(project, GeneratorError): - return [project] - return project.update() - - -def _load_yaml_or_json(data: bytes, content_type: Optional[str]) -> Union[Dict[str, Any], GeneratorError]: +def _load_yaml_or_json(data: bytes, content_type: str | None) -> dict[str, Any] | GeneratorError: if content_type == "application/json": try: return json.loads(data.decode()) @@ -350,14 +335,15 @@ def _load_yaml_or_json(data: bytes, content_type: Optional[str]) -> Union[Dict[s return GeneratorError(header=f"Invalid JSON from provided source: {err}") else: try: - return yaml.safe_load(data) - except yaml.YAMLError as err: + yaml = YAML(typ="safe") + return yaml.load(data) + except YAMLError as err: return GeneratorError(header=f"Invalid YAML from provided source: {err}") -def _get_document(*, source: Union[str, Path], timeout: int) -> Union[Dict[str, Any], GeneratorError]: +def _get_document(*, source: str | Path, timeout: int) -> dict[str, Any] | GeneratorError: yaml_bytes: bytes - content_type: Optional[str] + content_type: str | None if isinstance(source, str): try: response = httpx.get(source, timeout=timeout) diff --git a/openapi_python_client/cli.py b/openapi_python_client/cli.py index 22478ab04..3972703ae 100644 --- a/openapi_python_client/cli.py +++ b/openapi_python_client/cli.py @@ -1,29 +1,34 @@ import codecs +from collections.abc import Sequence from pathlib import Path from pprint import pformat -from typing import Optional, Sequence, Union import typer -from openapi_python_client import MetaType +from openapi_python_client import MetaType, __version__ from openapi_python_client.config import Config, ConfigFile from openapi_python_client.parser.errors import ErrorLevel, GeneratorError, ParseError -app = typer.Typer() +app = typer.Typer(name="openapi-python-client") def _version_callback(value: bool) -> None: - from openapi_python_client import __version__ - if value: typer.echo(f"openapi-python-client version: {__version__}") raise 
typer.Exit() def _process_config( - *, url: Optional[str], path: Optional[Path], config_path: Optional[Path], meta_type: MetaType, file_encoding: str + *, + url: str | None, + path: Path | None, + config_path: Path | None, + meta_type: MetaType, + file_encoding: str, + overwrite: bool, + output_path: Path | None, ) -> Config: - source: Union[Path, str] + source: Path | str if url and not path: source = url elif path and not url: @@ -49,13 +54,13 @@ def _process_config( except Exception as err: raise typer.BadParameter("Unable to parse config") from err - return Config.from_sources(config_file, meta_type, source, file_encoding) + return Config.from_sources(config_file, meta_type, source, file_encoding, overwrite, output_path=output_path) # noinspection PyUnusedLocal -@app.callback(name="openapi-python-client") +@app.callback() def cli( version: bool = typer.Option(False, "--version", callback=_version_callback, help="Print the version and exit"), ) -> None: @@ -64,16 +69,16 @@ def cli( def _print_parser_error(err: GeneratorError, color: str) -> None: typer.secho(err.header, bold=True, fg=color, err=True) - typer.echo() + typer.echo(err=True) if err.detail: typer.secho(err.detail, fg=color, err=True) - typer.echo() + typer.echo(err=True) if isinstance(err, ParseError) and err.data is not None: formatted_data = pformat(err.data) typer.secho(formatted_data, fg=color, err=True) - typer.echo() + typer.echo(err=True) def handle_errors(errors: Sequence[GeneratorError], fail_on_warning: bool = False) -> None: @@ -117,62 +122,46 @@ def handle_errors(errors: Sequence[GeneratorError], fail_on_warning: bool = Fals raise typer.Exit(code=1) -custom_template_path_options = { - "help": "A path to a directory containing custom template(s)", - "file_okay": False, - "dir_okay": True, - "readable": True, - "resolve_path": True, -} - -_meta_option = typer.Option( - MetaType.POETRY, - help="The type of metadata you want to generate.", -) - -CONFIG_OPTION = typer.Option(None, "--config", help="Path to the config file to use") - - @app.command() def generate( - url: Optional[str] = typer.Option(None, help="A URL to read the JSON from"), - path: Optional[Path] = typer.Option(None, help="A path to the JSON file"), - custom_template_path: Optional[Path] = typer.Option(None, **custom_template_path_options), # type: ignore - meta: MetaType = _meta_option, + url: str | None = typer.Option(None, help="A URL to read the OpenAPI document from"), + path: Path | None = typer.Option(None, help="A path to the OpenAPI document"), + custom_template_path: Path | None = typer.Option( + None, + help="A path to a directory containing custom template(s)", + file_okay=False, + dir_okay=True, + readable=True, + resolve_path=True, + ), # type: ignore + meta: MetaType = typer.Option( + MetaType.POETRY, + help="The type of metadata you want to generate.", + ), file_encoding: str = typer.Option("utf-8", help="Encoding used when writing generated"), - config_path: Optional[Path] = CONFIG_OPTION, + config_path: Path | None = typer.Option(None, "--config", help="Path to the config file to use"), fail_on_warning: bool = False, + overwrite: bool = typer.Option(False, help="Overwrite the existing client if it exists"), + output_path: Path | None = typer.Option( + None, + help="Path to write the generated code to. " + "Defaults to the OpenAPI document title converted to kebab or snake case (depending on meta type). 
" + "Can also be overridden with `project_name_override` or `package_name_override` in config.", + ), ) -> None: """Generate a new OpenAPI Client library""" - from . import create_new_client - - config = _process_config(url=url, path=path, config_path=config_path, meta_type=meta, file_encoding=file_encoding) - errors = create_new_client( - custom_template_path=custom_template_path, - config=config, + from . import generate # noqa: PLC0415 + + config = _process_config( + url=url, + path=path, + config_path=config_path, + meta_type=meta, + file_encoding=file_encoding, + overwrite=overwrite, + output_path=output_path, ) - handle_errors(errors, fail_on_warning) - - -@app.command() -def update( - url: Optional[str] = typer.Option(None, help="A URL to read the JSON from"), - path: Optional[Path] = typer.Option(None, help="A path to the JSON file"), - custom_template_path: Optional[Path] = typer.Option(None, **custom_template_path_options), # type: ignore - meta: MetaType = _meta_option, - file_encoding: str = typer.Option("utf-8", help="Encoding used when writing generated"), - config_path: Optional[Path] = CONFIG_OPTION, - fail_on_warning: bool = False, -) -> None: - """Update an existing OpenAPI Client library - - The update command performs the same operations as generate except it does not overwrite specific metadata for the - generated client such as the README.md, .gitignore, and pyproject.toml. - """ - from . import update_existing_client - - config = _process_config(config_path=config_path, meta_type=meta, url=url, path=path, file_encoding=file_encoding) - errors = update_existing_client( + errors = generate( custom_template_path=custom_template_path, config=config, ) diff --git a/openapi_python_client/config.py b/openapi_python_client/config.py index 73aac11a7..0cba32599 100644 --- a/openapi_python_client/config.py +++ b/openapi_python_client/config.py @@ -2,11 +2,10 @@ import mimetypes from enum import Enum from pathlib import Path -from typing import Dict, List, Optional, Union -import yaml from attr import define from pydantic import BaseModel +from ruamel.yaml import YAML class ClassOverride(BaseModel): @@ -15,8 +14,8 @@ class ClassOverride(BaseModel): See https://github.com/openapi-generators/openapi-python-client#class_overrides """ - class_name: Optional[str] = None - module_name: Optional[str] = None + class_name: str | None = None + module_name: str | None = None class MetaType(str, Enum): @@ -26,6 +25,7 @@ class MetaType(str, Enum): POETRY = "poetry" SETUP = "setup" PDM = "pdm" + UV = "uv" class ConfigFile(BaseModel): @@ -34,14 +34,18 @@ class ConfigFile(BaseModel): See https://github.com/openapi-generators/openapi-python-client#configuration """ - class_overrides: Optional[Dict[str, ClassOverride]] = None - project_name_override: Optional[str] = None - package_name_override: Optional[str] = None - package_version_override: Optional[str] = None + class_overrides: dict[str, ClassOverride] | None = None + content_type_overrides: dict[str, str] | None = None + project_name_override: str | None = None + package_name_override: str | None = None + package_version_override: str | None = None use_path_prefixes_for_title_model_names: bool = True - post_hooks: Optional[List[str]] = None + post_hooks: list[str] | None = None + docstrings_on_attributes: bool = False field_prefix: str = "field_" + generate_all_tags: bool = False http_timeout: int = 5 + literal_enums: bool = False @staticmethod def 
load_from_path(path: Path) -> "ConfigFile": @@ -50,7 +54,8 @@ def load_from_path(path: Path) -> "ConfigFile": if mime == "application/json": config_data = json.loads(path.read_text()) else: - config_data = yaml.safe_load(path.read_text()) + yaml = YAML(typ="safe") + config_data = yaml.load(path) config = ConfigFile(**config_data) return config @@ -60,45 +65,62 @@ class Config: """Contains all the config values for the generator, from files, defaults, and CLI arguments.""" meta_type: MetaType - class_overrides: Dict[str, ClassOverride] - project_name_override: Optional[str] - package_name_override: Optional[str] - package_version_override: Optional[str] + class_overrides: dict[str, ClassOverride] + project_name_override: str | None + package_name_override: str | None + package_version_override: str | None use_path_prefixes_for_title_model_names: bool - post_hooks: List[str] + post_hooks: list[str] + docstrings_on_attributes: bool field_prefix: str + generate_all_tags: bool http_timeout: int - document_source: Union[Path, str] + literal_enums: bool + document_source: Path | str file_encoding: str + content_type_overrides: dict[str, str] + overwrite: bool + output_path: Path | None @staticmethod def from_sources( - config_file: ConfigFile, meta_type: MetaType, document_source: Union[Path, str], file_encoding: str + config_file: ConfigFile, + meta_type: MetaType, + document_source: Path | str, + file_encoding: str, + overwrite: bool, + output_path: Path | None, ) -> "Config": if config_file.post_hooks is not None: post_hooks = config_file.post_hooks elif meta_type == MetaType.NONE: post_hooks = [ - "ruff check . --fix --extend-select=I", + "ruff check . --fix-only --extend-select=I", "ruff format .", ] else: post_hooks = [ - "ruff check --fix .", + "ruff check --fix-only .", "ruff format .", ] config = Config( meta_type=meta_type, class_overrides=config_file.class_overrides or {}, + content_type_overrides=config_file.content_type_overrides or {}, project_name_override=config_file.project_name_override, package_name_override=config_file.package_name_override, package_version_override=config_file.package_version_override, use_path_prefixes_for_title_model_names=config_file.use_path_prefixes_for_title_model_names, post_hooks=post_hooks, + docstrings_on_attributes=config_file.docstrings_on_attributes, field_prefix=config_file.field_prefix, + generate_all_tags=config_file.generate_all_tags, http_timeout=config_file.http_timeout, + literal_enums=config_file.literal_enums, document_source=document_source, file_encoding=file_encoding, + overwrite=overwrite, + output_path=output_path, ) return config diff --git a/openapi_python_client/parser/bodies.py b/openapi_python_client/parser/bodies.py index 9ab42cb4f..643848cc1 100644 --- a/openapi_python_client/parser/bodies.py +++ b/openapi_python_client/parser/bodies.py @@ -1,5 +1,4 @@ import sys -from typing import List, Tuple, Union import attr @@ -9,6 +8,7 @@ Schemas, property_from_data, ) +from openapi_python_client.parser.properties.schemas import get_reference_simple_name from .. 
import schema as oai from ..config import Config @@ -44,24 +44,28 @@ def body_from_data( *, data: oai.Operation, schemas: Schemas, + request_bodies: dict[str, oai.RequestBody | oai.Reference], config: Config, endpoint_name: str, -) -> Tuple[List[Union[Body, ParseError]], Schemas]: +) -> tuple[list[Body | ParseError], Schemas]: """Adds form or JSON body to Endpoint if included in data""" - if data.request_body is None or isinstance(data.request_body, oai.Reference): + body = _resolve_reference(data.request_body, request_bodies) + if isinstance(body, ParseError): + return [body], schemas + if body is None: return [], schemas - bodies: List[Union[Body, ParseError]] = [] - body_content = data.request_body.content + bodies: list[Body | ParseError] = [] + body_content = body.content prefix_type_names = len(body_content) > 1 for content_type, media_type in body_content.items(): - simplified_content_type = get_content_type(content_type) + simplified_content_type = get_content_type(content_type, config) if simplified_content_type is None: bodies.append( ParseError( detail="Invalid content type", - data=data.request_body, + data=body, level=ErrorLevel.WARNING, ) ) @@ -71,7 +75,7 @@ def body_from_data( bodies.append( ParseError( detail="Missing schema", - data=data.request_body, + data=body, level=ErrorLevel.WARNING, ) ) @@ -88,14 +92,14 @@ def body_from_data( bodies.append( ParseError( detail=f"Unsupported content type {simplified_content_type}", - data=data.request_body, + data=body, level=ErrorLevel.WARNING, ) ) continue prop, schemas = property_from_data( name="body", - required=True, + required=body.required, data=media_type_schema, schemas=schemas, parent_name=f"{endpoint_name}_{body_type}" if prefix_type_names else endpoint_name, @@ -113,6 +117,7 @@ def body_from_data( **schemas.classes_by_name, prop.class_info.name: prop, }, + models_to_process=[*schemas.models_to_process, prop], ) bodies.append( Body( @@ -123,3 +128,19 @@ def body_from_data( ) return bodies, schemas + + +def _resolve_reference( + body: oai.RequestBody | oai.Reference | None, request_bodies: dict[str, oai.RequestBody | oai.Reference] +) -> oai.RequestBody | ParseError | None: + if body is None: + return None + references_seen = [] + while isinstance(body, oai.Reference) and body.ref not in references_seen: + references_seen.append(body.ref) + body = request_bodies.get(get_reference_simple_name(body.ref)) + if isinstance(body, oai.Reference): + return ParseError(detail="Circular $ref in request body", data=body) + if body is None and references_seen: + return ParseError(detail=f"Could not resolve $ref {references_seen[-1]} in request body") + return body diff --git a/openapi_python_client/parser/errors.py b/openapi_python_client/parser/errors.py index 76a795b24..00f522b9e 100644 --- a/openapi_python_client/parser/errors.py +++ b/openapi_python_client/parser/errors.py @@ -1,8 +1,7 @@ from dataclasses import dataclass from enum import Enum -from typing import Optional -__all__ = ["ErrorLevel", "GeneratorError", "ParseError", "PropertyError", "ParameterError"] +__all__ = ["ErrorLevel", "GeneratorError", "ParameterError", "ParseError", "PropertyError"] from pydantic import BaseModel @@ -18,7 +17,7 @@ class ErrorLevel(Enum): class GeneratorError: """Base data struct containing info on an error that occurred""" - detail: Optional[str] = None + detail: str | None = None level: ErrorLevel = ErrorLevel.ERROR header: str = "Unable to generate the client" @@ -28,7 +27,7 @@ class ParseError(GeneratorError): """An error raised when there's 
a problem parsing an OpenAPI document""" level: ErrorLevel = ErrorLevel.WARNING - data: Optional[BaseModel] = None + data: BaseModel | None = None header: str = "Unable to parse this part of your OpenAPI document: " diff --git a/openapi_python_client/parser/openapi.py b/openapi_python_client/parser/openapi.py index 015966224..4f83ae93e 100644 --- a/openapi_python_client/parser/openapi.py +++ b/openapi_python_client/parser/openapi.py @@ -1,8 +1,8 @@ import re +from collections.abc import Iterator from copy import deepcopy from dataclasses import dataclass, field -from http import HTTPStatus -from typing import Any, Dict, Iterator, List, Optional, Protocol, Set, Tuple, Union +from typing import Any, Protocol from pydantic import ValidationError @@ -15,6 +15,7 @@ from .properties import ( Class, EnumProperty, + LiteralEnumProperty, ModelProperty, Parameters, Property, @@ -24,7 +25,7 @@ property_from_data, ) from .properties.schemas import parameter_from_reference -from .responses import Response, response_from_data +from .responses import HTTPStatusPattern, Responses, response_from_data _PATH_PARAM_REGEX = re.compile("{([a-zA-Z_-][a-zA-Z0-9_-]*)}") @@ -39,36 +40,45 @@ class EndpointCollection: """A bunch of endpoints grouped under a tag that will become a module""" tag: str - endpoints: List["Endpoint"] = field(default_factory=list) - parse_errors: List[ParseError] = field(default_factory=list) + endpoints: list["Endpoint"] = field(default_factory=list) + parse_errors: list[ParseError] = field(default_factory=list) @staticmethod def from_data( *, - data: Dict[str, oai.PathItem], + data: dict[str, oai.PathItem], schemas: Schemas, parameters: Parameters, + request_bodies: dict[str, oai.RequestBody | oai.Reference], + responses: dict[str, oai.Response | oai.Reference], config: Config, - ) -> Tuple[Dict[utils.PythonIdentifier, "EndpointCollection"], Schemas, Parameters]: + ) -> tuple[dict[utils.PythonIdentifier, "EndpointCollection"], Schemas, Parameters]: """Parse the openapi paths data to get EndpointCollections by tag""" - endpoints_by_tag: Dict[utils.PythonIdentifier, EndpointCollection] = {} + endpoints_by_tag: dict[utils.PythonIdentifier, EndpointCollection] = {} methods = ["get", "put", "post", "delete", "options", "head", "patch", "trace"] for path, path_data in data.items(): for method in methods: - operation: Optional[oai.Operation] = getattr(path_data, method) + operation: oai.Operation | None = getattr(path_data, method) if operation is None: continue - tag = utils.PythonIdentifier(value=(operation.tags or ["default"])[0], prefix="tag") - collection = endpoints_by_tag.setdefault(tag, EndpointCollection(tag=tag)) + + tags = [utils.PythonIdentifier(value=tag, prefix="tag") for tag in operation.tags or ["default"]] + if not config.generate_all_tags: + tags = tags[:1] + + collections = [endpoints_by_tag.setdefault(tag, EndpointCollection(tag=tag)) for tag in tags] + endpoint, schemas, parameters = Endpoint.from_data( data=operation, path=path, method=method, - tag=tag, + tags=tags, schemas=schemas, parameters=parameters, + request_bodies=request_bodies, + responses=responses, config=config, ) # Add `PathItem` parameters @@ -83,15 +93,16 @@ def from_data( if not isinstance(endpoint, ParseError): endpoint = Endpoint.sort_parameters(endpoint=endpoint) if isinstance(endpoint, ParseError): - endpoint.header = ( - f"WARNING parsing {method.upper()} {path} within {tag}. Endpoint will not be generated." 
- ) - collection.parse_errors.append(endpoint) + endpoint.header = f"WARNING parsing {method.upper()} {path} within {'/'.join(tags)}. Endpoint will not be generated." + for collection in collections: + collection.parse_errors.append(endpoint) continue for error in endpoint.errors: - error.header = f"WARNING parsing {method.upper()} {path} within {tag}." - collection.parse_errors.append(error) - collection.endpoints.append(endpoint) + error.header = f"WARNING parsing {method.upper()} {path} within {'/'.join(tags)}." + for collection in collections: + collection.parse_errors.append(error) + for collection in collections: + collection.endpoints.append(endpoint) return endpoints_by_tag, schemas, parameters @@ -114,7 +125,7 @@ class RequestBodyParser(Protocol): def __call__( self, *, body: oai.RequestBody, schemas: Schemas, parent_name: str, config: Config - ) -> Tuple[Union[Property, PropertyError, None], Schemas]: ... # pragma: no cover + ) -> tuple[Property | PropertyError | None, Schemas]: ... # pragma: no cover @dataclass @@ -125,45 +136,41 @@ class Endpoint: path: str method: str - description: Optional[str] + description: str | None name: str requires_security: bool - tag: str - summary: Optional[str] = "" - relative_imports: Set[str] = field(default_factory=set) - query_parameters: List[Property] = field(default_factory=list) - path_parameters: List[Property] = field(default_factory=list) - header_parameters: List[Property] = field(default_factory=list) - cookie_parameters: List[Property] = field(default_factory=list) - responses: List[Response] = field(default_factory=list) - bodies: List[Body] = field(default_factory=list) - errors: List[ParseError] = field(default_factory=list) + tags: list[PythonIdentifier] + summary: str | None = "" + relative_imports: set[str] = field(default_factory=set) + query_parameters: list[Property] = field(default_factory=list) + path_parameters: list[Property] = field(default_factory=list) + header_parameters: list[Property] = field(default_factory=list) + cookie_parameters: list[Property] = field(default_factory=list) + responses: Responses = field(default_factory=lambda: Responses(patterns=[], default=None)) + bodies: list[Body] = field(default_factory=list) + errors: list[ParseError] = field(default_factory=list) @staticmethod def _add_responses( - *, endpoint: "Endpoint", data: oai.Responses, schemas: Schemas, config: Config - ) -> Tuple["Endpoint", Schemas]: + *, + endpoint: "Endpoint", + data: oai.Responses, + schemas: Schemas, + responses: dict[str, oai.Response | oai.Reference], + config: Config, + ) -> tuple["Endpoint", Schemas]: endpoint = deepcopy(endpoint) for code, response_data in data.items(): - status_code: HTTPStatus - try: - status_code = HTTPStatus(int(code)) - except ValueError: - endpoint.errors.append( - ParseError( - detail=( - f"Invalid response status code {code} (not a valid HTTP " - f"status code), response will be ommitted from generated " - f"client" - ) - ) - ) + status_code = HTTPStatusPattern.parse(code) + if isinstance(status_code, ParseError): + endpoint.errors.append(status_code) continue response, schemas = response_from_data( status_code=status_code, data=response_data, schemas=schemas, + responses=responses, parent_name=endpoint.name, config=config, ) @@ -172,8 +179,8 @@ def _add_responses( endpoint.errors.append( ParseError( detail=( - f"Cannot parse response for status code {status_code}{detail_suffix}, " - f"response will be ommitted from generated client" + f"Cannot parse response for status code 
{code}{detail_suffix}, " + f"response will be omitted from generated client" ), data=response.data, ) @@ -183,18 +190,22 @@ def _add_responses( # No reasons to use lazy imports in endpoints, so add lazy imports to relative here. endpoint.relative_imports |= response.prop.get_lazy_imports(prefix=models_relative_prefix) endpoint.relative_imports |= response.prop.get_imports(prefix=models_relative_prefix) - endpoint.responses.append(response) + if response.is_default(): + endpoint.responses.default = response + else: + endpoint.responses.patterns.append(response) + endpoint.responses.patterns.sort() return endpoint, schemas @staticmethod def add_parameters( *, endpoint: "Endpoint", - data: Union[oai.Operation, oai.PathItem], + data: oai.Operation | oai.PathItem, schemas: Schemas, parameters: Parameters, config: Config, - ) -> Tuple[Union["Endpoint", ParseError], Schemas, Parameters]: + ) -> tuple["Endpoint | ParseError", Schemas, Parameters]: """Process the defined `parameters` for an Endpoint. Any existing parameters will be ignored, so earlier instances of a parameter take precedence. PathItem @@ -223,8 +234,8 @@ def add_parameters( endpoint = deepcopy(endpoint) - unique_parameters: Set[Tuple[str, oai.ParameterLocation]] = set() - parameters_by_location: Dict[str, List[Property]] = { + unique_parameters: set[tuple[str, oai.ParameterLocation]] = set() + parameters_by_location: dict[str, list[Property]] = { oai.ParameterLocation.QUERY: endpoint.query_parameters, oai.ParameterLocation.PATH: endpoint.path_parameters, oai.ParameterLocation.HEADER: endpoint.header_parameters, @@ -301,8 +312,8 @@ def _check_parameters_for_conflicts( self, *, config: Config, - previously_modified_params: Optional[Set[Tuple[oai.ParameterLocation, str]]] = None, - ) -> Union["Endpoint", ParseError]: + previously_modified_params: set[tuple[oai.ParameterLocation, str]] | None = None, + ) -> "Endpoint | ParseError": """Check for conflicting parameters For parameters that have the same python_name but are in different locations, append the location to the @@ -313,7 +324,7 @@ def _check_parameters_for_conflicts( unique python_name. """ modified_params = previously_modified_params or set() - used_python_names: Dict[PythonIdentifier, Tuple[oai.ParameterLocation, Property]] = {} + used_python_names: dict[PythonIdentifier, tuple[oai.ParameterLocation, Property]] = {} reserved_names = ["client", "url"] for parameter in self.iter_all_parameters(): location, prop = parameter @@ -355,7 +366,7 @@ def _check_parameters_for_conflicts( return self @staticmethod - def sort_parameters(*, endpoint: "Endpoint") -> Union["Endpoint", ParseError]: + def sort_parameters(*, endpoint: "Endpoint") -> "Endpoint | ParseError": """ Sorts the path parameters of an `endpoint` so that they match the order declared in `endpoint.path`. 
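OpenAPI 3 allows wildcard response keys such as "2XX" and the literal key "default", which is why the strict HTTPStatus(int(code)) conversion above is replaced by HTTPStatusPattern.parse and why Endpoint.responses becomes a Responses(patterns=..., default=...) container. The snippet below is only a hand-written sketch of that parsing idea; StatusPatternSketch is an invented name and is not the class added in responses.py, which returns a ParseError rather than a string on bad input.

import re
from dataclasses import dataclass

_RANGE = re.compile(r"[1-5]XX")


@dataclass
class StatusPatternSketch:
    raw: str             # the key as written in the document: "200", "4XX", or "default"
    exact: int | None    # set only when the key is a single concrete status code

    @classmethod
    def parse(cls, code: str) -> "StatusPatternSketch | str":
        # "default" and range keys have no single numeric code
        if code == "default":
            return cls(raw=code, exact=None)
        if _RANGE.fullmatch(code.upper()):
            return cls(raw=code.upper(), exact=None)
        try:
            value = int(code)
        except ValueError:
            return f"Invalid response status code {code}"
        if not 100 <= value <= 599:
            return f"Invalid response status code {code}"
        return cls(raw=code, exact=value)


# Exact codes and range keys both parse; junk is reported instead of raising.
assert StatusPatternSketch.parse("200").exact == 200
assert StatusPatternSketch.parse("5XX").raw == "5XX"
assert isinstance(StatusPatternSketch.parse("banana"), str)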
@@ -389,11 +400,13 @@ def from_data( data: oai.Operation, path: str, method: str, - tag: str, + tags: list[PythonIdentifier], schemas: Schemas, parameters: Parameters, + request_bodies: dict[str, oai.RequestBody | oai.Reference], + responses: dict[str, oai.Response | oai.Reference], config: Config, - ) -> Tuple[Union["Endpoint", ParseError], Schemas, Parameters]: + ) -> tuple["Endpoint | ParseError", Schemas, Parameters]: """Construct an endpoint from the OpenAPI data""" if data.operationId is None: @@ -408,7 +421,7 @@ def from_data( description=utils.remove_string_escapes(data.description) if data.description else "", name=name, requires_security=bool(data.security), - tag=tag, + tags=tags, ) result, schemas, parameters = Endpoint.add_parameters( @@ -420,10 +433,18 @@ def from_data( ) if isinstance(result, ParseError): return result, schemas, parameters - result, schemas = Endpoint._add_responses(endpoint=result, data=data.responses, schemas=schemas, config=config) + result, schemas = Endpoint._add_responses( + endpoint=result, + data=data.responses, + schemas=schemas, + responses=responses, + config=config, + ) if isinstance(result, ParseError): return result, schemas, parameters - bodies, schemas = body_from_data(data=data, schemas=schemas, config=config, endpoint_name=result.name) + bodies, schemas = body_from_data( + data=data, schemas=schemas, config=config, endpoint_name=result.name, request_bodies=request_bodies + ) body_errors = [] for body in bodies: if isinstance(body, ParseError): @@ -448,22 +469,22 @@ def from_data( def response_type(self) -> str: """Get the Python type of any response from this endpoint""" - types = sorted({response.prop.get_type_string(quoted=False) for response in self.responses}) + types = sorted({response.prop.get_type_string() for response in self.responses}) if len(types) == 0: return "Any" if len(types) == 1: - return self.responses[0].prop.get_type_string(quoted=False) - return f"Union[{', '.join(types)}]" + return types[0] + return " | ".join(types) - def iter_all_parameters(self) -> Iterator[Tuple[oai.ParameterLocation, Property]]: + def iter_all_parameters(self) -> Iterator[tuple[oai.ParameterLocation, Property]]: """Iterate through all the parameters of this endpoint""" yield from ((oai.ParameterLocation.PATH, param) for param in self.path_parameters) yield from ((oai.ParameterLocation.QUERY, param) for param in self.query_parameters) yield from ((oai.ParameterLocation.HEADER, param) for param in self.header_parameters) yield from ((oai.ParameterLocation.COOKIE, param) for param in self.cookie_parameters) - def list_all_parameters(self) -> List[Property]: - """Return a List of all the parameters of this endpoint""" + def list_all_parameters(self) -> list[Property]: + """Return a list of all the parameters of this endpoint""" return ( self.path_parameters + self.query_parameters @@ -478,15 +499,15 @@ class GeneratorData: """All the data needed to generate a client""" title: str - description: Optional[str] + description: str | None version: str - models: Iterator[ModelProperty] - errors: List[ParseError] - endpoint_collections_by_tag: Dict[utils.PythonIdentifier, EndpointCollection] - enums: Iterator[EnumProperty] + models: list[ModelProperty] + errors: list[ParseError] + endpoint_collections_by_tag: dict[utils.PythonIdentifier, EndpointCollection] + enums: list[EnumProperty | LiteralEnumProperty] @staticmethod - def from_dict(data: Dict[str, Any], *, config: Config) -> Union["GeneratorData", GeneratorError]: + def from_dict(data: dict[str, Any], 
*, config: Config) -> "GeneratorData | GeneratorError": """Create an OpenAPI from dict""" try: openapi = oai.OpenAPI.model_validate(data) @@ -507,12 +528,21 @@ def from_dict(data: Dict[str, Any], *, config: Config) -> Union["GeneratorData", parameters=parameters, config=config, ) + request_bodies = (openapi.components and openapi.components.requestBodies) or {} + responses = (openapi.components and openapi.components.responses) or {} endpoint_collections_by_tag, schemas, parameters = EndpointCollection.from_data( - data=openapi.paths, schemas=schemas, parameters=parameters, config=config + data=openapi.paths, + schemas=schemas, + parameters=parameters, + request_bodies=request_bodies, + responses=responses, + config=config, ) - enums = (prop for prop in schemas.classes_by_name.values() if isinstance(prop, EnumProperty)) - models = (prop for prop in schemas.classes_by_name.values() if isinstance(prop, ModelProperty)) + enums = [ + prop for prop in schemas.classes_by_name.values() if isinstance(prop, EnumProperty | LiteralEnumProperty) + ] + models = [prop for prop in schemas.classes_by_name.values() if isinstance(prop, ModelProperty)] return GeneratorData( title=openapi.info.title, diff --git a/openapi_python_client/parser/properties/__init__.py b/openapi_python_client/parser/properties/__init__.py index e692ce5bb..ba81d27c6 100644 --- a/openapi_python_client/parser/properties/__init__.py +++ b/openapi_python_client/parser/properties/__init__.py @@ -4,16 +4,17 @@ "AnyProperty", "Class", "EnumProperty", + "LiteralEnumProperty", "ModelProperty", "Parameters", "Property", "Schemas", - "build_schemas", "build_parameters", + "build_schemas", "property_from_data", ] -from typing import Iterable +from collections.abc import Iterable from attrs import evolve @@ -30,6 +31,7 @@ from .float import FloatProperty from .int import IntProperty from .list_property import ListProperty +from .literal_enum_property import LiteralEnumProperty from .model_property import ModelProperty, process_model from .none import NoneProperty from .property import Property @@ -38,17 +40,19 @@ Parameters, ReferencePath, Schemas, + get_reference_simple_name, parse_reference_path, update_parameters_with_data, update_schemas_with_data, ) from .string import StringProperty from .union import UnionProperty +from .uuid import UuidProperty def _string_based_property( name: str, required: bool, data: oai.Schema, config: Config -) -> StringProperty | DateProperty | DateTimeProperty | FileProperty | PropertyError: +) -> StringProperty | DateProperty | DateTimeProperty | FileProperty | UuidProperty | PropertyError: """Construct a Property from the type "string" """ string_format = data.schema_format python_name = utils.PythonIdentifier(value=name, prefix=config.field_prefix) @@ -79,11 +83,19 @@ def _string_based_property( description=data.description, example=data.example, ) + if string_format == "uuid": + return UuidProperty.build( + name=name, + required=required, + default=data.default, + python_name=python_name, + description=data.description, + example=data.example, + ) return StringProperty.build( name=name, default=data.default, required=required, - pattern=data.pattern, python_name=python_name, description=data.description, example=data.example, @@ -126,7 +138,7 @@ def _property_from_ref( return prop, schemas -def property_from_data( # noqa: PLR0911 +def property_from_data( # noqa: PLR0911, PLR0912 name: str, required: bool, data: oai.Reference | oai.Schema, @@ -153,7 +165,7 @@ def property_from_data( # noqa: PLR0911 sub_data: 
list[oai.Schema | oai.Reference] = data.allOf + data.anyOf + data.oneOf # A union of a single reference should just be passed through to that reference (don't create copy class) if len(sub_data) == 1 and isinstance(sub_data[0], oai.Reference): - return _property_from_ref( + prop, schemas = _property_from_ref( name=name, required=required, parent=data, @@ -162,6 +174,16 @@ def property_from_data( # noqa: PLR0911 config=config, roots=roots, ) + # We won't be generating a separate Python class for this schema - references to it will just use + # the class for the schema it's referencing - so we don't add it to classes_by_name; but we do + # add it to models_to_process, if it's a model, because its properties still need to be resolved. + if isinstance(prop, ModelProperty): + schemas = evolve( + schemas, + models_to_process=[*schemas.models_to_process, prop], + ) + return prop, schemas + if data.type == oai.DataType.BOOLEAN: return ( BooleanProperty.build( @@ -175,6 +197,15 @@ def property_from_data( # noqa: PLR0911 schemas, ) if data.enum: + if config.literal_enums: + return LiteralEnumProperty.build( + data=data, + name=name, + required=required, + schemas=schemas, + parent_name=parent_name, + config=config, + ) return EnumProperty.build( data=data, name=name, @@ -271,7 +302,7 @@ def property_from_data( # noqa: PLR0911 AnyProperty.build( name=name, required=required, - default=None, + default=data.default, python_name=utils.PythonIdentifier(value=name, prefix=config.field_prefix), description=data.description, example=data.example, @@ -294,17 +325,30 @@ def _create_schemas( while still_making_progress: still_making_progress = False errors = [] - next_round = [] + next_round: list[tuple[str, oai.Reference | oai.Schema]] = [] # Only accumulate errors from the last round, since we might fix some along the way for name, data in to_process: - if isinstance(data, oai.Reference): - schemas.errors.append(PropertyError(data=data, detail="Reference schemas are not supported.")) - continue + schema_data: oai.Reference | oai.Schema | None = data ref_path = parse_reference_path(f"#/components/schemas/{name}") if isinstance(ref_path, ParseError): schemas.errors.append(PropertyError(detail=ref_path.detail, data=data)) continue - schemas_or_err = update_schemas_with_data(ref_path=ref_path, data=data, schemas=schemas, config=config) + if isinstance(data, oai.Reference): + # Fully dereference reference schemas + seen = [name] + while isinstance(schema_data, oai.Reference): + data_ref_schema = get_reference_simple_name(schema_data.ref) + if data_ref_schema in seen: + schemas.errors.append(PropertyError(detail="Circular schema references found", data=data)) + break + # use derefenced schema definition for this schema + schema_data = components.get(data_ref_schema) + if isinstance(schema_data, oai.Schema): + schemas_or_err = update_schemas_with_data( + ref_path=ref_path, data=schema_data, schemas=schemas, config=config + ) + else: + schemas.errors.append(PropertyError(detail="Referent schema not found", data=data)) if isinstance(schemas_or_err, PropertyError): next_round.append((name, data)) errors.append(schemas_or_err) @@ -341,7 +385,7 @@ def _process_model_errors( def _process_models(*, schemas: Schemas, config: Config) -> Schemas: - to_process = (prop for prop in schemas.classes_by_name.values() if isinstance(prop, ModelProperty)) + to_process = schemas.models_to_process still_making_progress = True final_model_errors: list[tuple[ModelProperty, PropertyError]] = [] latest_model_errors: 
list[tuple[ModelProperty, PropertyError]] = [] @@ -368,12 +412,11 @@ def _process_models(*, schemas: Schemas, config: Config) -> Schemas: continue schemas = schemas_or_err still_making_progress = True - to_process = (prop for prop in next_round) + to_process = next_round final_model_errors.extend(latest_model_errors) errors = _process_model_errors(final_model_errors, schemas=schemas) - schemas.errors.extend(errors) - return schemas + return evolve(schemas, errors=[*schemas.errors, *errors], models_to_process=to_process) def build_schemas( diff --git a/openapi_python_client/parser/properties/any.py b/openapi_python_client/parser/properties/any.py index fdeef93a1..65fcf40c4 100644 --- a/openapi_python_client/parser/properties/any.py +++ b/openapi_python_client/parser/properties/any.py @@ -33,9 +33,13 @@ def build( @classmethod def convert_value(cls, value: Any) -> Value | None: - if value is None or isinstance(value, Value): + from .string import StringProperty # noqa: PLC0415 + + if value is None: return value - return Value(str(value)) + if isinstance(value, str): + return StringProperty.convert_value(value) + return Value(python_code=str(value), raw_value=value) name: str required: bool diff --git a/openapi_python_client/parser/properties/boolean.py b/openapi_python_client/parser/properties/boolean.py index e6bb883a8..5fd4235d7 100644 --- a/openapi_python_client/parser/properties/boolean.py +++ b/openapi_python_client/parser/properties/boolean.py @@ -59,9 +59,9 @@ def convert_value(cls, value: Any) -> Value | None | PropertyError: return value if isinstance(value, str): if value.lower() == "true": - return Value("True") + return Value(python_code="True", raw_value=value) elif value.lower() == "false": - return Value("False") + return Value(python_code="False", raw_value=value) if isinstance(value, bool): - return Value(str(value)) + return Value(python_code=str(value), raw_value=value) return PropertyError(f"Invalid boolean value: {value}") diff --git a/openapi_python_client/parser/properties/const.py b/openapi_python_client/parser/properties/const.py index 88a398893..216941eb8 100644 --- a/openapi_python_client/parser/properties/const.py +++ b/openapi_python_client/parser/properties/const.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, overload +from typing import Any, ClassVar, overload from attr import define @@ -12,7 +12,7 @@ @define class ConstProperty(PropertyProtocol): - """A property representing a Union (anyOf) of other properties""" + """A property representing a const value""" name: str required: bool @@ -21,12 +21,13 @@ class ConstProperty(PropertyProtocol): python_name: PythonIdentifier description: str | None example: None + template: ClassVar[str] = "const_property.py.jinja" @classmethod def build( cls, *, - const: str | int, + const: str | int | float | bool, default: Any, name: str, python_name: PythonIdentifier, @@ -62,13 +63,13 @@ def build( return prop def convert_value(self, value: Any) -> Value | None | PropertyError: - if isinstance(value, Value): - return value value = self._convert_value(value) if value is None: return value if value != self.value: - return PropertyError(detail=f"Invalid value for const {self.name}; {value} != {self.value}") + return PropertyError( + detail=f"Invalid value for const {self.name}; {value.raw_value} != {self.value.raw_value}" + ) return value @staticmethod @@ -84,23 +85,18 @@ def _convert_value(value: Any) -> Value: ... 
# pragma: no cover def _convert_value(value: Any) -> Value | None: if value is None or isinstance(value, Value): return value - if isinstance(value, Value): - return value # pragma: no cover if isinstance(value, str): return StringProperty.convert_value(value) - return Value(str(value)) + return Value(python_code=str(value), raw_value=value) def get_type_string( self, no_optional: bool = False, json: bool = False, - *, - multipart: bool = False, - quoted: bool = False, ) -> str: - lit = f"Literal[{self.value}]" + lit = f"Literal[{self.value.python_code}]" if not no_optional and not self.required: - return f"Union[{lit}, Unset]" + return f"{lit} | Unset" return lit def get_imports(self, *, prefix: str) -> set[str]: @@ -112,8 +108,8 @@ def get_imports(self, *, prefix: str) -> set[str]: back to the root of the generated client. """ if self.required: - return {"from typing import Literal"} + return {"from typing import Literal, cast"} return { - "from typing import Literal, Union", + "from typing import Literal, cast", f"from {prefix}types import UNSET, Unset", } diff --git a/openapi_python_client/parser/properties/date.py b/openapi_python_client/parser/properties/date.py index 24e0c34fe..7261698ea 100644 --- a/openapi_python_client/parser/properties/date.py +++ b/openapi_python_client/parser/properties/date.py @@ -57,7 +57,7 @@ def convert_value(cls, value: Any) -> Value | None | PropertyError: isoparse(value).date() # make sure it's a valid value except ValueError as e: return PropertyError(f"Invalid date: {e}") - return Value(f"isoparse({value!r}).date()") + return Value(python_code=f"isoparse({value!r}).date()", raw_value=value) return PropertyError(f"Cannot convert {value} to a date") def get_imports(self, *, prefix: str) -> set[str]: diff --git a/openapi_python_client/parser/properties/datetime.py b/openapi_python_client/parser/properties/datetime.py index abb28ac22..5924d173c 100644 --- a/openapi_python_client/parser/properties/datetime.py +++ b/openapi_python_client/parser/properties/datetime.py @@ -59,7 +59,7 @@ def convert_value(cls, value: Any) -> Value | None | PropertyError: isoparse(value) # make sure it's a valid value except ValueError as e: return PropertyError(f"Invalid datetime: {e}") - return Value(f"isoparse({value!r})") + return Value(python_code=f"isoparse({value!r})", raw_value=value) return PropertyError(f"Cannot convert {value} to a datetime") def get_imports(self, *, prefix: str) -> set[str]: diff --git a/openapi_python_client/parser/properties/enum_property.py b/openapi_python_client/parser/properties/enum_property.py index 0f0db0d61..725aaceba 100644 --- a/openapi_python_client/parser/properties/enum_property.py +++ b/openapi_python_client/parser/properties/enum_property.py @@ -1,8 +1,8 @@ from __future__ import annotations -__all__ = ["EnumProperty"] +__all__ = ["EnumProperty", "ValueType"] -from typing import Any, ClassVar, List, Union, cast +from typing import Any, ClassVar, cast from attr import evolve from attrs import define @@ -16,7 +16,7 @@ from .schemas import Class, Schemas from .union import UnionProperty -ValueType = Union[str, int] +ValueType = str | int @define @@ -98,9 +98,7 @@ def build( # noqa: PLR0911 value_type = next(iter(value_types)) if value_type not in (str, int): return PropertyError(header=f"Unsupported enum type {value_type}", data=data), schemas - value_list = cast( - Union[List[int], List[str]], unchecked_value_list - ) # We checked this with all the value_types stuff + value_list = cast(list[int] | list[str], unchecked_value_list) # We 
checked this with all the value_types stuff if len(value_list) < len(enum): # Only one of the values was None, that becomes a union data.oneOf = [ @@ -121,7 +119,8 @@ def build( # noqa: PLR0911 if parent_name: class_name = f"{utils.pascal_case(parent_name)}{utils.pascal_case(class_name)}" class_info = Class.from_string(string=class_name, config=config) - values = EnumProperty.values_from_list(value_list) + var_names = data.model_extra.get("x-enum-varnames", []) if data.model_extra else [] + values = EnumProperty.values_from_list(value_list, class_info, var_names) if class_info.name in schemas.classes_by_name: existing = schemas.classes_by_name[class_info.name] @@ -159,15 +158,15 @@ def convert_value(self, value: Any) -> Value | PropertyError | None: if isinstance(value, self.value_type): inverse_values = {v: k for k, v in self.values.items()} try: - return Value(f"{self.class_info.name}.{inverse_values[value]}") + return Value(python_code=f"{self.class_info.name}.{inverse_values[value]}", raw_value=value) except KeyError: return PropertyError(detail=f"Value {value} is not valid for enum {self.name}") return PropertyError(detail=f"Cannot convert {value} to enum {self.name} of type {self.value_type}") - def get_base_type_string(self, *, quoted: bool = False) -> str: + def get_base_type_string(self) -> str: return self.class_info.name - def get_base_json_type_string(self, *, quoted: bool = False) -> str: + def get_base_json_type_string(self) -> str: return self.value_type.__name__ def get_imports(self, *, prefix: str) -> set[str]: @@ -183,14 +182,21 @@ def get_imports(self, *, prefix: str) -> set[str]: return imports @staticmethod - def values_from_list(values: list[str] | list[int]) -> dict[str, ValueType]: + def values_from_list( + values: list[str] | list[int], class_info: Class, var_names: list[str] + ) -> dict[str, ValueType]: """Convert a list of values into dict of {name: value}, where value can sometimes be None""" output: dict[str, ValueType] = {} + use_var_names = len(var_names) == len(values) for i, value in enumerate(values): - value = cast(Union[str, int], value) + value = cast(str | int, value) if isinstance(value, int): - if value < 0: + if use_var_names: + key = var_names[i] + sanitized_key = utils.snake_case(key).upper() + output[sanitized_key] = value + elif value < 0: output[f"VALUE_NEGATIVE_{-value}"] = value else: output[f"VALUE_{value}"] = value @@ -200,7 +206,10 @@ def values_from_list(values: list[str] | list[int]) -> dict[str, ValueType]: else: key = f"VALUE_{i}" if key in output: - raise ValueError(f"Duplicate key {key} in Enum") + raise ValueError( + f"Duplicate key {key} in enum {class_info.module_name}.{class_info.name}; " + f"consider setting literal_enums in your config" + ) sanitized_key = utils.snake_case(key).upper() output[sanitized_key] = utils.remove_string_escapes(value) return output diff --git a/openapi_python_client/parser/properties/file.py b/openapi_python_client/parser/properties/file.py index 505876b63..90bbf6aec 100644 --- a/openapi_python_client/parser/properties/file.py +++ b/openapi_python_client/parser/properties/file.py @@ -22,7 +22,7 @@ class FileProperty(PropertyProtocol): _type_string: ClassVar[str] = "File" # Return type of File.to_tuple() - _json_type_string: ClassVar[str] = "FileJsonType" + _json_type_string: ClassVar[str] = "FileTypes" template: ClassVar[str] = "file_property.py.jinja" @classmethod @@ -63,5 +63,5 @@ def get_imports(self, *, prefix: str) -> set[str]: back to the root of the generated client. 
""" imports = super().get_imports(prefix=prefix) - imports.update({f"from {prefix}types import File, FileJsonType", "from io import BytesIO"}) + imports.update({f"from {prefix}types import File, FileTypes", "from io import BytesIO"}) return imports diff --git a/openapi_python_client/parser/properties/float.py b/openapi_python_client/parser/properties/float.py index d8f469c69..a785db6d4 100644 --- a/openapi_python_client/parser/properties/float.py +++ b/openapi_python_client/parser/properties/float.py @@ -61,11 +61,11 @@ def convert_value(cls, value: Any) -> Value | None | PropertyError: if isinstance(value, str): try: parsed = float(value) - return Value(str(parsed)) + return Value(python_code=str(parsed), raw_value=value) except ValueError: return PropertyError(f"Invalid float value: {value}") if isinstance(value, float): - return Value(str(value)) + return Value(python_code=str(value), raw_value=value) if isinstance(value, int) and not isinstance(value, bool): - return Value(str(float(value))) + return Value(python_code=str(float(value)), raw_value=value) return PropertyError(f"Cannot convert {value} to a float") diff --git a/openapi_python_client/parser/properties/int.py b/openapi_python_client/parser/properties/int.py index ab7173d3d..1cd340fbd 100644 --- a/openapi_python_client/parser/properties/int.py +++ b/openapi_python_client/parser/properties/int.py @@ -58,12 +58,16 @@ def build( def convert_value(cls, value: Any) -> Value | None | PropertyError: if value is None or isinstance(value, Value): return value - if isinstance(value, str): + converted = value + if isinstance(converted, str): try: - int(value) + converted = float(converted) except ValueError: - return PropertyError(f"Invalid int value: {value}") - return Value(value) - if isinstance(value, int) and not isinstance(value, bool): - return Value(str(value)) + return PropertyError(f"Invalid int value: {converted}") + if isinstance(converted, float): + as_int = int(converted) + if converted == as_int: + converted = as_int + if isinstance(converted, int) and not isinstance(converted, bool): + return Value(python_code=str(converted), raw_value=value) return PropertyError(f"Invalid int value: {value}") diff --git a/openapi_python_client/parser/properties/list_property.py b/openapi_python_client/parser/properties/list_property.py index c78e50513..26ab0fdab 100644 --- a/openapi_python_client/parser/properties/list_property.py +++ b/openapi_python_client/parser/properties/list_property.py @@ -56,14 +56,30 @@ def build( `(result, schemas)` where `schemas` is an updated version of the input named the same including any inner classes that were defined and `result` is either the `ListProperty` or a `PropertyError`. """ - from . import property_from_data + from . 
import property_from_data # noqa: PLC0415 + + if data.items is None and not data.prefixItems: + return ( + PropertyError( + data=data, + detail="type array must have items or prefixItems defined", + ), + schemas, + ) + + items = data.prefixItems or [] + if data.items: + items.append(data.items) + + if len(items) == 1: + inner_schema = items[0] + else: + inner_schema = oai.Schema(anyOf=items) - if data.items is None: - return PropertyError(data=data, detail="type array must have items defined"), schemas inner_prop, schemas = property_from_data( name=f"{name}_item", required=True, - data=data.items, + data=inner_schema, schemas=schemas, parent_name=parent_name, config=config, @@ -89,11 +105,11 @@ def build( def convert_value(self, value: Any) -> Value | None | PropertyError: return None # pragma: no cover - def get_base_type_string(self, *, quoted: bool = False) -> str: - return f"List[{self.inner_property.get_type_string(quoted=not self.inner_property.is_base_type)}]" + def get_base_type_string(self) -> str: + return f"list[{self.inner_property.get_type_string()}]" - def get_base_json_type_string(self, *, quoted: bool = False) -> str: - return f"List[{self.inner_property.get_type_string(json=True, quoted=not self.inner_property.is_base_type)}]" + def get_base_json_type_string(self) -> str: + return f"list[{self.inner_property.get_type_string(json=True)}]" def get_instance_type_string(self) -> str: """Get a string representation of runtime type that should be used for `isinstance` checks""" @@ -109,7 +125,7 @@ def get_imports(self, *, prefix: str) -> set[str]: """ imports = super().get_imports(prefix=prefix) imports.update(self.inner_property.get_imports(prefix=prefix)) - imports.add("from typing import cast, List") + imports.add("from typing import cast") return imports def get_lazy_imports(self, *, prefix: str) -> set[str]: @@ -121,9 +137,6 @@ def get_type_string( self, no_optional: bool = False, json: bool = False, - *, - multipart: bool = False, - quoted: bool = False, ) -> str: """ Get a string representation of type that should be used when declaring this property @@ -134,11 +147,9 @@ def get_type_string( """ if json: type_string = self.get_base_json_type_string() - elif multipart: - type_string = "Tuple[None, bytes, str]" else: type_string = self.get_base_type_string() if no_optional or self.required: return type_string - return f"Union[Unset, {type_string}]" + return f"{type_string} | Unset" diff --git a/openapi_python_client/parser/properties/literal_enum_property.py b/openapi_python_client/parser/properties/literal_enum_property.py new file mode 100644 index 000000000..f0e0306a2 --- /dev/null +++ b/openapi_python_client/parser/properties/literal_enum_property.py @@ -0,0 +1,189 @@ +from __future__ import annotations + +__all__ = ["LiteralEnumProperty"] + +from typing import Any, ClassVar, cast + +from attr import evolve +from attrs import define + +from ... import Config, utils +from ... 
import schema as oai +from ...schema import DataType +from ..errors import PropertyError +from .none import NoneProperty +from .protocol import PropertyProtocol, Value +from .schemas import Class, Schemas +from .union import UnionProperty + +ValueType = str | int + + +@define +class LiteralEnumProperty(PropertyProtocol): + """A property that should use a literal enum""" + + name: str + required: bool + default: Value | None + python_name: utils.PythonIdentifier + description: str | None + example: str | None + values: set[ValueType] + class_info: Class + value_type: type[ValueType] + + template: ClassVar[str] = "literal_enum_property.py.jinja" + + _allowed_locations: ClassVar[set[oai.ParameterLocation]] = { + oai.ParameterLocation.QUERY, + oai.ParameterLocation.PATH, + oai.ParameterLocation.COOKIE, + oai.ParameterLocation.HEADER, + } + + @classmethod + def build( # noqa: PLR0911 + cls, + *, + data: oai.Schema, + name: str, + required: bool, + schemas: Schemas, + parent_name: str, + config: Config, + ) -> tuple[LiteralEnumProperty | NoneProperty | UnionProperty | PropertyError, Schemas]: + """ + Create a LiteralEnumProperty from schema data. + + Args: + data: The OpenAPI Schema which defines this enum. + name: The name to use for variables which receive this Enum's value (e.g. model property name) + required: Whether or not this Property is required in the calling context + schemas: The Schemas which have been defined so far (used to prevent naming collisions) + parent_name: The context in which this LiteralEnumProperty is defined, used to create more specific class names. + config: The global config for this run of the generator + + Returns: + A tuple containing either the created property or a PropertyError AND update schemas. + """ + + enum = data.enum or [] # The outer function checks for this, but mypy doesn't know that + + # OpenAPI allows for null as an enum value, but it doesn't make sense with how enums are constructed in Python. + # So instead, if null is a possible value, make the property nullable. 
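Since this property type is brand new in this diff, a concrete picture of its output may help: with literal_enums enabled in the generator config, an enum schema is rendered as a typing.Literal alias plus a check_* helper (the two imports added by get_imports below) instead of an Enum subclass. The module below is a hand-written approximation based on this diff's imports and template name, not the actual Jinja output; PetStatus is an invented schema name.

# Hypothetical schema: {"title": "PetStatus", "type": "string", "enum": ["available", "sold"]}
# Plausible shape of the generated models/pet_status.py when literal_enums is on:
from typing import Literal, cast

PetStatus = Literal["available", "sold"]

PET_STATUS_VALUES: set[PetStatus] = {"available", "sold"}


def check_pet_status(value: str) -> PetStatus:
    # runtime narrowing helper; the exact generated wording may differ
    if value in PET_STATUS_VALUES:
        return cast(PetStatus, value)
    raise TypeError(f"Unexpected value {value!r}, expected one of {PET_STATUS_VALUES!r}")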
+ # Mypy is not smart enough to know that the type is right though + unchecked_value_list = [value for value in enum if value is not None] # type: ignore + + # It's legal to have an enum that only contains null as a value, we don't bother constructing an enum for that + if len(unchecked_value_list) == 0: + return ( + NoneProperty.build( + name=name, + required=required, + default="None", + python_name=utils.PythonIdentifier(value=name, prefix=config.field_prefix), + description=None, + example=None, + ), + schemas, + ) + + value_types = {type(value) for value in unchecked_value_list} + if len(value_types) > 1: + return PropertyError( + header="Enum values must all be the same type", detail=f"Got {value_types}", data=data + ), schemas + value_type = next(iter(value_types)) + if value_type not in (str, int): + return PropertyError(header=f"Unsupported enum type {value_type}", data=data), schemas + value_list = cast(list[int] | list[str], unchecked_value_list) # We checked this with all the value_types stuff + + if len(value_list) < len(enum): # Only one of the values was None, that becomes a union + data.oneOf = [ + oai.Schema(type=DataType.NULL), + data.model_copy(update={"enum": value_list, "default": data.default}), + ] + data.enum = None + return UnionProperty.build( + data=data, + name=name, + required=required, + schemas=schemas, + parent_name=parent_name, + config=config, + ) + + class_name = data.title or name + if parent_name: + class_name = f"{utils.pascal_case(parent_name)}{utils.pascal_case(class_name)}" + class_info = Class.from_string(string=class_name, config=config) + values: set[str | int] = set(value_list) + + if class_info.name in schemas.classes_by_name: + existing = schemas.classes_by_name[class_info.name] + if not isinstance(existing, LiteralEnumProperty) or values != existing.values: + return ( + PropertyError( + detail=f"Found conflicting enums named {class_info.name} with incompatible values.", data=data + ), + schemas, + ) + + prop = LiteralEnumProperty( + name=name, + required=required, + class_info=class_info, + values=values, + value_type=value_type, + default=None, + python_name=utils.PythonIdentifier(value=name, prefix=config.field_prefix), + description=data.description, + example=data.example, + ) + checked_default = prop.convert_value(data.default) + if isinstance(checked_default, PropertyError): + checked_default.data = data + return checked_default, schemas + prop = evolve(prop, default=checked_default) + + schemas = evolve(schemas, classes_by_name={**schemas.classes_by_name, class_info.name: prop}) + return prop, schemas + + def convert_value(self, value: Any) -> Value | PropertyError | None: + if value is None or isinstance(value, Value): + return value + if isinstance(value, self.value_type): + if value in self.values: + return Value(python_code=repr(value), raw_value=value) + else: + return PropertyError(detail=f"Value {value} is not valid for enum {self.name}") + return PropertyError(detail=f"Cannot convert {value} to enum {self.name} of type {self.value_type}") + + def get_base_type_string(self) -> str: + return self.class_info.name + + def get_base_json_type_string(self) -> str: + return self.value_type.__name__ + + def get_instance_type_string(self) -> str: + return self.value_type.__name__ + + def get_imports(self, *, prefix: str) -> set[str]: + """ + Get a set of import strings that should be included when this property is used somewhere + + Args: + prefix: A prefix to put before any relative (local) module names. This should be the number of . 
to get + back to the root of the generated client. + """ + imports = super().get_imports(prefix=prefix) + imports.add("from typing import cast") + imports.add(f"from {prefix}models.{self.class_info.module_name} import {self.class_info.name}") + imports.add( + f"from {prefix}models.{self.class_info.module_name} import check_{self.get_class_name_snake_case()}" + ) + return imports + + def get_class_name_snake_case(self) -> str: + return utils.snake_case(self.class_info.name) diff --git a/openapi_python_client/parser/properties/merge_properties.py b/openapi_python_client/parser/properties/merge_properties.py new file mode 100644 index 000000000..57b4bccbe --- /dev/null +++ b/openapi_python_client/parser/properties/merge_properties.py @@ -0,0 +1,198 @@ +from __future__ import annotations + +from openapi_python_client.parser.properties.date import DateProperty +from openapi_python_client.parser.properties.datetime import DateTimeProperty +from openapi_python_client.parser.properties.file import FileProperty +from openapi_python_client.parser.properties.literal_enum_property import LiteralEnumProperty + +__all__ = ["merge_properties"] + +from typing import TypeVar, cast + +from attr import evolve + +from ..errors import PropertyError +from . import FloatProperty +from .any import AnyProperty +from .enum_property import EnumProperty +from .int import IntProperty +from .list_property import ListProperty +from .property import Property +from .protocol import PropertyProtocol +from .string import StringProperty + +PropertyT = TypeVar("PropertyT", bound=PropertyProtocol) + + +STRING_WITH_FORMAT_TYPES = (DateProperty, DateTimeProperty, FileProperty) + + +def merge_properties(prop1: Property, prop2: Property) -> Property | PropertyError: # noqa: PLR0911 + """Attempt to create a new property that incorporates the behavior of both. + + This is used when merging schemas with allOf, when two schemas define a property with the same name. + + OpenAPI defines allOf in terms of validation behavior: the input must pass the validation rules + defined in all the listed schemas. Our task here is slightly more difficult, since we must end + up with a single Property object that will be used to generate a single class property in the + generated code. Due to limitations of our internal model, this may not be possible for some + combinations of property attributes that OpenAPI supports (for instance, we have no way to represent + a string property that must match two different regexes). + + Properties can also have attributes that do not represent validation rules, such as "description" + and "example". OpenAPI does not define any overriding/aggregation rules for these in allOf. The + implementation here is, assuming prop1 and prop2 are in the same order that the schemas were in the + allOf, any such attributes that prop2 specifies will override the ones from prop1. 
+ """ + if isinstance(prop2, AnyProperty): + return _merge_common_attributes(prop1, prop2) + + if isinstance(prop1, AnyProperty): + # Use the base type of `prop2`, but keep the override order + return _merge_common_attributes(prop2, prop1, prop2) + + if isinstance(prop1, EnumProperty) or isinstance(prop2, EnumProperty): + return _merge_with_enum(prop1, prop2) + + if isinstance(prop1, LiteralEnumProperty) or isinstance(prop2, LiteralEnumProperty): + return _merge_with_literal_enum(prop1, prop2) + + if (merged := _merge_same_type(prop1, prop2)) is not None: + return merged + + if (merged := _merge_numeric(prop1, prop2)) is not None: + return merged + + if (merged := _merge_string_with_format(prop1, prop2)) is not None: + return merged + + return PropertyError( + detail=f"{prop1.get_type_string(no_optional=True)} can't be merged with {prop2.get_type_string(no_optional=True)}" + ) + + +def _merge_same_type(prop1: Property, prop2: Property) -> Property | None | PropertyError: + if type(prop1) is not type(prop2): + return None + + if prop1 == prop2: + # It's always OK to redefine a property with everything exactly the same + return prop1 + + if isinstance(prop1, ListProperty) and isinstance(prop2, ListProperty): + inner_property = merge_properties(prop1.inner_property, prop2.inner_property) # type: ignore + if isinstance(inner_property, PropertyError): + return PropertyError(detail=f"can't merge list properties: {inner_property.detail}") + prop1.inner_property = inner_property + + # For all other property types, there aren't any special attributes that affect validation, so just + # apply the rules for common attributes like "description". + return _merge_common_attributes(prop1, prop2) + + +def _merge_string_with_format(prop1: Property, prop2: Property) -> Property | None | PropertyError: + """Merge a string that has no format with a string that has a format""" + # Here we need to use the DateProperty/DateTimeProperty/FileProperty as the base so that we preserve + # its class, but keep the correct override order for merging the attributes. + if isinstance(prop1, StringProperty) and isinstance(prop2, STRING_WITH_FORMAT_TYPES): + # Use the more specific class as a base, but keep the correct override order + return _merge_common_attributes(prop2, prop1, prop2) + elif isinstance(prop2, StringProperty) and isinstance(prop1, STRING_WITH_FORMAT_TYPES): + return _merge_common_attributes(prop1, prop2) + else: + return None + + +def _merge_numeric(prop1: Property, prop2: Property) -> IntProperty | None | PropertyError: + """Merge IntProperty with FloatProperty""" + if isinstance(prop1, IntProperty) and isinstance(prop2, IntProperty | FloatProperty): + return _merge_common_attributes(prop1, prop2) + elif isinstance(prop2, IntProperty) and isinstance(prop1, IntProperty | FloatProperty): + # Use the IntProperty as a base since it's more restrictive, but keep the correct override order + return _merge_common_attributes(prop2, prop1, prop2) + else: + return None + + +def _merge_with_enum(prop1: PropertyProtocol, prop2: PropertyProtocol) -> EnumProperty | PropertyError: + if isinstance(prop1, EnumProperty) and isinstance(prop2, EnumProperty): + # We want the narrowest validation rules that fit both, so use whichever values list is a + # subset of the other. 
+ if _values_are_subset(prop1, prop2): + values = prop1.values + class_info = prop1.class_info + elif _values_are_subset(prop2, prop1): + values = prop2.values + class_info = prop2.class_info + else: + return PropertyError(detail="can't redefine an enum property with incompatible lists of values") + return _merge_common_attributes(evolve(prop1, values=values, class_info=class_info), prop2) + + # If enum values were specified for just one of the properties, use those. + enum_prop = prop1 if isinstance(prop1, EnumProperty) else cast(EnumProperty, prop2) + non_enum_prop = prop2 if isinstance(prop1, EnumProperty) else prop1 + if (isinstance(non_enum_prop, IntProperty) and enum_prop.value_type is int) or ( + isinstance(non_enum_prop, StringProperty) and enum_prop.value_type is str + ): + return _merge_common_attributes(enum_prop, prop1, prop2) + return PropertyError( + detail=f"can't combine enum of type {enum_prop.value_type} with {non_enum_prop.get_type_string(no_optional=True)}" + ) + + +def _merge_with_literal_enum(prop1: PropertyProtocol, prop2: PropertyProtocol) -> LiteralEnumProperty | PropertyError: + if isinstance(prop1, LiteralEnumProperty) and isinstance(prop2, LiteralEnumProperty): + # We want the narrowest validation rules that fit both, so use whichever values list is a + # subset of the other. + if prop1.values <= prop2.values: + values = prop1.values + class_info = prop1.class_info + elif prop2.values <= prop1.values: + values = prop2.values + class_info = prop2.class_info + else: + return PropertyError(detail="can't redefine a literal enum property with incompatible lists of values") + return _merge_common_attributes(evolve(prop1, values=values, class_info=class_info), prop2) + + # If enum values were specified for just one of the properties, use those. + enum_prop = prop1 if isinstance(prop1, LiteralEnumProperty) else cast(LiteralEnumProperty, prop2) + non_enum_prop = prop2 if isinstance(prop1, LiteralEnumProperty) else prop1 + if (isinstance(non_enum_prop, IntProperty) and enum_prop.value_type is int) or ( + isinstance(non_enum_prop, StringProperty) and enum_prop.value_type is str + ): + return _merge_common_attributes(enum_prop, prop1, prop2) + return PropertyError( + detail=f"can't combine literal enum of type {enum_prop.value_type} with {non_enum_prop.get_type_string(no_optional=True)}" + ) + + +def _merge_common_attributes(base: PropertyT, *extend_with: PropertyProtocol) -> PropertyT | PropertyError: + """Create a new instance based on base, overriding basic attributes with values from extend_with, in order. + + For "default", "description", and "example", a non-None value overrides any value from a previously + specified property. The behavior is similar to using the spread operator with dictionaries, except + that None means "not specified". + + For "required", any True value overrides all other values (a property that was previously required + cannot become optional). 
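The override rules just described boil down to a simple fold over the attributes. The toy below restates them outside the project's property classes (ToyProp and merge_common are invented names) and deliberately omits the default-value re-conversion that the real helper performs through convert_value.

from dataclasses import dataclass, replace


@dataclass(frozen=True)
class ToyProp:
    required: bool
    description: str | None = None
    example: str | None = None


def merge_common(base: ToyProp, *extend_with: ToyProp) -> ToyProp:
    current = base
    for override in extend_with:
        current = replace(
            current,
            required=current.required or override.required,           # required can only tighten
            description=override.description or current.description,  # later schema wins when set
            example=override.example or current.example,
        )
    return current


merged = merge_common(
    ToyProp(required=False, description="from the first allOf entry"),
    ToyProp(required=True),                    # adds the required constraint
    ToyProp(required=False, example="hello"),  # contributes an example, keeps the description
)
assert merged == ToyProp(required=True, description="from the first allOf entry", example="hello")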
+ """ + current = base + for override in extend_with: + if override.default is not None: + override_default = current.convert_value(override.default.raw_value) + else: + override_default = None + if isinstance(override_default, PropertyError): + return override_default + current = evolve( + current, # type: ignore # can't prove that every property type is an attrs class, but it is + required=current.required or override.required, + default=override_default or current.default, + description=override.description or current.description, + example=override.example or current.example, + ) + return current + + +def _values_are_subset(prop1: EnumProperty, prop2: EnumProperty) -> bool: + return set(prop1.values.items()) <= set(prop2.values.items()) diff --git a/openapi_python_client/parser/properties/model_property.py b/openapi_python_client/parser/properties/model_property.py index bde45ac05..636b71a34 100644 --- a/openapi_python_client/parser/properties/model_property.py +++ b/openapi_python_client/parser/properties/model_property.py @@ -7,8 +7,9 @@ from ... import Config, utils from ... import schema as oai +from ...utils import PythonIdentifier from ..errors import ParseError, PropertyError -from .enum_property import EnumProperty +from .any import AnyProperty from .protocol import PropertyProtocol, Value from .schemas import Class, ReferencePath, Schemas, parse_reference_path @@ -30,8 +31,8 @@ class ModelProperty(PropertyProtocol): optional_properties: list[Property] | None relative_imports: set[str] | None lazy_imports: set[str] | None - additional_properties: bool | Property | None - _json_type_string: ClassVar[str] = "Dict[str, Any]" + additional_properties: Property | None + _json_type_string: ClassVar[str] = "dict[str, Any]" template: ClassVar[str] = "model_property.py.jinja" json_is_dict: ClassVar[bool] = True @@ -78,7 +79,7 @@ def build( optional_properties: list[Property] | None = None relative_imports: set[str] | None = None lazy_imports: set[str] | None = None - additional_properties: bool | Property | None = None + additional_properties: Property | None = None if process_properties: data_or_err, schemas = _process_property_data( data=data, schemas=schemas, class_info=class_info, config=config, roots=model_roots @@ -117,7 +118,11 @@ def build( ) return error, schemas - schemas = evolve(schemas, classes_by_name={**schemas.classes_by_name, class_info.name: prop}) + schemas = evolve( + schemas, + classes_by_name={**schemas.classes_by_name, class_info.name: prop}, + models_to_process=[*schemas.models_to_process, prop], + ) return prop, schemas @classmethod @@ -135,8 +140,8 @@ def self_import(self) -> str: """Constructs a self import statement from this ModelProperty's attributes""" return f"models.{self.class_info.module_name} import {self.class_info.name}" - def get_base_type_string(self, *, quoted: bool = False) -> str: - return f'"{self.class_info.name}"' if quoted else self.class_info.name + def get_base_type_string(self) -> str: + return self.class_info.name def get_imports(self, *, prefix: str) -> set[str]: """ @@ -149,7 +154,6 @@ def get_imports(self, *, prefix: str) -> set[str]: imports = super().get_imports(prefix=prefix) imports.update( { - "from typing import Dict", "from typing import cast", } ) @@ -184,9 +188,6 @@ def get_type_string( self, no_optional: bool = False, json: bool = False, - *, - multipart: bool = False, - quoted: bool = False, ) -> str: """ Get a string representation of type that should be used when declaring this property @@ -197,76 +198,17 @@ def 
get_type_string( """ if json: type_string = self.get_base_json_type_string() - elif multipart: - type_string = "Tuple[None, bytes, str]" else: type_string = self.get_base_type_string() - if quoted: - if type_string == self.class_info.name: - type_string = f"'{type_string}'" - if no_optional or self.required: return type_string - return f"Union[Unset, {type_string}]" + return f"{type_string} | Unset" from .property import Property # noqa: E402 -def _values_are_subset(first: EnumProperty, second: EnumProperty) -> bool: - return set(first.values.items()) <= set(second.values.items()) - - -def _types_are_subset(first: EnumProperty, second: Property) -> bool: - from . import IntProperty, StringProperty - - if first.value_type == int and isinstance(second, IntProperty): - return True - if first.value_type == str and isinstance(second, StringProperty): - return True - return False - - -def _enum_subset(first: Property, second: Property) -> EnumProperty | None: - """Return the EnumProperty that is the subset of the other, if possible.""" - - if isinstance(first, EnumProperty): - if isinstance(second, EnumProperty): - if _values_are_subset(first, second): - return first - if _values_are_subset(second, first): - return second - return None - return first if _types_are_subset(first, second) else None - - if isinstance(second, EnumProperty) and _types_are_subset(second, first): - return second - return None - - -def _merge_properties(first: Property, second: Property) -> Property | PropertyError: - required = first.required or second.required - - err = None - - if first.__class__ == second.__class__: - first = evolve(first, required=required) - second = evolve(second, required=required) - if first == second: - return first - err = PropertyError(header="Cannot merge properties", detail="Properties has conflicting values") - - enum_subset = _enum_subset(first, second) - if enum_subset is not None: - return evolve(enum_subset, required=required) - - return err or PropertyError( - header="Cannot merge properties", - detail=f"{first.__class__}, {second.__class__}Properties have incompatible types", - ) - - def _resolve_naming_conflict(first: Property, second: Property, config: Config) -> PropertyError | None: first.set_python_name(first.name, config=config, skip_snake_case=True) second.set_python_name(second.name, config=config, skip_snake_case=True) @@ -294,7 +236,8 @@ def _process_properties( # noqa: PLR0912, PLR0911 config: Config, roots: set[ReferencePath | utils.ClassName], ) -> _PropertyData | PropertyError: - from . import property_from_data + from . 
import property_from_data # noqa: PLC0415 + from .merge_properties import merge_properties # noqa: PLC0415 properties: dict[str, Property] = {} relative_imports: set[str] = set() @@ -305,26 +248,26 @@ def _add_if_no_conflict(new_prop: Property) -> PropertyError | None: nonlocal properties name_conflict = properties.get(new_prop.name) - merged_prop_or_error = _merge_properties(name_conflict, new_prop) if name_conflict else new_prop - if isinstance(merged_prop_or_error, PropertyError): - merged_prop_or_error.header = ( - f"Found conflicting properties named {new_prop.name} when creating {class_name}" - ) - return merged_prop_or_error + merged_prop = merge_properties(name_conflict, new_prop) if name_conflict else new_prop + if isinstance(merged_prop, PropertyError): + merged_prop.header = f"Found conflicting properties named {new_prop.name} when creating {class_name}" + return merged_prop for other_prop in properties.values(): - if other_prop.name == merged_prop_or_error.name: + if other_prop.name == merged_prop.name: continue # Same property, probably just got merged - if other_prop.python_name != merged_prop_or_error.python_name: + if other_prop.python_name != merged_prop.python_name: continue - naming_error = _resolve_naming_conflict(merged_prop_or_error, other_prop, config) + naming_error = _resolve_naming_conflict(merged_prop, other_prop, config) if naming_error is not None: return naming_error - properties[merged_prop_or_error.name] = merged_prop_or_error + properties[merged_prop.name] = merged_prop return None - unprocessed_props = data.properties or {} + unprocessed_props: list[tuple[str, oai.Reference | oai.Schema]] = ( + list(data.properties.items()) if data.properties else [] + ) for sub_prop in data.allOf: if isinstance(sub_prop, oai.Reference): ref_path = parse_reference_path(sub_prop.ref) @@ -346,10 +289,15 @@ def _add_if_no_conflict(new_prop: Property) -> PropertyError | None: return err schemas.add_dependencies(ref_path=ref_path, roots=roots) else: - unprocessed_props.update(sub_prop.properties or {}) + unprocessed_props.extend(sub_prop.properties.items() if sub_prop.properties else []) required_set.update(sub_prop.required or []) - for key, value in unprocessed_props.items(): + # Update properties that are marked as required in the schema + for prop_name in required_set: + if prop_name in properties and not properties[prop_name].required: + properties[prop_name] = evolve(properties[prop_name], required=True) + + for key, value in unprocessed_props: prop_required = key in required_set prop_or_error: Property | (PropertyError | None) prop_or_error, schemas = property_from_data( @@ -386,6 +334,16 @@ def _add_if_no_conflict(new_prop: Property) -> PropertyError | None: ) +ANY_ADDITIONAL_PROPERTY = AnyProperty.build( + name="additional", + required=True, + default=None, + description="", + python_name=PythonIdentifier(value="additional", prefix=""), + example=None, +) + + def _get_additional_properties( *, schema_additional: None | (bool | (oai.Reference | oai.Schema)), @@ -393,18 +351,20 @@ def _get_additional_properties( class_name: utils.ClassName, config: Config, roots: set[ReferencePath | utils.ClassName], -) -> tuple[bool | (Property | PropertyError), Schemas]: - from . import property_from_data +) -> tuple[Property | None | PropertyError, Schemas]: + from . 
import property_from_data # noqa: PLC0415 if schema_additional is None: - return True, schemas + return ANY_ADDITIONAL_PROPERTY, schemas if isinstance(schema_additional, bool): - return schema_additional, schemas + if schema_additional: + return ANY_ADDITIONAL_PROPERTY, schemas + return None, schemas if isinstance(schema_additional, oai.Schema) and not any(schema_additional.model_dump().values()): # An empty schema - return True, schemas + return ANY_ADDITIONAL_PROPERTY, schemas additional_properties, schemas = property_from_data( name="AdditionalProperty", @@ -425,7 +385,7 @@ def _process_property_data( class_info: Class, config: Config, roots: set[ReferencePath | utils.ClassName], -) -> tuple[tuple[_PropertyData, bool | Property] | PropertyError, Schemas]: +) -> tuple[tuple[_PropertyData, Property | None] | PropertyError, Schemas]: property_data = _process_properties( data=data, schemas=schemas, class_name=class_info.name, config=config, roots=roots ) @@ -442,7 +402,7 @@ def _process_property_data( ) if isinstance(additional_properties, PropertyError): return additional_properties, schemas - elif isinstance(additional_properties, bool): + elif additional_properties is None: pass else: property_data.relative_imports.update(additional_properties.get_imports(prefix="..")) diff --git a/openapi_python_client/parser/properties/none.py b/openapi_python_client/parser/properties/none.py index c329dac40..9c473693d 100644 --- a/openapi_python_client/parser/properties/none.py +++ b/openapi_python_client/parser/properties/none.py @@ -57,5 +57,5 @@ def convert_value(cls, value: Any) -> Value | None | PropertyError: return value if isinstance(value, str): if value == "None": - return Value(value) + return Value(python_code=value, raw_value=value) return PropertyError(f"Value {value} is not valid, only None is allowed") diff --git a/openapi_python_client/parser/properties/property.py b/openapi_python_client/parser/properties/property.py index fa3a26beb..5927c6e91 100644 --- a/openapi_python_client/parser/properties/property.py +++ b/openapi_python_client/parser/properties/property.py @@ -1,8 +1,7 @@ __all__ = ["Property"] -from typing import Union -from typing_extensions import TypeAlias +from typing import TypeAlias from .any import AnyProperty from .boolean import BooleanProperty @@ -14,24 +13,28 @@ from .float import FloatProperty from .int import IntProperty from .list_property import ListProperty +from .literal_enum_property import LiteralEnumProperty from .model_property import ModelProperty from .none import NoneProperty from .string import StringProperty from .union import UnionProperty +from .uuid import UuidProperty -Property: TypeAlias = Union[ - AnyProperty, - BooleanProperty, - ConstProperty, - DateProperty, - DateTimeProperty, - EnumProperty, - FileProperty, - FloatProperty, - IntProperty, - ListProperty, - ModelProperty, - NoneProperty, - StringProperty, - UnionProperty, -] +Property: TypeAlias = ( + AnyProperty + | BooleanProperty + | ConstProperty + | DateProperty + | DateTimeProperty + | EnumProperty + | LiteralEnumProperty + | FileProperty + | FloatProperty + | IntProperty + | ListProperty + | ModelProperty + | NoneProperty + | StringProperty + | UnionProperty + | UuidProperty +) diff --git a/openapi_python_client/parser/properties/protocol.py b/openapi_python_client/parser/properties/protocol.py index b8237923d..138fa7002 100644 --- a/openapi_python_client/parser/properties/protocol.py +++ b/openapi_python_client/parser/properties/protocol.py @@ -3,6 +3,7 @@ __all__ = 
["PropertyProtocol", "Value"] from abc import abstractmethod +from dataclasses import dataclass from typing import TYPE_CHECKING, Any, ClassVar, Protocol, TypeVar from ... import Config @@ -16,8 +17,15 @@ ModelProperty = "ModelProperty" -class Value(str): - """Represents a valid (converted) value for a property""" +@dataclass +class Value: + """ + Some literal values in OpenAPI documents (like defaults) have to be converted into Python code safely + (with string escaping, for example). We still keep the `raw_value` around for merging `allOf`. + """ + + python_code: str + raw_value: Any PropertyType = TypeVar("PropertyType", bound="PropertyProtocol") @@ -80,21 +88,18 @@ def set_python_name(self, new_name: str, config: Config, skip_snake_case: bool = PythonIdentifier(value=new_name, prefix=config.field_prefix, skip_snake_case=skip_snake_case), ) - def get_base_type_string(self, *, quoted: bool = False) -> str: + def get_base_type_string(self) -> str: """Get the string describing the Python type of this property. Base types no require quoting.""" - return f'"{self._type_string}"' if not self.is_base_type and quoted else self._type_string + return self._type_string - def get_base_json_type_string(self, *, quoted: bool = False) -> str: + def get_base_json_type_string(self) -> str: """Get the string describing the JSON type of this property. Base types no require quoting.""" - return f'"{self._json_type_string}"' if not self.is_base_type and quoted else self._json_type_string + return self._json_type_string def get_type_string( self, no_optional: bool = False, json: bool = False, - *, - multipart: bool = False, - quoted: bool = False, ) -> str: """ Get a string representation of type that should be used when declaring this property @@ -102,21 +107,19 @@ def get_type_string( Args: no_optional: Do not include Optional or Unset even if the value is optional (needed for isinstance checks) json: True if the type refers to the property after JSON serialization - multipart: True if the type should be used in a multipart request - quoted: True if the type should be wrapped in quotes (if not a base type) """ if json: - type_string = self.get_base_json_type_string(quoted=quoted) + type_string = self.get_base_json_type_string() else: - type_string = self.get_base_type_string(quoted=quoted) + type_string = self.get_base_type_string() if no_optional or self.required: return type_string - return f"Union[Unset, {type_string}]" + return f"{type_string} | Unset" def get_instance_type_string(self) -> str: """Get a string representation of runtime type that should be used for `isinstance` checks""" - return self.get_type_string(no_optional=True, quoted=False) + return self.get_type_string(no_optional=True) # noinspection PyUnusedLocal def get_imports(self, *, prefix: str) -> set[str]: @@ -129,7 +132,6 @@ def get_imports(self, *, prefix: str) -> set[str]: """ imports = set() if not self.required: - imports.add("from typing import Union") imports.add(f"from {prefix}types import UNSET, Unset") return imports @@ -146,32 +148,21 @@ def to_string(self) -> str: """How this should be declared in a dataclass""" default: str | None if self.default is not None: - default = self.default + default = self.default.python_code elif not self.required: default = "UNSET" else: default = None if default is not None: - return f"{self.python_name}: {self.get_type_string(quoted=True)} = {default}" - return f"{self.python_name}: {self.get_type_string(quoted=True)}" + return f"{self.python_name}: {self.get_type_string()} = {default}" + 
return f"{self.python_name}: {self.get_type_string()}" def to_docstring(self) -> str: """Returns property docstring""" doc = f"{self.python_name} ({self.get_type_string()}): {self.description or ''}" if self.default: - doc += f" Default: {self.default}." + doc += f" Default: {self.default.python_code}." if self.example: doc += f" Example: {self.example}." return doc - - @property - def is_base_type(self) -> bool: - """Base types, represented by any other of `Property` than `ModelProperty` should not be quoted.""" - from . import ListProperty, ModelProperty, UnionProperty - - return self.__class__.__name__ not in { - ModelProperty.__name__, - ListProperty.__name__, - UnionProperty.__name__, - } diff --git a/openapi_python_client/parser/properties/schemas.py b/openapi_python_client/parser/properties/schemas.py index 9e4fc545e..c783eb4fe 100644 --- a/openapi_python_client/parser/properties/schemas.py +++ b/openapi_python_client/parser/properties/schemas.py @@ -1,16 +1,16 @@ __all__ = [ "Class", - "Schemas", "Parameters", "ReferencePath", + "Schemas", + "parameter_from_data", + "parameter_from_reference", "parse_reference_path", - "update_schemas_with_data", "update_parameters_with_data", - "parameter_from_reference", - "parameter_from_data", + "update_schemas_with_data", ] -from typing import TYPE_CHECKING, Dict, List, NewType, Set, Tuple, Union, cast +from typing import TYPE_CHECKING, NewType, cast from urllib.parse import urlparse from attrs import define, evolve, field @@ -22,15 +22,17 @@ from ..errors import ParameterError, ParseError, PropertyError if TYPE_CHECKING: # pragma: no cover + from .model_property import ModelProperty from .property import Property else: + ModelProperty = "ModelProperty" Property = "Property" ReferencePath = NewType("ReferencePath", str) -def parse_reference_path(ref_path_raw: str) -> Union[ReferencePath, ParseError]: +def parse_reference_path(ref_path_raw: str) -> ReferencePath | ParseError: """ Takes a raw string provided in a `$ref` and turns it into a validated `_ReferencePath` or a `ParseError` if validation fails. @@ -44,6 +46,13 @@ def parse_reference_path(ref_path_raw: str) -> Union[ReferencePath, ParseError]: return cast(ReferencePath, parsed.fragment) +def get_reference_simple_name(ref_path: str) -> str: + """ + Takes a path like `/components/schemas/NameOfThing` and returns a string like `NameOfThing`. 
+ """ + return ref_path.split("/")[-1] + + @define class Class: """Represents Python class which will be generated from an OpenAPI schema""" @@ -54,7 +63,7 @@ class Class: @staticmethod def from_string(*, string: str, config: Config) -> "Class": """Get a Class from an arbitrary string""" - class_name = string.split("/")[-1] # Get rid of ref path stuff + class_name = get_reference_simple_name(string) # Get rid of ref path stuff class_name = ClassName(class_name, config.field_prefix) override = config.class_overrides.get(class_name) @@ -74,12 +83,13 @@ def from_string(*, string: str, config: Config) -> "Class": class Schemas: """Structure for containing all defined, shareable, and reusable schemas (attr classes and Enums)""" - classes_by_reference: Dict[ReferencePath, Property] = field(factory=dict) - dependencies: Dict[ReferencePath, Set[Union[ReferencePath, ClassName]]] = field(factory=dict) - classes_by_name: Dict[ClassName, Property] = field(factory=dict) - errors: List[ParseError] = field(factory=list) + classes_by_reference: dict[ReferencePath, Property] = field(factory=dict) + dependencies: dict[ReferencePath, set[ReferencePath | ClassName]] = field(factory=dict) + classes_by_name: dict[ClassName, Property] = field(factory=dict) + models_to_process: list[ModelProperty] = field(factory=list) + errors: list[ParseError] = field(factory=list) - def add_dependencies(self, ref_path: ReferencePath, roots: Set[Union[ReferencePath, ClassName]]) -> None: + def add_dependencies(self, ref_path: ReferencePath, roots: set[ReferencePath | ClassName]) -> None: """Record new dependencies on the given ReferencePath Args: @@ -92,7 +102,7 @@ def add_dependencies(self, ref_path: ReferencePath, roots: Set[Union[ReferencePa def update_schemas_with_data( *, ref_path: ReferencePath, data: oai.Schema, schemas: Schemas, config: Config -) -> Union[Schemas, PropertyError]: +) -> Schemas | PropertyError: """ Update a `Schemas` using some new reference. @@ -108,9 +118,9 @@ def update_schemas_with_data( See Also: - https://swagger.io/docs/specification/using-ref/ """ - from . import property_from_data + from . import property_from_data # noqa: PLC0415 - prop: Union[PropertyError, Property] + prop: PropertyError | Property prop, schemas = property_from_data( data=data, name=ref_path, @@ -140,18 +150,18 @@ def update_schemas_with_data( class Parameters: """Structure for containing all defined, shareable, and reusable parameters""" - classes_by_reference: Dict[ReferencePath, Parameter] = field(factory=dict) - classes_by_name: Dict[ClassName, Parameter] = field(factory=dict) - errors: List[ParseError] = field(factory=list) + classes_by_reference: dict[ReferencePath, Parameter] = field(factory=dict) + classes_by_name: dict[ClassName, Parameter] = field(factory=dict) + errors: list[ParseError] = field(factory=list) def parameter_from_data( *, name: str, - data: Union[oai.Reference, oai.Parameter], + data: oai.Reference | oai.Parameter, parameters: Parameters, config: Config, -) -> Tuple[Union[Parameter, ParameterError], Parameters]: +) -> tuple[Parameter | ParameterError, Parameters]: """Generates parameters from an OpenAPI Parameter spec.""" if isinstance(data, oai.Reference): @@ -176,7 +186,7 @@ def parameter_from_data( def update_parameters_with_data( *, ref_path: ReferencePath, data: oai.Parameter, parameters: Parameters, config: Config -) -> Union[Parameters, ParameterError]: +) -> Parameters | ParameterError: """ Update a `Parameters` using some new reference. 
@@ -209,9 +219,9 @@ def update_parameters_with_data( def parameter_from_reference( *, - param: Union[oai.Reference, Parameter], + param: oai.Reference | Parameter, parameters: Parameters, -) -> Union[Parameter, ParameterError]: +) -> Parameter | ParameterError: """ Returns a Parameter from a Reference or the Parameter itself if one was provided. diff --git a/openapi_python_client/parser/properties/string.py b/openapi_python_client/parser/properties/string.py index 1afe02c62..e40c1eee6 100644 --- a/openapi_python_client/parser/properties/string.py +++ b/openapi_python_client/parser/properties/string.py @@ -21,8 +21,6 @@ class StringProperty(PropertyProtocol): python_name: PythonIdentifier description: str | None example: str | None - max_length: int | None = None - pattern: str | None = None _type_string: ClassVar[str] = "str" _json_type_string: ClassVar[str] = "str" _allowed_locations: ClassVar[set[oai.ParameterLocation]] = { @@ -41,7 +39,6 @@ def build( python_name: PythonIdentifier, description: str | None, example: str | None, - pattern: str | None = None, ) -> StringProperty | PropertyError: checked_default = cls.convert_value(default) return cls( @@ -51,7 +48,6 @@ def build( python_name=python_name, description=description, example=example, - pattern=pattern, ) @classmethod @@ -69,4 +65,4 @@ def convert_value(cls, value: Any) -> Value | None: return value if not isinstance(value, str): value = str(value) - return Value(repr(utils.remove_string_escapes(value))) + return Value(python_code=repr(utils.remove_string_escapes(value)), raw_value=value) diff --git a/openapi_python_client/parser/properties/union.py b/openapi_python_client/parser/properties/union.py index 8b7b02a48..3091c793b 100644 --- a/openapi_python_client/parser/properties/union.py +++ b/openapi_python_client/parser/properties/union.py @@ -1,5 +1,6 @@ from __future__ import annotations +from collections.abc import Iterator from itertools import chain from typing import Any, ClassVar, cast @@ -28,7 +29,14 @@ class UnionProperty(PropertyProtocol): @classmethod def build( - cls, *, data: oai.Schema, name: str, required: bool, schemas: Schemas, parent_name: str, config: Config + cls, + *, + data: oai.Schema, + name: str, + required: bool, + schemas: Schemas, + parent_name: str, + config: Config, ) -> tuple[UnionProperty | PropertyError, Schemas]: """ Create a `UnionProperty` the right way. @@ -45,7 +53,7 @@ def build( `(result, schemas)` where `schemas` is the updated version of the input `schemas` and `result` is the constructed `UnionProperty` or a `PropertyError` describing what went wrong. """ - from . import property_from_data + from . 
import property_from_data # noqa: PLC0415 sub_properties: list[PropertyProtocol] = [] @@ -55,8 +63,19 @@ def build( type_list_data.append(data.model_copy(update={"type": _type, "default": None})) for i, sub_prop_data in enumerate(chain(data.anyOf, data.oneOf, type_list_data)): + # If a schema has a unique title property, we can use that to carry forward a descriptive name instead of "type_0" + subscript: str + if ( + isinstance(sub_prop_data, oai.Schema) + and sub_prop_data.title is not None + and sub_prop_data.title != data.title + ): + subscript = sub_prop_data.title + else: + subscript = f"type_{i}" + sub_prop, schemas = property_from_data( - name=f"{name}_type_{i}", + name=f"{name}_{subscript}", required=True, data=sub_prop_data, schemas=schemas, @@ -64,25 +83,32 @@ def build( config=config, ) if isinstance(sub_prop, PropertyError): - return PropertyError(detail=f"Invalid property in union {name}", data=sub_prop_data), schemas + return ( + PropertyError(detail=f"Invalid property in union {name}", data=sub_prop_data), + schemas, + ) sub_properties.append(sub_prop) - def flatten_union_properties(sub_properties: list[PropertyProtocol]) -> list[PropertyProtocol]: - flattened = [] - for sub_prop in sub_properties: - if isinstance(sub_prop, UnionProperty): - flattened.extend(flatten_union_properties(sub_prop.inner_properties)) + def flatten_union_properties(possibly_nested: list[PropertyProtocol]) -> Iterator[PropertyProtocol]: + for to_flatten in possibly_nested: + if isinstance(to_flatten, UnionProperty): + yield from flatten_union_properties(to_flatten.inner_properties) else: - flattened.append(sub_prop) - return flattened + yield to_flatten - sub_properties = flatten_union_properties(sub_properties) + seen_types = set() + inner_properties: list[PropertyProtocol] = [] + for flattened in flatten_union_properties(sub_properties): + type_string = flattened.get_type_string(no_optional=True) + if type_string not in seen_types: + seen_types.add(type_string) + inner_properties.append(flattened) prop = UnionProperty( name=name, required=required, default=None, - inner_properties=sub_properties, + inner_properties=inner_properties, python_name=PythonIdentifier(value=name, prefix=config.field_prefix), description=data.description, example=data.example, @@ -106,9 +132,12 @@ def convert_value(self, value: Any) -> Value | None | PropertyError: return value_or_error return value_or_error - def _get_inner_type_strings(self, json: bool, multipart: bool) -> set[str]: + def _get_inner_type_strings(self, json: bool) -> set[str]: return { - p.get_type_string(no_optional=True, json=json, multipart=multipart, quoted=not p.is_base_type) + p.get_type_string( + no_optional=True, + json=json, + ) for p in self.inner_properties } @@ -116,15 +145,15 @@ def _get_inner_type_strings(self, json: bool, multipart: bool) -> set[str]: def _get_type_string_from_inner_type_strings(inner_types: set[str]) -> str: if len(inner_types) == 1: return inner_types.pop() - return f"Union[{', '.join(sorted(inner_types))}]" + return " | ".join(sorted(inner_types, key=lambda x: x.lower())) - def get_base_type_string(self, *, quoted: bool = False) -> str: - return self._get_type_string_from_inner_type_strings(self._get_inner_type_strings(json=False, multipart=False)) + def get_base_type_string(self) -> str: + return self._get_type_string_from_inner_type_strings(self._get_inner_type_strings(json=False)) - def get_base_json_type_string(self, *, quoted: bool = False) -> str: - return 
self._get_type_string_from_inner_type_strings(self._get_inner_type_strings(json=True, multipart=False)) + def get_base_json_type_string(self) -> str: + return self._get_type_string_from_inner_type_strings(self._get_inner_type_strings(json=True)) - def get_type_strings_in_union(self, *, no_optional: bool = False, json: bool, multipart: bool) -> set[str]: + def get_type_strings_in_union(self, *, no_optional: bool = False, json: bool) -> set[str]: """ Get the set of all the types that should appear within the `Union` representing this property. @@ -133,12 +162,11 @@ def get_type_strings_in_union(self, *, no_optional: bool = False, json: bool, mu Args: no_optional: Do not include `None` or `Unset` in this set. json: If True, this returns the JSON types, not the Python types, of this property. - multipart: If True, this returns the multipart types, not the Python types, of this property. Returns: A set of strings containing the types that should appear within `Union`. """ - type_strings = self._get_inner_type_strings(json=json, multipart=multipart) + type_strings = self._get_inner_type_strings(json=json) if no_optional: return type_strings if not self.required: @@ -149,16 +177,13 @@ def get_type_string( self, no_optional: bool = False, json: bool = False, - *, - multipart: bool = False, - quoted: bool = False, ) -> str: """ Get a string representation of type that should be used when declaring this property. This implementation differs slightly from `Property.get_type_string` in order to collapse nested union types. """ - type_strings_in_union = self.get_type_strings_in_union(no_optional=no_optional, json=json, multipart=multipart) + type_strings_in_union = self.get_type_strings_in_union(no_optional=no_optional, json=json) return self._get_type_string_from_inner_type_strings(type_strings_in_union) def get_imports(self, *, prefix: str) -> set[str]: @@ -172,7 +197,7 @@ def get_imports(self, *, prefix: str) -> set[str]: imports = super().get_imports(prefix=prefix) for inner_prop in self.inner_properties: imports.update(inner_prop.get_imports(prefix=prefix)) - imports.add("from typing import cast, Union") + imports.add("from typing import cast") return imports def get_lazy_imports(self, *, prefix: str) -> set[str]: @@ -183,7 +208,7 @@ def get_lazy_imports(self, *, prefix: str) -> set[str]: def validate_location(self, location: oai.ParameterLocation) -> ParseError | None: """Returns an error if this type of property is not allowed in the given location""" - from ..properties import Property + from ..properties import Property # noqa: PLC0415 for inner_prop in self.inner_properties: if evolve(cast(Property, inner_prop), required=self.required).validate_location(location) is not None: diff --git a/openapi_python_client/parser/properties/uuid.py b/openapi_python_client/parser/properties/uuid.py new file mode 100644 index 000000000..86d7d6a0a --- /dev/null +++ b/openapi_python_client/parser/properties/uuid.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from typing import Any, ClassVar +from uuid import UUID + +from attr import define + +from ... 
import schema as oai +from ...utils import PythonIdentifier +from ..errors import PropertyError +from .protocol import PropertyProtocol, Value + + +@define +class UuidProperty(PropertyProtocol): + """A property of type uuid.UUID""" + + name: str + required: bool + default: Value | None + python_name: PythonIdentifier + description: str | None + example: str | None + + _type_string: ClassVar[str] = "UUID" + _json_type_string: ClassVar[str] = "str" + _allowed_locations: ClassVar[set[oai.ParameterLocation]] = { + oai.ParameterLocation.QUERY, + oai.ParameterLocation.PATH, + oai.ParameterLocation.COOKIE, + oai.ParameterLocation.HEADER, + } + template: ClassVar[str] = "uuid_property.py.jinja" + + @classmethod + def build( + cls, + name: str, + required: bool, + default: Any, + python_name: PythonIdentifier, + description: str | None, + example: str | None, + ) -> UuidProperty | PropertyError: + checked_default = cls.convert_value(default) + if isinstance(checked_default, PropertyError): + return checked_default + + return cls( + name=name, + required=required, + default=checked_default, + python_name=python_name, + description=description, + example=example, + ) + + @classmethod + def convert_value(cls, value: Any) -> Value | None | PropertyError: + if value is None or isinstance(value, Value): + return value + if isinstance(value, str): + try: + UUID(value) + except ValueError: + return PropertyError(f"Invalid UUID value: {value}") + return Value(python_code=f"UUID('{value}')", raw_value=value) + return PropertyError(f"Invalid UUID value: {value}") + + def get_imports(self, *, prefix: str) -> set[str]: + """ + Get a set of import strings that should be included when this property is used somewhere + + Args: + prefix: A prefix to put before any relative (local) module names. This should be the number of . to get + back to the root of the generated client. + """ + imports = super().get_imports(prefix=prefix) + imports.update({"from uuid import UUID"}) + return imports diff --git a/openapi_python_client/parser/responses.py b/openapi_python_client/parser/responses.py index 3a22deb71..f05fbce75 100644 --- a/openapi_python_client/parser/responses.py +++ b/openapi_python_client/parser/responses.py @@ -1,11 +1,12 @@ -__all__ = ["Response", "response_from_data"] +__all__ = ["HTTPStatusPattern", "Response", "Responses", "response_from_data"] -from http import HTTPStatus -from typing import Optional, Tuple, TypedDict, Union +from collections.abc import Iterator +from typing import TypedDict from attrs import define from openapi_python_client import utils +from openapi_python_client.parser.properties.schemas import get_reference_simple_name, parse_reference_path from .. import Config from .. import schema as oai @@ -14,6 +15,20 @@ from .properties import AnyProperty, Property, Schemas, property_from_data +@define +class Responses: + patterns: list["Response"] + default: "Response | None" + + def __iter__(self) -> Iterator["Response"]: + yield from self.patterns + if self.default: + yield self.default + + def __len__(self) -> int: + return len(self.patterns) + (1 if self.default else 0) + + class _ResponseSource(TypedDict): """What data should be pulled from the httpx Response object""" @@ -27,18 +42,94 @@ class _ResponseSource(TypedDict): NONE_SOURCE = _ResponseSource(attribute="None", return_type="None") -@define +class HTTPStatusPattern: + """Status code patterns come in three flavors, in order of precedence: + 1. Specific status codes, such as 200. This is represented by `min` and `max` being the same. 
+ 2. Ranges of status codes, such as 2XX. This is represented by `min` and `max` being different. + 3. The special `default` status code, which is used when no other status codes match. `range` is `None` in this case. + + https://github.com/openapi-generators/openapi-python-client/blob/61b6c54994e2a6285bb422ee3b864c45b5d88c15/openapi_python_client/schema/3.1.0.md#responses-object + """ + + pattern: str + range: tuple[int, int] | None + + def __init__(self, *, pattern: str, code_range: tuple[int, int] | None): + """Initialize with a range of status codes or None for the default case.""" + self.pattern = pattern + self.range = code_range + + @staticmethod + def parse(pattern: str) -> "HTTPStatusPattern | ParseError": + """Parse a status code pattern such as 2XX or 404""" + if pattern == "default": + return HTTPStatusPattern(pattern=pattern, code_range=None) + + if pattern.endswith("XX") and pattern[0].isdigit(): + first_digit = int(pattern[0]) + return HTTPStatusPattern(pattern=pattern, code_range=(first_digit * 100, first_digit * 100 + 99)) + + try: + code = int(pattern) + return HTTPStatusPattern(pattern=pattern, code_range=(code, code)) + except ValueError: + return ParseError( + detail=( + f"Invalid response status code pattern: {pattern}, response will be omitted from generated client" + ) + ) + + def is_range(self) -> bool: + """Check if this is a range of status codes, such as 2XX""" + return self.range is not None and self.range[0] != self.range[1] + + def __lt__(self, other: "HTTPStatusPattern") -> bool: + """Compare two HTTPStatusPattern objects based on the order they should be applied in""" + if self.range is None: + return False # Default gets applied last + if other.range is None: + return True # Other is default, so this one gets applied first + + # Specific codes appear before ranges + if self.is_range() and not other.is_range(): + return False + if not self.is_range() and other.is_range(): + return True + + # Order specific codes numerically + return self.range[0] < other.range[0] + + def __eq__(self, other: object) -> bool: # pragma: no cover + if not isinstance(other, HTTPStatusPattern): + return False + return self.range == other.range + + def __hash__(self) -> int: # pragma: no cover + return hash(self.range) + + def __repr__(self) -> str: # pragma: no cover + return f"<HTTPStatusPattern {self.pattern}>" + + +@define(order=False) class Response: """Describes a single response for an endpoint""" - status_code: HTTPStatus + status_code: HTTPStatusPattern prop: Property source: _ResponseSource - data: Union[oai.Response, oai.Reference] # Original data which created this response, useful for custom templates + data: oai.Response | oai.Reference # Original data which created this response, useful for custom templates + + def is_default(self) -> bool: + return self.status_code.range is None + def __lt__(self, other: "Response") -> bool: + """Compare two responses based on the order in which they should be applied in""" + return self.status_code < other.status_code -def _source_by_content_type(content_type: str) -> Optional[_ResponseSource]: - parsed_content_type = utils.get_content_type(content_type) + +def _source_by_content_type(content_type: str, config: Config) -> _ResponseSource | None: + parsed_content_type = utils.get_content_type(content_type, config) if parsed_content_type is None: return None @@ -58,10 +149,10 @@ def empty_response( *, - status_code: HTTPStatus, + status_code: 
HTTPStatusPattern, response_name: str, config: Config, - data: Union[oai.Response, oai.Reference], + data: oai.Response | oai.Reference, ) -> Response: """Return an untyped response, for when no response type is defined""" return Response( @@ -79,27 +170,30 @@ def empty_response( ) -def response_from_data( +def response_from_data( # noqa: PLR0911 *, - status_code: HTTPStatus, - data: Union[oai.Response, oai.Reference], + status_code: HTTPStatusPattern, + data: oai.Response | oai.Reference, schemas: Schemas, + responses: dict[str, oai.Response | oai.Reference], parent_name: str, config: Config, -) -> Tuple[Union[Response, ParseError], Schemas]: +) -> tuple[Response | ParseError, Schemas]: """Generate a Response from the OpenAPI dictionary representation of it""" - response_name = f"response_{status_code}" + response_name = f"response_{status_code.pattern}" if isinstance(data, oai.Reference): - return ( - empty_response( - status_code=status_code, - response_name=response_name, - config=config, - data=data, - ), - schemas, - ) + ref_path = parse_reference_path(data.ref) + if isinstance(ref_path, ParseError): + return ref_path, schemas + if not ref_path.startswith("/components/responses/"): + return ParseError(data=data, detail=f"$ref to {data.ref} not allowed in responses"), schemas + resp_data = responses.get(get_reference_simple_name(ref_path), None) + if not resp_data: + return ParseError(data=data, detail=f"Could not find reference: {data.ref}"), schemas + if not isinstance(resp_data, oai.Response): + return ParseError(data=data, detail="Top-level $ref inside components/responses is not supported"), schemas + data = resp_data content = data.content if not content: @@ -114,7 +208,7 @@ def response_from_data( ) for content_type, media_type in content.items(): - source = _source_by_content_type(content_type) + source = _source_by_content_type(content_type, config) if source is not None: schema_data = media_type.media_type_schema break diff --git a/openapi_python_client/schema/__init__.py b/openapi_python_client/schema/__init__.py index d3de0e493..21a90f5fb 100644 --- a/openapi_python_client/schema/__init__.py +++ b/openapi_python_client/schema/__init__.py @@ -1,12 +1,12 @@ __all__ = [ + "DataType", "MediaType", "OpenAPI", "Operation", "Parameter", + "Parameter", "ParameterLocation", - "DataType", "PathItem", - "Parameter", "Reference", "RequestBody", "Response", diff --git a/openapi_python_client/schema/openapi_schema_pydantic/__init__.py b/openapi_python_client/schema/openapi_schema_pydantic/__init__.py index 6b02446a8..b61cefc66 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/__init__.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/__init__.py @@ -7,6 +7,8 @@ """ __all__ = [ + "XML", + "Callback", "Components", "Contact", "Discriminator", @@ -35,8 +37,6 @@ "Server", "ServerVariable", "Tag", - "XML", - "Callback", ] @@ -70,3 +70,14 @@ from .server_variable import ServerVariable from .tag import Tag from .xml import XML + +PathItem.model_rebuild() +Operation.model_rebuild() +Components.model_rebuild() +Encoding.model_rebuild() +MediaType.model_rebuild() +OpenAPI.model_rebuild() +Parameter.model_rebuild() +Header.model_rebuild() +RequestBody.model_rebuild() +Response.model_rebuild() diff --git a/openapi_python_client/schema/openapi_schema_pydantic/callback.py b/openapi_python_client/schema/openapi_schema_pydantic/callback.py index 22426d925..f4593cc8d 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/callback.py +++ 
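A small ordering sketch for the `HTTPStatusPattern` precedence rules above (standalone stand-ins, not the project's classes; error handling is omitted): specific codes apply first, then `2XX`-style ranges, and `default` applies last.

def parse_range(pattern: str) -> tuple[int, int] | None:
    # Mirrors HTTPStatusPattern.parse: None means "default", (n, n) a specific code, (n00, n99) a range.
    if pattern == "default":
        return None
    if pattern.endswith("XX") and pattern[0].isdigit():
        first_digit = int(pattern[0])
        return (first_digit * 100, first_digit * 100 + 99)
    code = int(pattern)  # the real parser returns a ParseError on invalid input instead of raising
    return (code, code)

def sort_key(pattern: str) -> tuple[int, int]:
    rng = parse_range(pattern)
    if rng is None:
        return (2, 0)  # default is applied last
    is_range = rng[0] != rng[1]
    return (1 if is_range else 0, rng[0])  # specific codes before ranges, then numerically

assert sorted(["default", "5XX", "404", "200", "4XX"], key=sort_key) == ["200", "404", "4XX", "5XX", "default"]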
b/openapi_python_client/schema/openapi_schema_pydantic/callback.py @@ -1,11 +1,11 @@ -from typing import TYPE_CHECKING, Dict +from typing import TYPE_CHECKING if TYPE_CHECKING: # pragma: no cover from .path_item import PathItem else: PathItem = "PathItem" -Callback = Dict[str, PathItem] +Callback = dict[str, PathItem] """ A map of possible out-of band callbacks related to the parent operation. Each value in the map is a [Path Item Object](#pathItemObject) diff --git a/openapi_python_client/schema/openapi_schema_pydantic/components.py b/openapi_python_client/schema/openapi_schema_pydantic/components.py index f366a2ec8..3023714c9 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/components.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/components.py @@ -1,5 +1,3 @@ -from typing import Dict, Optional, Union - from pydantic import BaseModel, ConfigDict from .callback import Callback @@ -7,7 +5,7 @@ from .header import Header from .link import Link from .parameter import Parameter -from .reference import Reference +from .reference import ReferenceOr from .request_body import RequestBody from .response import Response from .schema import Schema @@ -25,16 +23,18 @@ class Components(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#componentsObject """ - schemas: Optional[Dict[str, Union[Schema, Reference]]] = None - responses: Optional[Dict[str, Union[Response, Reference]]] = None - parameters: Optional[Dict[str, Union[Parameter, Reference]]] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - requestBodies: Optional[Dict[str, Union[RequestBody, Reference]]] = None - headers: Optional[Dict[str, Union[Header, Reference]]] = None - securitySchemes: Optional[Dict[str, Union[SecurityScheme, Reference]]] = None - links: Optional[Dict[str, Union[Link, Reference]]] = None - callbacks: Optional[Dict[str, Union[Callback, Reference]]] = None + schemas: dict[str, ReferenceOr[Schema]] | None = None + responses: dict[str, ReferenceOr[Response]] | None = None + parameters: dict[str, ReferenceOr[Parameter]] | None = None + examples: dict[str, ReferenceOr[Example]] | None = None + requestBodies: dict[str, ReferenceOr[RequestBody]] | None = None + headers: dict[str, ReferenceOr[Header]] | None = None + securitySchemes: dict[str, ReferenceOr[SecurityScheme]] | None = None + links: dict[str, ReferenceOr[Link]] | None = None + callbacks: dict[str, ReferenceOr[Callback]] | None = None model_config = ConfigDict( + # `Callback` contains an unresolvable forward reference, will rebuild in `__init__.py`: + defer_build=True, extra="allow", json_schema_extra={ "examples": [ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/contact.py b/openapi_python_client/schema/openapi_schema_pydantic/contact.py index c04fdbbe0..0ff40fa71 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/contact.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/contact.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict @@ -11,9 +9,9 @@ class Contact(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#contactObject """ - name: Optional[str] = None - url: Optional[str] = None - email: Optional[str] = None + name: str | None = None + url: str | None = None + email: str | None = None model_config = ConfigDict( extra="allow", json_schema_extra={ diff --git 
a/openapi_python_client/schema/openapi_schema_pydantic/discriminator.py b/openapi_python_client/schema/openapi_schema_pydantic/discriminator.py index 95161d07a..3fb05ade9 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/discriminator.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/discriminator.py @@ -1,5 +1,3 @@ -from typing import Dict, Optional - from pydantic import BaseModel, ConfigDict @@ -19,7 +17,7 @@ class Discriminator(BaseModel): """ propertyName: str - mapping: Optional[Dict[str, str]] = None + mapping: dict[str, str] | None = None model_config = ConfigDict( extra="allow", json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/encoding.py b/openapi_python_client/schema/openapi_schema_pydantic/encoding.py index b7434c50c..d2725e9f7 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/encoding.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/encoding.py @@ -1,13 +1,11 @@ -from typing import TYPE_CHECKING, Dict, Optional, Union +from typing import TYPE_CHECKING from pydantic import BaseModel, ConfigDict -from .reference import Reference +from .reference import ReferenceOr if TYPE_CHECKING: # pragma: no cover from .header import Header -else: - Header = "Header" class Encoding(BaseModel): @@ -18,12 +16,14 @@ class Encoding(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#encodingObject """ - contentType: Optional[str] = None - headers: Optional[Dict[str, Union[Header, Reference]]] = None - style: Optional[str] = None + contentType: str | None = None + headers: dict[str, ReferenceOr["Header"]] | None = None + style: str | None = None explode: bool = False allowReserved: bool = False model_config = ConfigDict( + # `Header` is an unresolvable forward reference, will rebuild in `__init__.py`: + defer_build=True, extra="allow", json_schema_extra={ "examples": [ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/example.py b/openapi_python_client/schema/openapi_schema_pydantic/example.py index 90db2530e..6469457f7 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/example.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/example.py @@ -1,4 +1,4 @@ -from typing import Any, Optional +from typing import Any from pydantic import BaseModel, ConfigDict @@ -11,10 +11,10 @@ class Example(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#exampleObject """ - summary: Optional[str] = None - description: Optional[str] = None - value: Optional[Any] = None - externalValue: Optional[str] = None + summary: str | None = None + description: str | None = None + value: Any | None = None + externalValue: str | None = None model_config = ConfigDict( extra="allow", json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/external_documentation.py b/openapi_python_client/schema/openapi_schema_pydantic/external_documentation.py index 2c0c39b7c..78df42f67 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/external_documentation.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/external_documentation.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict @@ -10,7 +8,7 @@ class ExternalDocumentation(BaseModel): - 
https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#externalDocumentationObject """ - description: Optional[str] = None + description: str | None = None url: str model_config = ConfigDict( extra="allow", diff --git a/openapi_python_client/schema/openapi_schema_pydantic/header.py b/openapi_python_client/schema/openapi_schema_pydantic/header.py index 3223c199b..2deb6f390 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/header.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/header.py @@ -21,6 +21,8 @@ class Header(Parameter): name: str = Field(default="") param_in: ParameterLocation = Field(default=ParameterLocation.HEADER, alias="in") model_config = ConfigDict( + # `Parameter` is not build yet, will rebuild in `__init__.py`: + defer_build=True, extra="allow", populate_by_name=True, json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/info.py b/openapi_python_client/schema/openapi_schema_pydantic/info.py index bec1354da..8b3e59963 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/info.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/info.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict from .contact import Contact @@ -18,10 +16,10 @@ class Info(BaseModel): """ title: str - description: Optional[str] = None - termsOfService: Optional[str] = None - contact: Optional[Contact] = None - license: Optional[License] = None + description: str | None = None + termsOfService: str | None = None + contact: Contact | None = None + license: License | None = None version: str model_config = ConfigDict( extra="allow", diff --git a/openapi_python_client/schema/openapi_schema_pydantic/license.py b/openapi_python_client/schema/openapi_schema_pydantic/license.py index 185eec1db..ee712d92e 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/license.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/license.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict @@ -12,7 +10,7 @@ class License(BaseModel): """ name: str - url: Optional[str] = None + url: str | None = None model_config = ConfigDict( extra="allow", json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/link.py b/openapi_python_client/schema/openapi_schema_pydantic/link.py index 9f823c4a2..77740ceb4 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/link.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/link.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional +from typing import Any from pydantic import BaseModel, ConfigDict @@ -23,12 +23,12 @@ class Link(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.3.md#linkObject """ - operationRef: Optional[str] = None - operationId: Optional[str] = None - parameters: Optional[Dict[str, Any]] = None - requestBody: Optional[Any] = None - description: Optional[str] = None - server: Optional[Server] = None + operationRef: str | None = None + operationId: str | None = None + parameters: dict[str, Any] | None = None + requestBody: Any | None = None + description: str | None = None + server: Server | None = None model_config = ConfigDict( extra="allow", json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/media_type.py b/openapi_python_client/schema/openapi_schema_pydantic/media_type.py 
index 1bda99560..1f4e6090f 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/media_type.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/media_type.py @@ -1,10 +1,10 @@ -from typing import Any, Dict, Optional, Union +from typing import Any from pydantic import BaseModel, ConfigDict, Field from .encoding import Encoding from .example import Example -from .reference import Reference +from .reference import ReferenceOr from .schema import Schema @@ -16,11 +16,13 @@ class MediaType(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#mediaTypeObject """ - media_type_schema: Optional[Union[Reference, Schema]] = Field(default=None, alias="schema") - example: Optional[Any] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - encoding: Optional[Dict[str, Encoding]] = None + media_type_schema: ReferenceOr[Schema] | None = Field(default=None, alias="schema") + example: Any | None = None + examples: dict[str, ReferenceOr[Example]] | None = None + encoding: dict[str, Encoding] | None = None model_config = ConfigDict( + # `Encoding` is not build yet, will rebuild in `__init__.py`: + defer_build=True, extra="allow", populate_by_name=True, json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/oauth_flow.py b/openapi_python_client/schema/openapi_schema_pydantic/oauth_flow.py index c7485814f..d9066e383 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/oauth_flow.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/oauth_flow.py @@ -1,5 +1,3 @@ -from typing import Dict, Optional - from pydantic import BaseModel, ConfigDict @@ -12,10 +10,10 @@ class OAuthFlow(BaseModel): - https://swagger.io/docs/specification/authentication/oauth2/ """ - authorizationUrl: Optional[str] = None - tokenUrl: Optional[str] = None - refreshUrl: Optional[str] = None - scopes: Dict[str, str] + authorizationUrl: str | None = None + tokenUrl: str | None = None + refreshUrl: str | None = None + scopes: dict[str, str] model_config = ConfigDict( extra="allow", json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/oauth_flows.py b/openapi_python_client/schema/openapi_schema_pydantic/oauth_flows.py index dba193713..602a34cb5 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/oauth_flows.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/oauth_flows.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict from .oauth_flow import OAuthFlow @@ -14,8 +12,8 @@ class OAuthFlows(BaseModel): - https://swagger.io/docs/specification/authentication/oauth2/ """ - implicit: Optional[OAuthFlow] = None - password: Optional[OAuthFlow] = None - clientCredentials: Optional[OAuthFlow] = None - authorizationCode: Optional[OAuthFlow] = None + implicit: OAuthFlow | None = None + password: OAuthFlow | None = None + clientCredentials: OAuthFlow | None = None + authorizationCode: OAuthFlow | None = None model_config = ConfigDict(extra="allow") diff --git a/openapi_python_client/schema/openapi_schema_pydantic/open_api.py b/openapi_python_client/schema/openapi_schema_pydantic/open_api.py index 6a1b5ae12..a98158232 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/open_api.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/open_api.py @@ -1,13 +1,8 @@ -from typing import List, 
Optional - from pydantic import BaseModel, ConfigDict, field_validator from .components import Components from .external_documentation import ExternalDocumentation from .info import Info - -# Required to update forward ref after object creation -from .path_item import PathItem # noqa: F401 from .paths import Paths from .security_requirement import SecurityRequirement from .server import Server @@ -25,14 +20,18 @@ class OpenAPI(BaseModel): """ info: Info - servers: List[Server] = [Server(url="/")] + servers: list[Server] = [Server(url="/")] paths: Paths - components: Optional[Components] = None - security: Optional[List[SecurityRequirement]] = None - tags: Optional[List[Tag]] = None - externalDocs: Optional[ExternalDocumentation] = None + components: Components | None = None + security: list[SecurityRequirement] | None = None + tags: list[Tag] | None = None + externalDocs: ExternalDocumentation | None = None openapi: str - model_config = ConfigDict(extra="allow") + model_config = ConfigDict( + # `Components` is not build yet, will rebuild in `__init__.py`: + defer_build=True, + extra="allow", + ) @field_validator("openapi") @classmethod @@ -46,6 +45,3 @@ def check_openapi_version(cls, value: str) -> str: if int(parts[1]) > 1: raise ValueError(f"Only OpenAPI versions 3.1.* are supported, got {value}") return value - - -OpenAPI.model_rebuild() diff --git a/openapi_python_client/schema/openapi_schema_pydantic/operation.py b/openapi_python_client/schema/openapi_schema_pydantic/operation.py index 41f5e7100..061be3135 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/operation.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/operation.py @@ -1,15 +1,9 @@ -from typing import Dict, List, Optional, Union - from pydantic import BaseModel, ConfigDict, Field from .callback import Callback from .external_documentation import ExternalDocumentation -from .header import Header # noqa: F401 from .parameter import Parameter - -# Required to update forward ref after object creation, as this is not imported yet -from .path_item import PathItem # noqa: F401 -from .reference import Reference +from .reference import ReferenceOr from .request_body import RequestBody from .responses import Responses from .security_requirement import SecurityRequirement @@ -24,20 +18,22 @@ class Operation(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#operationObject """ - tags: Optional[List[str]] = None - summary: Optional[str] = None - description: Optional[str] = None - externalDocs: Optional[ExternalDocumentation] = None - operationId: Optional[str] = None - parameters: Optional[List[Union[Parameter, Reference]]] = None - request_body: Optional[Union[RequestBody, Reference]] = Field(None, alias="requestBody") + tags: list[str] | None = None + summary: str | None = None + description: str | None = None + externalDocs: ExternalDocumentation | None = None + operationId: str | None = None + parameters: list[ReferenceOr[Parameter]] | None = None + request_body: ReferenceOr[RequestBody] | None = Field(None, alias="requestBody") responses: Responses - callbacks: Optional[Dict[str, Callback]] = None + callbacks: dict[str, Callback] | None = None deprecated: bool = False - security: Optional[List[SecurityRequirement]] = None - servers: Optional[List[Server]] = None + security: list[SecurityRequirement] | None = None + servers: list[Server] | None = None model_config = ConfigDict( + # `Callback` contains an unresolvable forward 
reference, will rebuild in `__init__.py`: + defer_build=True, extra="allow", json_schema_extra={ "examples": [ @@ -89,7 +85,3 @@ class Operation(BaseModel): ] }, ) - - -# PathItem in Callback uses Operation, so we need to update forward refs due to circular dependency -Operation.model_rebuild() diff --git a/openapi_python_client/schema/openapi_schema_pydantic/parameter.py b/openapi_python_client/schema/openapi_schema_pydantic/parameter.py index 25ba819f1..a963291fc 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/parameter.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/parameter.py @@ -1,11 +1,11 @@ -from typing import Any, Dict, Optional, Union +from typing import Any from pydantic import BaseModel, ConfigDict, Field from ..parameter_location import ParameterLocation from .example import Example from .media_type import MediaType -from .reference import Reference +from .reference import ReferenceOr from .schema import Schema @@ -23,18 +23,20 @@ class Parameter(BaseModel): name: str param_in: ParameterLocation = Field(alias="in") - description: Optional[str] = None + description: str | None = None required: bool = False deprecated: bool = False allowEmptyValue: bool = False - style: Optional[str] = None + style: str | None = None explode: bool = False allowReserved: bool = False - param_schema: Optional[Union[Reference, Schema]] = Field(default=None, alias="schema") - example: Optional[Any] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - content: Optional[Dict[str, MediaType]] = None + param_schema: ReferenceOr[Schema] | None = Field(default=None, alias="schema") + example: Any | None = None + examples: dict[str, ReferenceOr[Example]] | None = None + content: dict[str, MediaType] | None = None model_config = ConfigDict( + # `MediaType` is not build yet, will rebuild in `__init__.py`: + defer_build=True, extra="allow", populate_by_name=True, json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/path_item.py b/openapi_python_client/schema/openapi_schema_pydantic/path_item.py index 36edee0e3..16fee9aeb 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/path_item.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/path_item.py @@ -1,11 +1,14 @@ -from typing import List, Optional, Union +from typing import TYPE_CHECKING from pydantic import BaseModel, ConfigDict, Field from .parameter import Parameter -from .reference import Reference +from .reference import ReferenceOr from .server import Server +if TYPE_CHECKING: + from .operation import Operation # pragma: no cover + class PathItem(BaseModel): """ @@ -19,20 +22,22 @@ class PathItem(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#pathItemObject """ - ref: Optional[str] = Field(default=None, alias="$ref") - summary: Optional[str] = None - description: Optional[str] = None - get: Optional["Operation"] = None - put: Optional["Operation"] = None - post: Optional["Operation"] = None - delete: Optional["Operation"] = None - options: Optional["Operation"] = None - head: Optional["Operation"] = None - patch: Optional["Operation"] = None - trace: Optional["Operation"] = None - servers: Optional[List[Server]] = None - parameters: Optional[List[Union[Parameter, Reference]]] = None + ref: str | None = Field(default=None, alias="$ref") + summary: str | None = None + description: str | None = None + get: "Operation | None" = None + put: "Operation | None" = None 
+ post: "Operation | None" = None + delete: "Operation | None" = None + options: "Operation | None" = None + head: "Operation | None" = None + patch: "Operation | None" = None + trace: "Operation | None" = None + servers: list[Server] | None = None + parameters: list[ReferenceOr[Parameter]] | None = None model_config = ConfigDict( + # `Operation` is an unresolvable forward reference, will rebuild in `__init__.py`: + defer_build=True, extra="allow", populate_by_name=True, json_schema_extra={ @@ -69,9 +74,3 @@ class PathItem(BaseModel): ] }, ) - - -# Operation uses PathItem via Callback, so we need late import and to update forward refs due to circular dependency -from .operation import Operation # noqa: E402 - -PathItem.model_rebuild() diff --git a/openapi_python_client/schema/openapi_schema_pydantic/paths.py b/openapi_python_client/schema/openapi_schema_pydantic/paths.py index d61ea7b18..86c1dfd19 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/paths.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/paths.py @@ -1,8 +1,6 @@ -from typing import Dict - from .path_item import PathItem -Paths = Dict[str, PathItem] +Paths = dict[str, PathItem] """ Holds the relative paths to the individual endpoints and their operations. The path is appended to the URL from the [`Server Object`](#serverObject) in order to construct the full URL. diff --git a/openapi_python_client/schema/openapi_schema_pydantic/reference.py b/openapi_python_client/schema/openapi_schema_pydantic/reference.py index 50d26064f..c1b41e473 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/reference.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/reference.py @@ -1,4 +1,6 @@ -from pydantic import BaseModel, ConfigDict, Field +from typing import Annotated, Any, Literal, TypeAlias, TypeVar + +from pydantic import BaseModel, ConfigDict, Discriminator, Field, Tag class Reference(BaseModel): @@ -24,3 +26,17 @@ class Reference(BaseModel): "examples": [{"$ref": "#/components/schemas/Pet"}, {"$ref": "Pet.json"}, {"$ref": "definitions.json#/Pet"}] }, ) + + +T = TypeVar("T") + + +def _reference_discriminator(obj: Any) -> Literal["ref", "other"]: + if isinstance(obj, dict): + return "ref" if "$ref" in obj else "other" + return "ref" if isinstance(obj, Reference) else "other" + + +ReferenceOr: TypeAlias = Annotated[ + Annotated[Reference, Tag("ref")] | Annotated[T, Tag("other")], Discriminator(_reference_discriminator) +] diff --git a/openapi_python_client/schema/openapi_schema_pydantic/request_body.py b/openapi_python_client/schema/openapi_schema_pydantic/request_body.py index 6b1847215..60c454b6d 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/request_body.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/request_body.py @@ -1,5 +1,3 @@ -from typing import Dict, Optional - from pydantic import BaseModel, ConfigDict from .media_type import MediaType @@ -13,10 +11,12 @@ class RequestBody(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#requestBodyObject """ - description: Optional[str] = None - content: Dict[str, MediaType] + description: str | None = None + content: dict[str, MediaType] required: bool = False model_config = ConfigDict( + # `MediaType` is not build yet, will rebuild in `__init__.py`: + defer_build=True, extra="allow", json_schema_extra={ "examples": [ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/response.py 
b/openapi_python_client/schema/openapi_schema_pydantic/response.py index a7c5d08ec..e6d8ebb2c 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/response.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/response.py @@ -1,11 +1,9 @@ -from typing import Dict, Optional, Union - from pydantic import BaseModel, ConfigDict from .header import Header from .link import Link from .media_type import MediaType -from .reference import Reference +from .reference import ReferenceOr class Response(BaseModel): @@ -19,10 +17,12 @@ class Response(BaseModel): """ description: str - headers: Optional[Dict[str, Union[Header, Reference]]] = None - content: Optional[Dict[str, MediaType]] = None - links: Optional[Dict[str, Union[Link, Reference]]] = None + headers: dict[str, ReferenceOr[Header]] | None = None + content: dict[str, MediaType] | None = None + links: dict[str, ReferenceOr[Link]] | None = None model_config = ConfigDict( + # `MediaType` is not build yet, will rebuild in `__init__.py`: + defer_build=True, extra="allow", json_schema_extra={ "examples": [ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/responses.py b/openapi_python_client/schema/openapi_schema_pydantic/responses.py index 53306ae1c..823339a54 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/responses.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/responses.py @@ -1,9 +1,7 @@ -from typing import Dict, Union - -from .reference import Reference +from .reference import ReferenceOr from .response import Response -Responses = Dict[str, Union[Response, Reference]] +Responses = dict[str, ReferenceOr[Response]] """ A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. 
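A hedged sketch of how the callable-discriminated `ReferenceOr` alias defined above behaves (assumes pydantic v2.5+; `Thing` is a made-up stand-in for `Schema`, `Response`, etc.): any mapping carrying a `$ref` key validates as `Reference`, everything else as the wrapped type.

from typing import Annotated, Any, Literal, TypeAlias, TypeVar

from pydantic import BaseModel, Discriminator, Field, Tag, TypeAdapter

class Reference(BaseModel):
    ref: str = Field(alias="$ref")

T = TypeVar("T")

def _reference_discriminator(obj: Any) -> Literal["ref", "other"]:
    # Same rule as above: the presence of "$ref" decides which branch of the union is used.
    if isinstance(obj, dict):
        return "ref" if "$ref" in obj else "other"
    return "ref" if isinstance(obj, Reference) else "other"

ReferenceOr: TypeAlias = Annotated[
    Annotated[Reference, Tag("ref")] | Annotated[T, Tag("other")], Discriminator(_reference_discriminator)
]

class Thing(BaseModel):  # hypothetical stand-in model
    description: str | None = None

adapter = TypeAdapter(ReferenceOr[Thing])
assert isinstance(adapter.validate_python({"$ref": "#/components/schemas/Pet"}), Reference)
assert isinstance(adapter.validate_python({"description": "not a ref"}), Thing)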
diff --git a/openapi_python_client/schema/openapi_schema_pydantic/schema.py b/openapi_python_client/schema/openapi_schema_pydantic/schema.py index e2201c6e7..990f14a3d 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/schema.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/schema.py @@ -1,11 +1,11 @@ -from typing import Any, Dict, List, Optional, Union +from typing import Any -from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, model_validator +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictFloat, StrictInt, StrictStr, model_validator from ..data_type import DataType from .discriminator import Discriminator from .external_documentation import ExternalDocumentation -from .reference import Reference +from .reference import ReferenceOr from .xml import XML @@ -20,42 +20,43 @@ class Schema(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#schemaObject """ - title: Optional[str] = None - multipleOf: Optional[float] = Field(default=None, gt=0.0) - maximum: Optional[float] = None - exclusiveMaximum: Optional[bool] = None - minimum: Optional[float] = None - exclusiveMinimum: Optional[bool] = None - maxLength: Optional[int] = Field(default=None, ge=0) - minLength: Optional[int] = Field(default=None, ge=0) - pattern: Optional[str] = None - maxItems: Optional[int] = Field(default=None, ge=0) - minItems: Optional[int] = Field(default=None, ge=0) - uniqueItems: Optional[bool] = None - maxProperties: Optional[int] = Field(default=None, ge=0) - minProperties: Optional[int] = Field(default=None, ge=0) - required: Optional[List[str]] = Field(default=None, min_length=1) - enum: Union[None, List[Any]] = Field(default=None, min_length=1) - const: Union[None, StrictStr, StrictInt] = None - type: Union[DataType, List[DataType], None] = Field(default=None) - allOf: List[Union[Reference, "Schema"]] = Field(default_factory=list) - oneOf: List[Union[Reference, "Schema"]] = Field(default_factory=list) - anyOf: List[Union[Reference, "Schema"]] = Field(default_factory=list) - schema_not: Optional[Union[Reference, "Schema"]] = Field(default=None, alias="not") - items: Optional[Union[Reference, "Schema"]] = None - properties: Optional[Dict[str, Union[Reference, "Schema"]]] = None - additionalProperties: Optional[Union[bool, Reference, "Schema"]] = None - description: Optional[str] = None - schema_format: Optional[str] = Field(default=None, alias="format") - default: Optional[Any] = None + title: str | None = None + multipleOf: float | None = Field(default=None, gt=0.0) + maximum: float | None = None + exclusiveMaximum: bool | float | None = None + minimum: float | None = None + exclusiveMinimum: bool | float | None = None + maxLength: int | None = Field(default=None, ge=0) + minLength: int | None = Field(default=None, ge=0) + pattern: str | None = None + maxItems: int | None = Field(default=None, ge=0) + minItems: int | None = Field(default=None, ge=0) + uniqueItems: bool | None = None + maxProperties: int | None = Field(default=None, ge=0) + minProperties: int | None = Field(default=None, ge=0) + required: list[str] | None = Field(default=None) + enum: None | list[Any] = Field(default=None, min_length=1) + const: None | StrictStr | StrictInt | StrictFloat | StrictBool = None + type: DataType | list[DataType] | None = Field(default=None) + allOf: list[ReferenceOr["Schema"]] = Field(default_factory=list) + oneOf: list[ReferenceOr["Schema"]] = Field(default_factory=list) + 
anyOf: list[ReferenceOr["Schema"]] = Field(default_factory=list) + schema_not: ReferenceOr["Schema"] | None = Field(default=None, alias="not") + items: ReferenceOr["Schema"] | None = None + prefixItems: list[ReferenceOr["Schema"]] = Field(default_factory=list) + properties: dict[str, ReferenceOr["Schema"]] | None = None + additionalProperties: bool | ReferenceOr["Schema"] | None = None + description: str | None = None + schema_format: str | None = Field(default=None, alias="format") + default: Any | None = None nullable: bool = Field(default=False) - discriminator: Optional[Discriminator] = None - readOnly: Optional[bool] = None - writeOnly: Optional[bool] = None - xml: Optional[XML] = None - externalDocs: Optional[ExternalDocumentation] = None - example: Optional[Any] = None - deprecated: Optional[bool] = None + discriminator: Discriminator | None = None + readOnly: bool | None = None + writeOnly: bool | None = None + xml: XML | None = None + externalDocs: ExternalDocumentation | None = None + example: Any | None = None + deprecated: bool | None = None model_config = ConfigDict( extra="allow", populate_by_name=True, @@ -160,6 +161,33 @@ class Schema(BaseModel): }, ) + @model_validator(mode="after") + def handle_exclusive_min_max(self) -> "Schema": + """ + Convert exclusiveMinimum/exclusiveMaximum between OpenAPI v3.0 (bool) and v3.1 (numeric). + """ + # Handle exclusiveMinimum + if isinstance(self.exclusiveMinimum, bool) and self.minimum is not None: + if self.exclusiveMinimum: + self.exclusiveMinimum = self.minimum + self.minimum = None + else: + self.exclusiveMinimum = None + elif isinstance(self.exclusiveMinimum, float): + self.minimum = None + + # Handle exclusiveMaximum + if isinstance(self.exclusiveMaximum, bool) and self.maximum is not None: + if self.exclusiveMaximum: + self.exclusiveMaximum = self.maximum + self.maximum = None + else: + self.exclusiveMaximum = None + elif isinstance(self.exclusiveMaximum, float): + self.maximum = None + + return self + @model_validator(mode="after") def handle_nullable(self) -> "Schema": """Convert the old 3.0 `nullable` property into the new 3.1 style""" @@ -178,6 +206,3 @@ def handle_nullable(self) -> "Schema": self.oneOf = [Schema(type=DataType.NULL), Schema(allOf=self.allOf)] self.allOf = [] return self - - -Schema.model_rebuild() diff --git a/openapi_python_client/schema/openapi_schema_pydantic/security_requirement.py b/openapi_python_client/schema/openapi_schema_pydantic/security_requirement.py index b3cca3b08..58a487dc7 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/security_requirement.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/security_requirement.py @@ -1,6 +1,4 @@ -from typing import Dict, List - -SecurityRequirement = Dict[str, List[str]] +SecurityRequirement = dict[str, list[str]] """ Lists the required security schemes to execute this operation. 
The name used for each property MUST correspond to a security scheme declared in the diff --git a/openapi_python_client/schema/openapi_schema_pydantic/security_scheme.py b/openapi_python_client/schema/openapi_schema_pydantic/security_scheme.py index df385440c..76b97bd90 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/security_scheme.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/security_scheme.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict, Field from .oauth_flows import OAuthFlows @@ -20,13 +18,13 @@ class SecurityScheme(BaseModel): """ type: str - description: Optional[str] = None - name: Optional[str] = None - security_scheme_in: Optional[str] = Field(default=None, alias="in") - scheme: Optional[str] = None - bearerFormat: Optional[str] = None - flows: Optional[OAuthFlows] = None - openIdConnectUrl: Optional[str] = None + description: str | None = None + name: str | None = None + security_scheme_in: str | None = Field(default=None, alias="in") + scheme: str | None = None + bearerFormat: str | None = None + flows: OAuthFlows | None = None + openIdConnectUrl: str | None = None model_config = ConfigDict( extra="allow", populate_by_name=True, diff --git a/openapi_python_client/schema/openapi_schema_pydantic/server.py b/openapi_python_client/schema/openapi_schema_pydantic/server.py index d573a93fe..abfb77e1b 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/server.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/server.py @@ -1,5 +1,3 @@ -from typing import Dict, Optional - from pydantic import BaseModel, ConfigDict from .server_variable import ServerVariable @@ -14,8 +12,8 @@ class Server(BaseModel): """ url: str - description: Optional[str] = None - variables: Optional[Dict[str, ServerVariable]] = None + description: str | None = None + variables: dict[str, ServerVariable] | None = None model_config = ConfigDict( extra="allow", json_schema_extra={ diff --git a/openapi_python_client/schema/openapi_schema_pydantic/server_variable.py b/openapi_python_client/schema/openapi_schema_pydantic/server_variable.py index 3b63c9ad2..09a705a2b 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/server_variable.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/server_variable.py @@ -1,5 +1,3 @@ -from typing import List, Optional - from pydantic import BaseModel, ConfigDict @@ -11,7 +9,7 @@ class ServerVariable(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#serverVariableObject """ - enum: Optional[List[str]] = None + enum: list[str] | None = None default: str - description: Optional[str] = None + description: str | None = None model_config = ConfigDict(extra="allow") diff --git a/openapi_python_client/schema/openapi_schema_pydantic/tag.py b/openapi_python_client/schema/openapi_schema_pydantic/tag.py index acb5fdc28..799810f36 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/tag.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/tag.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict from .external_documentation import ExternalDocumentation @@ -16,8 +14,8 @@ class Tag(BaseModel): """ name: str - description: Optional[str] = None - externalDocs: Optional[ExternalDocumentation] = None + description: str | None = None + externalDocs: ExternalDocumentation | None = None model_config = ConfigDict( extra="allow", 
json_schema_extra={"examples": [{"name": "pet", "description": "Pets operations"}]} ) diff --git a/openapi_python_client/schema/openapi_schema_pydantic/xml.py b/openapi_python_client/schema/openapi_schema_pydantic/xml.py index 986aa44f4..87082b3c8 100644 --- a/openapi_python_client/schema/openapi_schema_pydantic/xml.py +++ b/openapi_python_client/schema/openapi_schema_pydantic/xml.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, ConfigDict @@ -16,9 +14,9 @@ class XML(BaseModel): - https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#xmlObject """ - name: Optional[str] = None - namespace: Optional[str] = None - prefix: Optional[str] = None + name: str | None = None + namespace: str | None = None + prefix: str | None = None attribute: bool = False wrapped: bool = False model_config = ConfigDict( diff --git a/openapi_python_client/templates/README.md.jinja b/openapi_python_client/templates/README.md.jinja index ea31c83d7..98dcdf3a7 100644 --- a/openapi_python_client/templates/README.md.jinja +++ b/openapi_python_client/templates/README.md.jinja @@ -109,7 +109,7 @@ client = Client( client.set_httpx_client(httpx.Client(base_url="https://api.example.com", proxies="http://localhost:8030")) ``` -{% if poetry %} +{% if meta == "poetry" %} ## Building / publishing this package This project uses [Poetry](https://python-poetry.org/) to manage dependencies and packaging. Here are the basics: 1. Update the metadata in pyproject.toml (e.g. authors, version) @@ -123,4 +123,17 @@ If you want to install this client into another project without publishing it (e 1. If that project is not using Poetry: 1. Build a wheel with `poetry build -f wheel` 1. Install that wheel from the other project `pip install ` +{% elif meta == 'uv' %} +## Building / publishing this package +This project uses [uv](https://github.com/astral-sh/uv) to manage dependencies and packaging. Here are the basics: +1. Update the metadata in `pyproject.toml` (e.g. authors, version). +2. If you're using a private repository: https://docs.astral.sh/uv/guides/integration/alternative-indexes/ +3. Build a distribution with `uv build`, builds `sdist` and `wheel` by default. +1. Publish the client with `uv publish`, see documentation for publishing to private indexes. + +If you want to install this client into another project without publishing it (e.g. for development) then: +1. If that project **is using uv**, you can simply do `uv add ` from that project +1. If that project is not using uv: + 1. Build a wheel with `uv build --wheel`. + 1. Install that wheel from the other project `pip install `. 
{% endif %} \ No newline at end of file diff --git a/openapi_python_client/templates/client.py.jinja b/openapi_python_client/templates/client.py.jinja index 132d765fb..90c90a210 100644 --- a/openapi_python_client/templates/client.py.jinja +++ b/openapi_python_client/templates/client.py.jinja @@ -1,10 +1,35 @@ import ssl -from typing import Any, Dict, Union, Optional +from typing import Any from attrs import define, field, evolve import httpx +{% set attrs_info = { + "raise_on_unexpected_status": namespace( + type="bool", + default="field(default=False, kw_only=True)", + docstring="Whether or not to raise an errors.UnexpectedStatus if the API returns a status code" + " that was not documented in the source OpenAPI document. Can also be provided as a keyword" + " argument to the constructor." + ), + "token": namespace(type="str", default="", docstring="The token to use for authentication"), + "prefix": namespace(type="str", default='"Bearer"', docstring="The prefix to use for the Authorization header"), + "auth_header_name": namespace(type="str", default='"Authorization"', docstring="The name of the Authorization header"), +} %} + +{% macro attr_in_class_docstring(name) %} +{{ name }}: {{ attrs_info[name].docstring }} +{%- endmacro %} + +{% macro declare_attr(name) %} +{% set attr = attrs_info[name] %} +{{ name }}: {{ attr.type }}{% if attr.default %} = {{ attr.default }}{% endif %} +{% if attr.docstring and config.docstrings_on_attributes +%} +"""{{ attr.docstring }}""" +{%- endif %} +{% endmacro %} + @define class Client: """A class for keeping track of data related to the API @@ -29,26 +54,26 @@ class Client: ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. {% endmacro %} {{ httpx_args_docstring() }} +{% if not config.docstrings_on_attributes %} Attributes: - raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a - status code that was not documented in the source OpenAPI document. Can also be provided as a keyword - argument to the constructor. 
+ {{ attr_in_class_docstring("raise_on_unexpected_status") | wordwrap(101) | indent(12) }} +{% endif %} """ {% macro attributes() %} - raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str - _cookies: Dict[str, str] = field(factory=dict, kw_only=True) - _headers: Dict[str, str] = field(factory=dict, kw_only=True) - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True) - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True) - _follow_redirects: bool = field(default=False, kw_only=True) - _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True) - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + {{ declare_attr("raise_on_unexpected_status") | indent(4) }} + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: httpx.Timeout | None = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: str | bool | ssl.SSLContext = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: httpx.Client | None = field(default=None, init=False) + _async_client: httpx.AsyncClient | None = field(default=None, init=False) {% endmacro %}{{ attributes() }} {% macro builders(self) %} - def with_headers(self, headers: Dict[str, str]) -> "{{ self }}": + def with_headers(self, headers: dict[str, str]) -> "{{ self }}": """Get a new client matching this one with additional headers""" if self._client is not None: self._client.headers.update(headers) @@ -56,7 +81,7 @@ class Client: self._async_client.headers.update(headers) return evolve(self, headers={**self._headers, **headers}) - def with_cookies(self, cookies: Dict[str, str]) -> "{{ self }}": + def with_cookies(self, cookies: dict[str, str]) -> "{{ self }}": """Get a new client matching this one with additional cookies""" if self._client is not None: self._client.cookies.update(cookies) @@ -65,7 +90,7 @@ class Client: return evolve(self, cookies={**self._cookies, **cookies}) def with_timeout(self, timeout: httpx.Timeout) -> "{{ self }}": - """Get a new client matching this one with a new timeout (in seconds)""" + """Get a new client matching this one with a new timeout configuration""" if self._client is not None: self._client.timeout = timeout if self._async_client is not None: @@ -74,7 +99,7 @@ class Client: {% endmacro %}{{ builders("Client") }} {% macro httpx_stuff(name, custom_constructor=None) %} def set_httpx_client(self, client: httpx.Client) -> "{{ name }}": - """Manually the underlying httpx.Client + """Manually set the underlying httpx.Client **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. """ @@ -108,7 +133,7 @@ class Client: self.get_httpx_client().__exit__(*args, **kwargs) def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "{{ name }}": - """Manually the underlying httpx.AsyncClient + """Manually set the underlying httpx.AsyncClient **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
""" @@ -147,20 +172,20 @@ class AuthenticatedClient: """A Client which has been authenticated for use on secured endpoints {{ httpx_args_docstring() }} +{% if not config.docstrings_on_attributes %} Attributes: - raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a - status code that was not documented in the source OpenAPI document. Can also be provided as a keyword - argument to the constructor. - token: The token to use for authentication - prefix: The prefix to use for the Authorization header - auth_header_name: The name of the Authorization header + {{ attr_in_class_docstring("raise_on_unexpected_status") | wordwrap(101) | indent(12) }} + {{ attr_in_class_docstring("token") | indent(8) }} + {{ attr_in_class_docstring("prefix") | indent(8) }} + {{ attr_in_class_docstring("auth_header_name") | indent(8) }} +{% endif %} """ {{ attributes() }} - token: str - prefix: str = "Bearer" - auth_header_name: str = "Authorization" + {{ declare_attr("token") | indent(4) }} + {{ declare_attr("prefix") | indent(4) }} + {{ declare_attr("auth_header_name") | indent(4) }} {{ builders("AuthenticatedClient") }} -{{ httpx_stuff("AuthenticatedClient", "self._headers[self.auth_header_name] = f\"{self.prefix} {self.token}\" if self.prefix else self.token") }} \ No newline at end of file +{{ httpx_stuff("AuthenticatedClient", "self._headers[self.auth_header_name] = f\"{self.prefix} {self.token}\" if self.prefix else self.token") }} diff --git a/openapi_python_client/templates/endpoint_init.py.jinja b/openapi_python_client/templates/endpoint_init.py.jinja index e69de29bb..c9921b5fd 100644 --- a/openapi_python_client/templates/endpoint_init.py.jinja +++ b/openapi_python_client/templates/endpoint_init.py.jinja @@ -0,0 +1 @@ +""" Contains endpoint functions for accessing the API """ diff --git a/openapi_python_client/templates/endpoint_macros.py.jinja b/openapi_python_client/templates/endpoint_macros.py.jinja index da02b5c4a..9688a4ba2 100644 --- a/openapi_python_client/templates/endpoint_macros.py.jinja +++ b/openapi_python_client/templates/endpoint_macros.py.jinja @@ -3,7 +3,7 @@ {% macro header_params(endpoint) %} {% if endpoint.header_parameters or endpoint.bodies | length > 0 %} -headers: Dict[str, Any] = {} +headers: dict[str, Any] = {} {% if endpoint.header_parameters %} {% for parameter in endpoint.header_parameters %} {% import "property_templates/" + parameter.template as param_template %} @@ -37,7 +37,7 @@ if {{ parameter.python_name }} is not UNSET: {% macro query_params(endpoint) %} {% if endpoint.query_parameters %} -params: Dict[str, Any] = {} +params: dict[str, Any] = {} {% for property in endpoint.query_parameters %} {% set destination = property.python_name %} @@ -58,33 +58,46 @@ params = {k: v for k, v in params.items() if v is not UNSET and v is not None} {% endif %} {% endmacro %} -{% macro body_to_kwarg(body, destination) %} +{% macro body_to_kwarg(body) %} {% if body.body_type == "data" %} -{{ destination }} = body.to_dict() + {% if body.prop.required %} +_kwargs["data"] = body.to_dict() + {% else %} +if not isinstance(body, Unset): + _kwargs["data"] = body.to_dict() + {% endif %} {% elif body.body_type == "files"%} -{{ multipart_body(body, destination) }} +{{ multipart_body(body) }} {% elif body.body_type == "json" %} -{{ json_body(body, destination) }} +{{ json_body(body) }} {% elif body.body_type == "content" %} -{{ destination }} = body.payload + {% if body.prop.required %} +_kwargs["content"] = body.payload + {% else %} +if not 
isinstance(body, Unset): + _kwargs["content"] = body.payload + {% endif %} {% endif %} {% endmacro %} -{% macro json_body(body, destination) %} +{% macro json_body(body) %} {% set property = body.prop %} {% import "property_templates/" + property.template as prop_template %} {% if prop_template.transform %} -{{ prop_template.transform(property, property.python_name, destination) }} +{{ prop_template.transform(property, property.python_name, "_kwargs[\"json\"]", skip_unset=True, declare_type=False) }} +{% elif property.required %} +_kwargs["json"] = {{ property.python_name }} {% else %} -{{ destination }} = {{ property.python_name }} +if not isinstance({{property.python_name}}, Unset): + _kwargs["json"] = {{ property.python_name }} {% endif %} {% endmacro %} -{% macro multipart_body(body, destination) %} +{% macro multipart_body(body) %} {% set property = body.prop %} {% import "property_templates/" + property.template as prop_template %} -{% if prop_template.transform_multipart %} -{{ prop_template.transform_multipart(property, property.python_name, destination) }} +{% if prop_template.transform_multipart_body %} +{{ prop_template.transform_multipart_body(property) }} {% endif %} {% endmacro %} @@ -102,18 +115,18 @@ params = {k: v for k, v in params.items() if v is not UNSET and v is not None} {% if endpoint.requires_security %} client: AuthenticatedClient, {% else %} -client: Union[AuthenticatedClient, Client], +client: AuthenticatedClient | Client, {% endif %} {% endif %} {# Any allowed bodies #} {% if endpoint.bodies | length == 1 %} -body: {{ endpoint.bodies[0].prop.get_type_string() }}, +body: {{ endpoint.bodies[0].prop.get_type_string() }}{% if not endpoint.bodies[0].prop.required %} = UNSET{% endif %}, {% elif endpoint.bodies | length > 1 %} -body: Union[ - {% for body in endpoint.bodies %} - {{ body.prop.get_type_string() }}, - {% endfor %} -], +body: + {%- for body in endpoint.bodies -%}{% set body_required = body_required and body.prop.required %} + {{ body.prop.get_type_string(no_optional=True) }} {% if not loop.last %} | {% endif %} + {%- endfor -%}{% if not body_required %} | Unset = UNSET{% endif %} +, {% endif %} {# query parameters #} {% for parameter in endpoint.query_parameters %} @@ -184,3 +197,18 @@ Returns: {% macro docstring(endpoint, return_string, is_detailed) %} {{ safe_docstring(docstring_content(endpoint, return_string, is_detailed)) }} {% endmacro %} + +{% macro parse_response(parsed_responses, response) %} +{% if parsed_responses %}{% import "property_templates/" + response.prop.template as prop_template %} +{% if prop_template.construct %} +{{ prop_template.construct(response.prop, response.source.attribute) }} +{% elif response.source.return_type == response.prop.get_type_string() %} +{{ response.prop.python_name }} = {{ response.source.attribute }} +{% else %} +{{ response.prop.python_name }} = cast({{ response.prop.get_type_string() }}, {{ response.source.attribute }}) +{% endif %} +return {{ response.prop.python_name }} +{% else %} +return None +{% endif %} +{% endmacro %} diff --git a/openapi_python_client/templates/endpoint_module.py.jinja b/openapi_python_client/templates/endpoint_module.py.jinja index 4db1c3546..23fd2a40d 100644 --- a/openapi_python_client/templates/endpoint_module.py.jinja +++ b/openapi_python_client/templates/endpoint_module.py.jinja @@ -1,5 +1,6 @@ from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, cast +from urllib.parse import quote import httpx @@ -7,7 +8,7 @@ from 
...client import AuthenticatedClient, Client from ...types import Response, UNSET from ... import errors -{% for relative in endpoint.relative_imports %} +{% for relative in endpoint.relative_imports | sort %} {{ relative }} {% endfor %} @@ -19,19 +20,19 @@ from ... import errors def _get_kwargs( {{ arguments(endpoint, include_client=False) | indent(4) }} -) -> Dict[str, Any]: +) -> dict[str, Any]: {{ header_params(endpoint) | indent(4) }} {{ cookie_params(endpoint) | indent(4) }} {{ query_params(endpoint) | indent(4) }} - _kwargs: Dict[str, Any] = { + _kwargs: dict[str, Any] = { "method": "{{ endpoint.method }}", {% if endpoint.path_parameters %} "url": "{{ endpoint.path }}".format( {%- for parameter in endpoint.path_parameters -%} - {{parameter.python_name}}={{parameter.python_name}}, + {{parameter.python_name}}=quote(str({{parameter.python_name}}), safe=""), {%- endfor -%} ), {% else %} @@ -47,16 +48,13 @@ def _get_kwargs( {% if endpoint.bodies | length > 1 %} {% for body in endpoint.bodies %} - if isinstance(body, {{body.prop.get_type_string() }}): - {% set destination = "_" + body.body_type + "_body" %} - {{ body_to_kwarg(body, destination) | indent(8) }} - _kwargs["{{ body.body_type.value }}"] = {{ destination }} + if isinstance(body, {{body.prop.get_type_string(no_optional=True) }}): + {{ body_to_kwarg(body) | indent(8) }} headers["Content-Type"] = "{{ body.content_type }}" {% endfor %} {% elif endpoint.bodies | length == 1 %} {% set body = endpoint.bodies[0] %} - {{ body_to_kwarg(body, "_body") | indent(4) }} - _kwargs["{{ body.body_type.value }}"] = _body + {{ body_to_kwarg(body) | indent(4) }} {% if body.content_type != "multipart/form-data" %}{# Need httpx to set the boundary automatically #} headers["Content-Type"] = "{{ body.content_type }}" {% endif %} @@ -67,30 +65,34 @@ def _get_kwargs( {% endif %} return _kwargs +{% if endpoint.responses.default %} + {% set return_type = return_string %} +{% else %} + {% set return_type = return_string + " | None" %} +{% endif %} -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[{{ return_string }}]: - {% for response in endpoint.responses %} - if response.status_code == HTTPStatus.{{ response.status_code.name }}: - {% if parsed_responses %}{% import "property_templates/" + response.prop.template as prop_template %} - {% if prop_template.construct %} - {{ prop_template.construct(response.prop, response.source.attribute) | indent(8) }} - {% elif response.source.return_type == response.prop.get_type_string() %} - {{ response.prop.python_name }} = {{ response.source.attribute }} - {% else %} - {{ response.prop.python_name }} = cast({{ response.prop.get_type_string() }}, {{ response.source.attribute }}) - {% endif %} - return {{ response.prop.python_name }} - {% else %} - return None - {% endif %} + +def _parse_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> {{return_type}}: + {% for response in endpoint.responses.patterns %} + {% set code_range = response.status_code.range %} + {% if code_range[0] == code_range[1] %} + if response.status_code == {{ code_range[0] }}: + {% else %} + if {{ code_range[0] }} <= response.status_code <= {{ code_range[1] }}: + {% endif %} + {{ parse_response(parsed_responses, response) | indent(8) }} {% endfor %} + {% if endpoint.responses.default %} + {{ parse_response(parsed_responses, endpoint.responses.default) | indent(4) }} + {% else %} if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(response.status_code, 
response.content) else: return None + {% endif %} -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[{{ return_string }}]: +def _build_response(*, client: AuthenticatedClient | Client, response: httpx.Response) -> Response[{{ return_string }}]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, @@ -117,7 +119,7 @@ def sync_detailed( {% if parsed_responses %} def sync( {{ arguments(endpoint) | indent(4) }} -) -> Optional[{{ return_string }}]: +) -> {{ return_string }} | None: {{ docstring(endpoint, return_string, is_detailed=false) | indent(4) }} return sync_detailed( @@ -143,7 +145,7 @@ async def asyncio_detailed( {% if parsed_responses %} async def asyncio( {{ arguments(endpoint) | indent(4) }} -) -> Optional[{{ return_string }}]: +) -> {{ return_string }} | None: {{ docstring(endpoint, return_string, is_detailed=false) | indent(4) }} return (await asyncio_detailed( diff --git a/openapi_python_client/templates/helpers.jinja b/openapi_python_client/templates/helpers.jinja index 180613c02..fd5c3ec86 100644 --- a/openapi_python_client/templates/helpers.jinja +++ b/openapi_python_client/templates/helpers.jinja @@ -1,8 +1,10 @@ -{% macro safe_docstring(content) %} +{% macro safe_docstring(content, omit_if_empty=False) %} {# This macro returns the provided content as a docstring, set to a raw string if it contains a backslash #} +{% if (not omit_if_empty) or (content | trim) %} {% if '\\' in content -%} r""" {{ content }} """ {%- else -%} """ {{ content }} """ {%- endif -%} +{% endif %} {% endmacro %} \ No newline at end of file diff --git a/openapi_python_client/templates/literal_enum.py.jinja b/openapi_python_client/templates/literal_enum.py.jinja new file mode 100644 index 000000000..72207efa3 --- /dev/null +++ b/openapi_python_client/templates/literal_enum.py.jinja @@ -0,0 +1,10 @@ +from typing import Literal, cast + +{{ enum.class_info.name }} = Literal{{ "%r" | format(enum.values|list|sort) }} + +{{ enum.get_class_name_snake_case() | upper }}_VALUES: set[{{ enum.class_info.name }}] = { {% for v in enum.values|list|sort %}{{"%r"|format(v)}}, {% endfor %} } + +def check_{{ enum.get_class_name_snake_case() }}(value: {{ enum.get_instance_type_string() }}) -> {{ enum.class_info.name}}: + if value in {{ enum.get_class_name_snake_case() | upper }}_VALUES: + return cast({{enum.class_info.name}}, value) + raise TypeError(f"Unexpected value {value!r}. Expected one of {{"{"}}{{ enum.get_class_name_snake_case() | upper }}_VALUES!r}") diff --git a/openapi_python_client/templates/model.py.jinja b/openapi_python_client/templates/model.py.jinja index 0df641ea4..f9ac5e3e2 100644 --- a/openapi_python_client/templates/model.py.jinja +++ b/openapi_python_client/templates/model.py.jinja @@ -1,23 +1,22 @@ -from typing import Any, Dict, Type, TypeVar, Tuple, Optional, BinaryIO, TextIO, TYPE_CHECKING +from __future__ import annotations -{% if model.additional_properties %} -from typing import List - -{% endif %} +from collections.abc import Mapping +from typing import Any, TypeVar, BinaryIO, TextIO, TYPE_CHECKING, Generator from attrs import define as _attrs_define from attrs import field as _attrs_field {% if model.is_multipart_body %} import json +from .. 
import types {% endif %} from ..types import UNSET, Unset -{% for relative in model.relative_imports %} +{% for relative in model.relative_imports | sort %} {{ relative }} {% endfor %} -{% for lazy_import in model.lazy_imports %} +{% for lazy_import in model.lazy_imports | sort %} {% if loop.first %} if TYPE_CHECKING: {% endif %} @@ -26,7 +25,7 @@ if TYPE_CHECKING: {% if model.additional_properties %} -{% set additional_property_type = 'Any' if model.additional_properties == True else model.additional_properties.get_type_string(quoted=not model.additional_properties.is_base_type) %} +{% set additional_property_type = 'Any' if model.additional_properties == True else model.additional_properties.get_type_string() %} {% endif %} {% set class_name = model.class_info.name %} @@ -52,63 +51,79 @@ T = TypeVar("T", bound="{{ class_name }}") {{ model.example | string | wordwrap(112) | indent(12) }} {% endif %} - {% if model.required_properties or model.optional_properties %} + {% if (not config.docstrings_on_attributes) and (model.required_properties or model.optional_properties) %} Attributes: {% for property in model.required_properties + model.optional_properties %} {{ property.to_docstring() | wordwrap(112) | indent(12) }} {% endfor %}{% endif %} {% endmacro %} +{% macro declare_property(property) %} +{%- if config.docstrings_on_attributes and property.description -%} +{{ property.to_string() }} +{{ safe_docstring(property.description, omit_if_empty=True) | wordwrap(112) }} +{%- else -%} +{{ property.to_string() }} +{%- endif -%} +{% endmacro %} + @_attrs_define class {{ class_name }}: - {{ safe_docstring(class_docstring_content(model)) | indent(4) }} + {{ safe_docstring(class_docstring_content(model), omit_if_empty=config.docstrings_on_attributes) | indent(4) }} {% for property in model.required_properties + model.optional_properties %} {% if property.default is none and property.required %} - {{ property.to_string() }} + {{ declare_property(property) | indent(4) }} {% endif %} {% endfor %} {% for property in model.required_properties + model.optional_properties %} {% if property.default is not none or not property.required %} - {{ property.to_string() }} + {{ declare_property(property) | indent(4) }} {% endif %} {% endfor %} {% if model.additional_properties %} - additional_properties: Dict[str, {{ additional_property_type }}] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, {{ additional_property_type }}] = _attrs_field(init=False, factory=dict) {% endif %} -{% macro _to_dict(multipart=False) %} -{% for property in model.required_properties + model.optional_properties %} +{% macro _transform_property(property, content) %} {% import "property_templates/" + property.template as prop_template %} -{% if prop_template.transform %} -{{ prop_template.transform(property, "self." 
+ property.python_name, property.python_name, multipart=multipart) }} -{% elif multipart %} -{{ property.python_name }} = self.{{ property.python_name }} if isinstance(self.{{ property.python_name }}, Unset) else (None, str(self.{{ property.python_name }}).encode(), "text/plain") +{%- if prop_template.transform -%} +{{ prop_template.transform(property=property, source=content, destination=property.python_name) }} +{%- else -%} +{{ property.python_name }} = {{ content }} +{%- endif -%} +{% endmacro %} + +{% macro multipart(property, source, destination) %} +{% import "property_templates/" + property.template as prop_template %} +{% if not property.required %} +if not isinstance({{source}}, Unset): + {{ prop_template.multipart(property, source, destination) | indent(4) }} {% else %} -{{ property.python_name }} = self.{{ property.python_name }} +{{ prop_template.multipart(property, source, destination) }} {% endif %} +{% endmacro %} -{% endfor %} - -field_dict: Dict[str, Any] = {} +{% macro _prepare_field_dict() %} +field_dict: dict[str, Any] = {} {% if model.additional_properties %} -{% if model.additional_properties.template %}{# Can be a bool instead of an object #} - {% import "property_templates/" + model.additional_properties.template as prop_template %} -{% else %} - {% set prop_template = None %} -{% endif %} -{% if prop_template and prop_template.transform %} +{% import "property_templates/" + model.additional_properties.template as prop_template %} +{% if prop_template.transform %} for prop_name, prop in self.additional_properties.items(): - {{ prop_template.transform(model.additional_properties, "prop", "field_dict[prop_name]", multipart=multipart, declare_type=false) | indent(4) }} -{% elif multipart %} -field_dict.update({ - key: (None, str(value).encode(), "text/plain") - for key, value in self.additional_properties.items() -}) + {{ prop_template.transform(model.additional_properties, "prop", "field_dict[prop_name]", declare_type=false) | indent(4) }} {% else %} field_dict.update(self.additional_properties) -{% endif %} -{% endif %} +{%- endif -%} +{%- endif -%} +{% endmacro %} + +{% macro _to_dict() %} +{% for property in model.required_properties + model.optional_properties -%} +{{ _transform_property(property, "self." + property.python_name) }} + +{% endfor %} + +{{ _prepare_field_dict() }} {% if model.required_properties | length > 0 or model.optional_properties | length > 0 %} field_dict.update({ {% for property in model.required_properties + model.optional_properties %} @@ -128,23 +143,41 @@ if {{ property.python_name }} is not UNSET: return field_dict {% endmacro %} - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: {% for lazy_import in model.lazy_imports %} {{ lazy_import }} {% endfor %} {{ _to_dict() | indent(8) }} {% if model.is_multipart_body %} - def to_multipart(self) -> Dict[str, Any]: - {{ _to_dict(multipart=True) | indent(8) }} + def to_multipart(self) -> types.RequestFiles: + {% for lazy_import in model.lazy_imports %} + {{ lazy_import }} + {% endfor %} + files: types.RequestFiles = [] + + {% for property in model.required_properties + model.optional_properties %} + {% set destination = "\"" + property.name + "\"" %} + {{ multipart(property, "self." 
+ property.python_name, destination) | indent(8) }} + + {% endfor %} + + {% if model.additional_properties %} + for prop_name, prop in self.additional_properties.items(): + {{ multipart(model.additional_properties, "prop", "prop_name") | indent(4) }} + {% endif %} + + return files + {% endif %} @classmethod - def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: - {% for lazy_import in model.lazy_imports %} + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + {% for lazy_import in model.lazy_imports | sort %} {{ lazy_import }} {% endfor %} - d = src_dict.copy() +{% if (model.required_properties or model.optional_properties or model.additional_properties) %} + d = dict(src_dict) {% for property in model.required_properties + model.optional_properties %} {% if property.required %} {% set property_source = 'd.pop("' + property.name + '")' %} @@ -159,6 +192,7 @@ return field_dict {% endif %} {% endfor %} +{% endif %} {{ module_name }} = cls( {% for property in model.required_properties + model.optional_properties %} {{ property.python_name }}={{ property.python_name }}, @@ -192,7 +226,7 @@ return field_dict {% if model.additional_properties %} @property - def additional_keys(self) -> List[str]: + def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) def __getitem__(self, key: str) -> {{ additional_property_type }}: diff --git a/openapi_python_client/templates/property_templates/any_property.py.jinja b/openapi_python_client/templates/property_templates/any_property.py.jinja index e69de29bb..ad3f195a4 100644 --- a/openapi_python_client/templates/property_templates/any_property.py.jinja +++ b/openapi_python_client/templates/property_templates/any_property.py.jinja @@ -0,0 +1,3 @@ +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, str({{source}}).encode(), "text/plain"))) +{% endmacro %} \ No newline at end of file diff --git a/openapi_python_client/templates/property_templates/boolean_property.py.jinja b/openapi_python_client/templates/property_templates/boolean_property.py.jinja index 3b16b7d20..e2c3392a1 100644 --- a/openapi_python_client/templates/property_templates/boolean_property.py.jinja +++ b/openapi_python_client/templates/property_templates/boolean_property.py.jinja @@ -1,3 +1,7 @@ {% macro transform_header(source) %} "true" if {{ source }} else "false" {% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, str({{source}}).encode(), "text/plain"))) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/const_property.py.jinja b/openapi_python_client/templates/property_templates/const_property.py.jinja new file mode 100644 index 000000000..d348de0ff --- /dev/null +++ b/openapi_python_client/templates/property_templates/const_property.py.jinja @@ -0,0 +1,9 @@ +{% macro construct(property, source) %} +{{ property.python_name }} = cast({{ property.get_type_string() }} , {{ source }}) +if {{ property.python_name }} != {{ property.value.python_code }}{% if not property.required %}and not isinstance({{ property.python_name }}, Unset){% endif %}: + raise ValueError(f"{{ property.name }} must match const {{ property.value.python_code }}, got '{{'{' + property.python_name + '}' }}'") +{%- endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, {{ source }}, "text/plain"))) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/date_property.py.jinja 
b/openapi_python_client/templates/property_templates/date_property.py.jinja index 5a3fdeafc..8659ab729 100644 --- a/openapi_python_client/templates/property_templates/date_property.py.jinja +++ b/openapi_python_client/templates/property_templates/date_property.py.jinja @@ -10,22 +10,24 @@ isoparse({{ source }}).date() {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, str){% endmacro %} -{% macro transform(property, source, destination, declare_type=True, multipart=False) %} +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} {% set transformed = source + ".isoformat()" %} -{% if multipart %}{# Multipart data must be bytes, not str #} -{% set transformed = transformed + ".encode()" %} -{% endif %} {% if property.required %} {{ destination }} = {{ transformed }} {%- else %} -{% if declare_type %} -{% set type_annotation = property.get_type_string(json=True) %} -{% if multipart %}{% set type_annotation = type_annotation | replace("str", "bytes") %}{% endif %} +{% if not skip_unset %} + {% if declare_type %} + {% set type_annotation = property.get_type_string(json=True) %} {{ destination }}: {{ type_annotation }} = UNSET -{% else %} + {% else %} {{ destination }} = UNSET + {% endif %} {% endif %} if not isinstance({{ source }}, Unset): {{ destination }} = {{ transformed }} {%- endif %} {% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, {{ source }}.isoformat().encode(), "text/plain"))) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/datetime_property.py.jinja b/openapi_python_client/templates/property_templates/datetime_property.py.jinja index 2ff54f4dc..1759e593e 100644 --- a/openapi_python_client/templates/property_templates/datetime_property.py.jinja +++ b/openapi_python_client/templates/property_templates/datetime_property.py.jinja @@ -10,22 +10,24 @@ isoparse({{ source }}) {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, str){% endmacro %} -{% macro transform(property, source, destination, declare_type=True, multipart=False) %} +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} {% set transformed = source + ".isoformat()" %} -{% if multipart %}{# Multipart data must be bytes, not str #} -{% set transformed = transformed + ".encode()" %} -{% endif %} {% if property.required %} {{ destination }} = {{ transformed }} {%- else %} -{% if declare_type %} -{% set type_annotation = property.get_type_string(json=True) %} -{% if multipart %}{% set type_annotation = type_annotation | replace("str", "bytes") %}{% endif %} + {% if not skip_unset %} + {% if declare_type %} + {% set type_annotation = property.get_type_string(json=True) %} {{ destination }}: {{ type_annotation }} = UNSET -{% else %} + {% else %} {{ destination }} = UNSET -{% endif %} + {% endif %} + {% endif %} if not isinstance({{ source }}, Unset): {{ destination }} = {{ transformed }} {%- endif %} {% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, {{ source }}.isoformat().encode(), "text/plain"))) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/enum_property.py.jinja b/openapi_python_client/templates/property_templates/enum_property.py.jinja index d01137f03..19ea03b28 100644 --- a/openapi_python_client/templates/property_templates/enum_property.py.jinja +++ b/openapi_python_client/templates/property_templates/enum_property.py.jinja @@ -10,22 
+10,22 @@ {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, {{ property.value_type.__name__ }}){% endmacro %} -{% macro transform(property, source, destination, declare_type=True, multipart=False) %} +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} {% set transformed = source + ".value" %} {% set type_string = property.get_type_string(json=True) %} -{% if multipart %} - {% set transformed = "(None, str(" + transformed + ").encode(), \"text/plain\")" %} - {% set type_string = "Union[Unset, Tuple[None, bytes, str]]" %} -{% endif %} {% if property.required %} {{ destination }} = {{ transformed }} {%- else %} -{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET +{% if not skip_unset %}{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET{% endif +%} if not isinstance({{ source }}, Unset): {{ destination }} = {{ transformed }} {% endif %} {% endmacro %} +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, str({{ source }}.value).encode(), "text/plain"))) +{% endmacro %} + {% macro transform_header(source) %} str({{ source }}) {% endmacro %} diff --git a/openapi_python_client/templates/property_templates/file_property.py.jinja b/openapi_python_client/templates/property_templates/file_property.py.jinja index c19a068c5..ac76470a7 100644 --- a/openapi_python_client/templates/property_templates/file_property.py.jinja +++ b/openapi_python_client/templates/property_templates/file_property.py.jinja @@ -12,12 +12,16 @@ File( {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, bytes){% endmacro %} -{% macro transform(property, source, destination, declare_type=True, multipart=False) %} +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} {% if property.required %} {{ destination }} = {{ source }}.to_tuple() {% else %} -{{ destination }}{% if declare_type %}: {{ property.get_type_string(json=True) }}{% endif %} = UNSET +{% if not skip_unset %}{{ destination }}{% if declare_type %}: {{ property.get_type_string(json=True) }}{% endif %} = UNSET{% endif +%} if not isinstance({{ source }}, Unset): {{ destination }} = {{ source }}.to_tuple() {% endif %} {% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, {{ source }}.to_tuple())) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/float_property.py.jinja b/openapi_python_client/templates/property_templates/float_property.py.jinja index 0d433c22e..dc982cb68 100644 --- a/openapi_python_client/templates/property_templates/float_property.py.jinja +++ b/openapi_python_client/templates/property_templates/float_property.py.jinja @@ -1,3 +1,7 @@ {% macro transform_header(source) %} str({{ source }}) {% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, str({{source}}).encode(), "text/plain"))) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/int_property.py.jinja b/openapi_python_client/templates/property_templates/int_property.py.jinja index 0d433c22e..dc982cb68 100644 --- a/openapi_python_client/templates/property_templates/int_property.py.jinja +++ b/openapi_python_client/templates/property_templates/int_property.py.jinja @@ -1,3 +1,7 @@ {% macro transform_header(source) %} str({{ source }}) {% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, str({{source}}).encode(), 
"text/plain"))) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/list_property.py.jinja b/openapi_python_client/templates/property_templates/list_property.py.jinja index 0e5d1b5e3..e765ed3b2 100644 --- a/openapi_python_client/templates/property_templates/list_property.py.jinja +++ b/openapi_python_client/templates/property_templates/list_property.py.jinja @@ -3,26 +3,28 @@ {% import "property_templates/" + inner_property.template as inner_template %} {% if inner_template.construct %} {% set inner_source = inner_property.python_name + "_data" %} +{% if property.required %} {{ property.python_name }} = [] _{{ property.python_name }} = {{ source }} -{% if property.required %} for {{ inner_source }} in (_{{ property.python_name }}): -{% else %} -for {{ inner_source }} in (_{{ property.python_name }} or []): -{% endif %} {{ inner_template.construct(inner_property, inner_source) | indent(4) }} {{ property.python_name }}.append({{ inner_property.python_name }}) {% else %} +_{{ property.python_name }} = {{ source }} +{{ property.python_name }}: {{ property.get_type_string() }} = UNSET +if _{{ property.python_name }} is not UNSET: + {{ property.python_name }} = [] + for {{ inner_source }} in _{{ property.python_name }}: + {{ inner_template.construct(inner_property, inner_source) | indent(8) }} + {{ property.python_name }}.append({{ inner_property.python_name }}) +{% endif %} +{% else %} {{ property.python_name }} = cast({{ property.get_type_string(no_optional=True) }}, {{ source }}) {% endif %} {% endmacro %} -{% macro _transform(property, source, destination, multipart, transform_method) %} +{% macro _transform(property, source, destination, transform_method) %} {% set inner_property = property.inner_property %} -{% if multipart %} -{% set multipart_destination = destination %} -{% set destination = "_temp_" + destination %} -{% endif %} {% import "property_templates/" + inner_property.template as inner_template %} {% if inner_template.transform %} {% set inner_source = inner_property.python_name + "_data" %} @@ -33,31 +35,26 @@ for {{ inner_source }} in {{ source }}: {% else %} {{ destination }} = {{ source }} {% endif %} -{% if multipart %} -{{ multipart_destination }} = (None, json.dumps({{ destination }}).encode(), 'application/json') -{% endif %} {% endmacro %} {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, list){% endmacro %} -{% macro transform(property, source, destination, declare_type=True, multipart=False, transform_method="to_dict") %} +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} {% set inner_property = property.inner_property %} -{% if multipart %} - {% set type_string = "Union[Unset, Tuple[None, bytes, str]]" %} -{% else %} - {% set type_string = property.get_type_string(json=True) %} -{% endif %} +{% set type_string = property.get_type_string(json=True) %} {% if property.required %} -{{ _transform(property, source, destination, multipart, transform_method) }} +{{ _transform(property, source, destination, "to_dict") }} {% else %} -{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET +{% if not skip_unset %}{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET{% endif +%} if not isinstance({{ source }}, Unset): - {{ _transform(property, source, destination, multipart, transform_method) | indent(4)}} + {{ _transform(property, source, destination, "to_dict") | indent(4)}} {% endif %} - - {% endmacro %} -{% macro 
transform_multipart(property, source, destination) %} -{{ transform(property, source, destination, transform_method="to_multipart") }} +{% macro multipart(property, source, destination) %} +{% set inner_property = property.inner_property %} +{% import "property_templates/" + inner_property.template as inner_template %} +{% set inner_source = inner_property.python_name + "_element" %} +for {{ inner_source }} in {{ source }}: + {{ inner_template.multipart(inner_property, inner_source, destination) | indent(4) }} {% endmacro %} diff --git a/openapi_python_client/templates/property_templates/literal_enum_property.py.jinja b/openapi_python_client/templates/property_templates/literal_enum_property.py.jinja new file mode 100644 index 000000000..a0dd3d19b --- /dev/null +++ b/openapi_python_client/templates/property_templates/literal_enum_property.py.jinja @@ -0,0 +1,30 @@ +{% macro construct_function(property, source) %} +check_{{ property.get_class_name_snake_case() }}({{ source }}) +{% endmacro %} + +{% from "property_templates/property_macros.py.jinja" import construct_template %} + +{% macro construct(property, source) %} +{{ construct_template(construct_function, property, source) }} +{% endmacro %} + +{% macro check_type_for_construct(property, source) %}isinstance({{ source }}, {{ property.get_instance_type_string() }}){% endmacro %} + +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} +{% set type_string = property.get_type_string(json=True) %} +{% if property.required %} +{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = {{ source }} +{%- else %} +{% if not skip_unset %}{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET{% endif +%} +if not isinstance({{ source }}, Unset): + {{ destination }} = {{ source }} +{% endif %} +{% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, str({{ source }}).encode(), "text/plain"))) +{% endmacro %} + +{% macro transform_header(source) %} +str({{ source }}) +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/model_property.py.jinja b/openapi_python_client/templates/property_templates/model_property.py.jinja index e7f779563..a442ccb70 100644 --- a/openapi_python_client/templates/property_templates/model_property.py.jinja +++ b/openapi_python_client/templates/property_templates/model_property.py.jinja @@ -10,23 +10,29 @@ {% macro check_type_for_construct(property, source) %}isinstance({{ source }}, dict){% endmacro %} -{% macro transform(property, source, destination, declare_type=True, multipart=False, transform_method="to_dict") %} -{% set transformed = source + "." 
+ transform_method + "()" %} -{% if multipart %} - {% set transformed = "(None, json.dumps(" + transformed + ").encode(), 'application/json')" %} - {% set type_string = property.get_type_string(multipart=True) %} -{% else %} - {% set type_string = property.get_type_string(json=True) %} -{% endif %} +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} +{% set transformed = source + ".to_dict()" %} +{% set type_string = property.get_type_string(json=True) %} {% if property.required %} {{ destination }} = {{ transformed }} {%- else %} -{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET +{% if not skip_unset %}{{ destination }}{% if declare_type %}: {{ type_string }}{% endif %} = UNSET{% endif %} + if not isinstance({{ source }}, Unset): {{ destination }} = {{ transformed }} {%- endif %} {% endmacro %} -{% macro transform_multipart(property, source, destination) %} -{{ transform(property, source, destination, transform_method="to_multipart") }} +{% macro transform_multipart_body(property) %} +{% set transformed = property.python_name + ".to_multipart()" %} +{% if property.required %} +_kwargs["files"] = {{ transformed }} +{%- else %} +if not isinstance({{ property.python_name }}, Unset): + _kwargs["files"] = {{ transformed }} +{%- endif %} +{% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, json.dumps( {{source}}.to_dict()).encode(), "application/json"))) {% endmacro %} diff --git a/openapi_python_client/templates/property_templates/union_property.py.jinja b/openapi_python_client/templates/property_templates/union_property.py.jinja index b8ab1962d..f4fd19f34 100644 --- a/openapi_python_client/templates/property_templates/union_property.py.jinja +++ b/openapi_python_client/templates/property_templates/union_property.py.jinja @@ -1,10 +1,10 @@ {% macro construct(property, source) %} def _parse_{{ property.python_name }}(data: object) -> {{ property.get_type_string() }}: - {% if "None" in property.get_type_strings_in_union(json=True, multipart=False) %} + {% if "None" in property.get_type_strings_in_union(json=True) %} if data is None: return data {% endif %} - {% if "Unset" in property.get_type_strings_in_union(json=True, multipart=False) %} + {% if "Unset" in property.get_type_strings_in_union(json=True) %} if isinstance(data, Unset): return data {% endif %} @@ -21,7 +21,7 @@ def _parse_{{ property.python_name }}(data: object) -> {{ property.get_type_stri raise TypeError() {{ inner_template.construct(inner_property, "data") | indent(8) }} return {{ inner_property.python_name }} - except: # noqa: E722 + except (TypeError, ValueError, AttributeError, KeyError): pass {% else %}{# Don't do try/except for the last one nor any properties with no type checking #} {% if inner_template.check_type_for_construct %} @@ -39,11 +39,11 @@ def _parse_{{ property.python_name }}(data: object) -> {{ property.get_type_stri {{ property.python_name }} = _parse_{{ property.python_name }}({{ source }}) {% endmacro %} -{% macro transform(property, source, destination, declare_type=True, multipart=False) %} +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} {% set ns = namespace(contains_properties_without_transform = false, contains_modified_properties = not property.required, has_if = false) %} -{% if declare_type %}{{ destination }}: {{ property.get_type_string(json=not multipart, multipart=multipart) }}{% endif %} +{% if declare_type %}{{ destination }}: {{ 
property.get_type_string(json=True) }}{% endif %} -{% if not property.required %} +{% if not property.required and not skip_unset %} if isinstance({{ source }}, Unset): {{ destination }} = UNSET {% set ns.has_if = true %} @@ -64,7 +64,7 @@ elif isinstance({{ source }}, {{ inner_property.get_instance_type_string() }}): {% else %} else: {% endif %} - {{ inner_template.transform(inner_property, source, destination, declare_type=False, multipart=multipart) | indent(4) }} + {{ inner_template.transform(inner_property, source, destination, declare_type=False) | indent(4) }} {% endfor %} {% if ns.contains_properties_without_transform and ns.contains_modified_properties %} else: @@ -73,3 +73,30 @@ else: {{ destination }} = {{ source }} {%- endif %} {% endmacro %} + + +{% macro instance_check(inner_property, source) %} +{% if inner_property.get_instance_type_string() == "None" %} +if {{ source }} is None: +{% else %} +if isinstance({{ source }}, {{ inner_property.get_instance_type_string() }}): +{% endif %} +{% endmacro %} + +{% macro multipart(property, source, destination) %} +{% set ns = namespace(has_if = false) %} +{% for inner_property in property.inner_properties %} +{% if not ns.has_if %} +{{ instance_check(inner_property, source) }} +{% set ns.has_if = true %} +{% elif not loop.last %} + +el{{ instance_check(inner_property, source) }} +{% else %} + +else: +{% endif %} +{% import "property_templates/" + inner_property.template as inner_template %} + {{ inner_template.multipart(inner_property, source, destination) | indent(4) | trim }} +{%- endfor -%} +{% endmacro %} diff --git a/openapi_python_client/templates/property_templates/uuid_property.py.jinja b/openapi_python_client/templates/property_templates/uuid_property.py.jinja new file mode 100644 index 000000000..828b12b7f --- /dev/null +++ b/openapi_python_client/templates/property_templates/uuid_property.py.jinja @@ -0,0 +1,33 @@ +{% macro construct_function(property, source) %} +UUID({{ source }}) +{% endmacro %} + +{% from "property_templates/property_macros.py.jinja" import construct_template %} + +{% macro construct(property, source) %} +{{ construct_template(construct_function, property, source) }} +{% endmacro %} + +{% macro check_type_for_construct(property, source) %}isinstance({{ source }}, str){% endmacro %} + +{% macro transform(property, source, destination, declare_type=True, skip_unset=False) %} +{% set transformed = "str(" + source + ")" %} +{% if property.required %} +{{ destination }} = {{ transformed }} +{%- else %} +{% if not skip_unset %} + {% if declare_type %} + {% set type_annotation = property.get_type_string(json=True) %} +{{ destination }}: {{ type_annotation }} = UNSET + {% else %} +{{ destination }} = UNSET + {% endif %} +{% endif %} +if not isinstance({{ source }}, Unset): + {{ destination }} = {{ transformed }} +{%- endif %} +{% endmacro %} + +{% macro multipart(property, source, name) %} +files.append(({{ name }}, (None, str({{ source }}), "text/plain"))) +{% endmacro %} diff --git a/openapi_python_client/templates/pyproject.toml.jinja b/openapi_python_client/templates/pyproject.toml.jinja index 7f68d58e5..9f21f8043 100644 --- a/openapi_python_client/templates/pyproject.toml.jinja +++ b/openapi_python_client/templates/pyproject.toml.jinja @@ -1,49 +1,9 @@ -{% set poetry = meta == "poetry" %} -{% set pdm = meta == "pdm" %} -{% if poetry or pdm %} -{% if poetry %}[tool.poetry] -{% elif pdm %}[project] +{% if meta == "poetry" %} +{% include "pyproject_poetry.toml.jinja" %} +{% elif meta == "pdm" %} +{% include 
"pyproject_pdm.toml.jinja" %} +{% elif meta == "uv" %} +{% include "pyproject_uv.toml.jinja" %} {% endif %} -name = "{{ project_name }}" -version = "{{ package_version }}" -description = "{{ package_description }}" -authors = [] -readme = "README.md" -{% if pdm %}requires-python = ">=3.8,<4.0"{% endif %} -{% if poetry %} -packages = [ - {include = "{{ package_name }}"}, -] -include = ["CHANGELOG.md", "{{ package_name }}/py.typed"] -{% endif %} - -{% if pdm %} -dependencies = [ - "httpx>=0.20.0,<0.28.0", - "attrs>=21.3.0", - "python-dateutil>=2.8.0", -] - -[tool.pdm] -distribution = true -{% endif %} -{% if poetry %} - -[tool.poetry.dependencies] -python = "^3.8" -httpx = ">=0.20.0,<0.28.0" -attrs = ">=21.3.0" -python-dateutil = "^2.8.0" -{% endif %} - -[build-system] -{% if poetry %} -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" -{% elif pdm %} -requires = ["pdm-backend"] -build-backend = "pdm.backend" -{% endif %} -{% endif %}{# poetry or pdm #} {% include "pyproject_ruff.toml.jinja" %} diff --git a/openapi_python_client/templates/pyproject_pdm.toml.jinja b/openapi_python_client/templates/pyproject_pdm.toml.jinja new file mode 100644 index 000000000..afdb7ce3a --- /dev/null +++ b/openapi_python_client/templates/pyproject_pdm.toml.jinja @@ -0,0 +1,19 @@ +[project] +name = "{{ project_name }}" +version = "{{ package_version }}" +description = "{{ package_description }}" +authors = [] +readme = "README.md" +requires-python = ">=3.10" +dependencies = [ + "httpx>=0.23.0,<0.29.0", + "attrs>=22.2.0", + "python-dateutil>=2.8.0", +] + +[tool.pdm] +distribution = true + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" diff --git a/openapi_python_client/templates/pyproject_poetry.toml.jinja b/openapi_python_client/templates/pyproject_poetry.toml.jinja new file mode 100644 index 000000000..8585b6d89 --- /dev/null +++ b/openapi_python_client/templates/pyproject_poetry.toml.jinja @@ -0,0 +1,20 @@ +[tool.poetry] +name = "{{ project_name }}" +version = "{{ package_version }}" +description = "{{ package_description }}" +authors = [] +readme = "README.md" +packages = [ + { include = "{{ package_name }}" }, +] +include = ["{{ package_name }}/py.typed"] + +[tool.poetry.dependencies] +python = "^3.10" +httpx = ">=0.23.0,<0.29.0" +attrs = ">=22.2.0" +python-dateutil = "^2.8.0" + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/openapi_python_client/templates/pyproject_uv.toml.jinja b/openapi_python_client/templates/pyproject_uv.toml.jinja new file mode 100644 index 000000000..ef6cda77b --- /dev/null +++ b/openapi_python_client/templates/pyproject_uv.toml.jinja @@ -0,0 +1,20 @@ +[project] +name = "{{ project_name }}" +version = "{{ package_version }}" +description = "{{ package_description }}" +authors = [] +requires-python = ">=3.10" +readme = "README.md" +dependencies = [ + "httpx>=0.23.0,<0.29.0", + "attrs>=22.2.0", + "python-dateutil>=2.8.0,<3", +] + +[tool.uv.build-backend] +module-name = "{{ package_name }}" +module-root = "" + +[build-system] +requires = ["uv_build>=0.9.0,<0.10.0"] +build-backend = "uv_build" diff --git a/openapi_python_client/templates/setup.py.jinja b/openapi_python_client/templates/setup.py.jinja index 87c0cc063..8c9b060c4 100644 --- a/openapi_python_client/templates/setup.py.jinja +++ b/openapi_python_client/templates/setup.py.jinja @@ -12,7 +12,7 @@ setup( long_description=long_description, long_description_content_type="text/markdown", packages=find_packages(), 
- python_requires=">=3.8, <4", - install_requires=["httpx >= 0.20.0, < 0.28.0", "attrs >= 21.3.0", "python-dateutil >= 2.8.0, < 3"], + python_requires=">=3.10, <4", + install_requires=["httpx >= 0.23.0, < 0.29.0", "attrs >= 22.2.0", "python-dateutil >= 2.8.0, < 3"], package_data={"{{ package_name }}": ["py.typed"]}, ) diff --git a/openapi_python_client/templates/types.py.jinja b/openapi_python_client/templates/types.py.jinja index cc151acb3..f74db0ad7 100644 --- a/openapi_python_client/templates/types.py.jinja +++ b/openapi_python_client/templates/types.py.jinja @@ -1,6 +1,8 @@ """ Contains some shared types for properties """ + +from collections.abc import Mapping, MutableMapping from http import HTTPStatus -from typing import Any, BinaryIO, Generic, MutableMapping, Optional, Tuple, TypeVar, Literal +from typing import BinaryIO, Generic, TypeVar, Literal, IO from attrs import define @@ -12,19 +14,25 @@ class Unset: UNSET: Unset = Unset() -{# Used as `FileProperty._json_type_string` #} -FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]] - +# The types that `httpx.Client(files=)` can accept, copied from that library. +FileContent = IO[bytes] | bytes | str +FileTypes = ( + # (filename, file (or bytes), content_type) + tuple[str | None, FileContent, str | None] + # (filename, file (or bytes), content_type, headers) + | tuple[str | None, FileContent, str | None, Mapping[str, str]] +) +RequestFiles = list[tuple[str, FileTypes]] @define class File: """ Contains information for file uploads """ payload: BinaryIO - file_name: Optional[str] = None - mime_type: Optional[str] = None + file_name: str | None = None + mime_type: str | None = None - def to_tuple(self) -> FileJsonType: + def to_tuple(self) -> FileTypes: """ Return a tuple representation that httpx will accept for multipart/form-data """ return self.file_name, self.payload, self.mime_type @@ -39,7 +47,7 @@ class Response(Generic[T]): status_code: HTTPStatus content: bytes headers: MutableMapping[str, str] - parsed: Optional[T] + parsed: T | None -__all__ = ["File", "Response", "FileJsonType", "Unset", "UNSET"] +__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/openapi_python_client/utils.py b/openapi_python_client/utils.py index 834e2666c..15e8c9eec 100644 --- a/openapi_python_client/utils.py +++ b/openapi_python_client/utils.py @@ -6,6 +6,8 @@ from keyword import iskeyword from typing import Any +from .config import Config + DELIMITERS = r"\. 
_-" @@ -55,7 +57,6 @@ def split_words(value: str) -> list[str]: RESERVED_WORDS = (set(dir(builtins)) | {"self", "true", "false", "datetime"}) - { - "type", "id", } @@ -105,10 +106,11 @@ def remove_string_escapes(value: str) -> str: return value.replace('"', r"\"") -def get_content_type(content_type: str) -> str | None: +def get_content_type(content_type: str, config: Config) -> str | None: """ Given a string representing a content type with optional parameters, returns the content type only """ + content_type = config.content_type_overrides.get(content_type, content_type) message = Message() message.add_header("Content-Type", content_type) diff --git a/pdm.lock b/pdm.lock index 341232672..dde6a1dbc 100644 --- a/pdm.lock +++ b/pdm.lock @@ -3,13 +3,16 @@ [metadata] groups = ["default", "dev"] -strategy = ["cross_platform", "inherit_metadata"] -lock_version = "4.4.1" -content_hash = "sha256:84c6cf3b047f230aa92bc01276c772c95ca0d96428fe0d5e01b95839ffe3b66f" +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:f100f590af836736224e58af0663de7e315ab1a0b9b19c368ffd1f7f5fef99a3" + +[[metadata.targets]] +requires_python = "~=3.10" [[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" requires_python = ">=3.8" summary = "Reusable constraint types to use with typing.Annotated" groups = ["default"] @@ -17,146 +20,60 @@ dependencies = [ "typing-extensions>=4.0.0; python_version < \"3.9\"", ] files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] name = "anyio" -version = "3.7.1" -requires_python = ">=3.7" -summary = "High level compatibility layer for multiple asynchronous event loop implementations" +version = "4.12.0" +requires_python = ">=3.9" +summary = "High-level concurrency and networking framework on top of asyncio or Trio" groups = ["default"] dependencies = [ - "exceptiongroup; python_version < \"3.11\"", + "exceptiongroup>=1.0.2; python_version < \"3.11\"", "idna>=2.8", - "sniffio>=1.1", + "typing-extensions>=4.5; python_version < \"3.13\"", ] files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb"}, + {file = "anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0"}, ] [[package]] name = "attrs" -version = "23.2.0" -requires_python = ">=3.7" +version = "25.4.0" +requires_python = ">=3.9" summary = "Classes Without Boilerplate" groups = ["default"] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-25.4.0-py3-none-any.whl", hash = 
"sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, ] [[package]] name = "certifi" -version = "2023.11.17" -requires_python = ">=3.6" +version = "2026.1.4" +requires_python = ">=3.7" summary = "Python package for providing Mozilla's CA Bundle." -groups = ["default", "dev"] -files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -requires_python = ">=3.7.0" -summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -groups = ["dev"] +groups = ["default"] files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - 
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, ] [[package]] name = "click" -version = "8.1.7" -requires_python = ">=3.7" +version = "8.3.1" +requires_python = ">=3.10" summary = "Composable command line interface toolkit" -groups = ["default", "dev"] +groups = ["default"] dependencies = [ "colorama; platform_system == \"Windows\"", ] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = 
"sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, ] [[package]] @@ -165,6 +82,7 @@ version = "0.4.6" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" summary = "Cross-platform colored terminal text." groups = ["default", "dev"] +marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -172,187 +90,255 @@ files = [ [[package]] name = "coverage" -version = "7.4.0" -requires_python = ">=3.8" +version = "7.13.1" +requires_python = ">=3.10" summary = "Code coverage measurement for Python" groups = ["dev"] files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = 
"coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = 
"coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147"}, + {file = "coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6"}, 
+ {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29"}, + {file = "coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f"}, + {file = "coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1"}, + {file = "coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88"}, + {file = "coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba"}, + {file = "coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19"}, + {file = "coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a"}, + {file = "coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c"}, + {file = "coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3"}, + {file = "coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c"}, + {file = "coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7"}, + {file = "coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6"}, + {file = "coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c"}, + {file = "coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78"}, + {file = "coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784"}, + {file = "coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461"}, + {file = "coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500"}, + {file = "coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9"}, + {file = "coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc"}, + {file = "coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53"}, + {file = "coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842"}, + {file = "coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2"}, + {file = "coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09"}, + {file = "coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894"}, + {file = "coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9"}, + {file = "coverage-7.13.1-cp314-cp314-win32.whl", hash = 
"sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5"}, + {file = "coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a"}, + {file = "coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0"}, + {file = "coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a"}, + {file = "coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416"}, + {file = "coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f"}, + {file = "coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79"}, + {file = "coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4"}, + {file = "coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573"}, + {file = "coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd"}, ] [[package]] name = "coverage" -version = "7.4.0" +version = "7.13.1" extras = ["toml"] -requires_python = ">=3.8" +requires_python = ">=3.10" summary = "Code coverage measurement for Python" groups = ["dev"] dependencies = [ - "coverage==7.4.0", + "coverage==7.13.1", "tomli; python_full_version <= \"3.11.0a6\"", ] files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = 
"coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = 
"coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, -] - -[[package]] -name = "dparse" -version = "0.6.3" -requires_python = ">=3.6" -summary = "A parser for Python dependency files" -groups = ["dev"] -dependencies = [ - "packaging", - "tomli; python_version < \"3.11\"", -] -files = [ - {file = "dparse-0.6.3-py3-none-any.whl", hash = "sha256:0d8fe18714056ca632d98b24fbfc4e9791d4e47065285ab486182288813a5318"}, - {file = "dparse-0.6.3.tar.gz", hash = "sha256:27bb8b4bcaefec3997697ba3f6e06b2447200ba273c0b085c3d012a04571b528"}, + {file = "coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147"}, + {file = "coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d"}, + {file = "coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae"}, + {file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29"}, + {file = "coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f"}, + {file = "coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1"}, + {file = "coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88"}, + {file = "coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9"}, + {file = 
"coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf"}, + {file = "coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb"}, + {file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba"}, + {file = "coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19"}, + {file = "coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a"}, + {file = "coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c"}, + {file = "coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3"}, + {file = "coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968"}, + {file = "coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf"}, + {file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c"}, + {file = "coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7"}, + {file = "coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6"}, + {file = "coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = 
"sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c"}, + {file = "coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78"}, + {file = "coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4"}, + {file = "coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398"}, + {file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784"}, + {file = "coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461"}, + {file = "coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500"}, + {file = "coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9"}, + {file = "coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc"}, + {file = "coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1"}, + {file = "coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = 
"sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e"}, + {file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53"}, + {file = "coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842"}, + {file = "coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2"}, + {file = "coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09"}, + {file = "coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894"}, + {file = "coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4"}, + {file = "coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864"}, + {file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9"}, + {file = "coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5"}, + {file = "coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a"}, + {file = "coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0"}, + {file = "coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a"}, + {file = "coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5"}, + {file = "coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d"}, + 
{file = "coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7"}, + {file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416"}, + {file = "coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f"}, + {file = "coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79"}, + {file = "coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4"}, + {file = "coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573"}, + {file = "coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd"}, ] [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.3.1" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" groups = ["default", "dev"] marker = "python_version < \"3.11\"" +dependencies = [ + "typing-extensions>=4.6.0; python_version < \"3.13\"", +] files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, ] [[package]] name = "h11" -version = "0.14.0" -requires_python = ">=3.7" +version = "0.16.0" +requires_python = ">=3.8" summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" groups = ["default"] files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.9" requires_python = ">=3.8" summary = "A minimal low-level HTTP client." 
groups = ["default"] dependencies = [ "certifi", - "h11<0.15,>=0.13", + "h11>=0.16", ] files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [[package]] name = "httpx" -version = "0.27.0" +version = "0.28.1" requires_python = ">=3.8" summary = "The next generation HTTP client." groups = ["default"] @@ -361,38 +347,37 @@ dependencies = [ "certifi", "httpcore==1.*", "idna", - "sniffio", ] files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [[package]] name = "idna" -version = "3.6" -requires_python = ">=3.5" +version = "3.11" +requires_python = ">=3.8" summary = "Internationalized Domain Names in Applications (IDNA)" -groups = ["default", "dev"] +groups = ["default"] files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [[package]] name = "iniconfig" -version = "2.0.0" -requires_python = ">=3.7" +version = "2.3.0" +requires_python = ">=3.10" summary = "brain-dead simple config-ini parsing" groups = ["dev"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, ] [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.6" requires_python = ">=3.7" summary = "A very fast and expressive template engine." 
groups = ["default"] @@ -400,339 +385,478 @@ dependencies = [ "MarkupSafe>=2.0", ] files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[[package]] +name = "librt" +version = "0.7.7" +requires_python = ">=3.9" +summary = "Mypyc runtime library" +groups = ["dev"] +marker = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "librt-0.7.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4836c5645f40fbdc275e5670819bde5ab5f2e882290d304e3c6ddab1576a6d0"}, + {file = "librt-0.7.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae8aec43117a645a31e5f60e9e3a0797492e747823b9bda6972d521b436b4e8"}, + {file = "librt-0.7.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:aea05f701ccd2a76b34f0daf47ca5068176ff553510b614770c90d76ac88df06"}, + {file = "librt-0.7.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b16ccaeff0ed4355dfb76fe1ea7a5d6d03b5ad27f295f77ee0557bc20a72495"}, + {file = "librt-0.7.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48c7e150c095d5e3cea7452347ba26094be905d6099d24f9319a8b475fcd3e0"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4dcee2f921a8632636d1c37f1bbdb8841d15666d119aa61e5399c5268e7ce02e"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14ef0f4ac3728ffd85bfc58e2f2f48fb4ef4fa871876f13a73a7381d10a9f77c"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e4ab69fa37f8090f2d971a5d2bc606c7401170dbdae083c393d6cbf439cb45b8"}, + {file = "librt-0.7.7-cp310-cp310-win32.whl", hash = "sha256:4bf3cc46d553693382d2abf5f5bd493d71bb0f50a7c0beab18aa13a5545c8900"}, + {file = "librt-0.7.7-cp310-cp310-win_amd64.whl", hash = "sha256:f0c8fe5aeadd8a0e5b0598f8a6ee3533135ca50fd3f20f130f9d72baf5c6ac58"}, + {file = "librt-0.7.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a487b71fbf8a9edb72a8c7a456dda0184642d99cd007bc819c0b7ab93676a8ee"}, + {file = "librt-0.7.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4d4efb218264ecf0f8516196c9e2d1a0679d9fb3bb15df1155a35220062eba8"}, + {file = "librt-0.7.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b8bb331aad734b059c4b450cd0a225652f16889e286b2345af5e2c3c625c3d85"}, + {file = "librt-0.7.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:467dbd7443bda08338fc8ad701ed38cef48194017554f4c798b0a237904b3f99"}, + {file = "librt-0.7.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50d1d1ee813d2d1a3baf2873634ba506b263032418d16287c92ec1cc9c1a00cb"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7e5070cf3ec92d98f57574da0224f8c73faf1ddd6d8afa0b8c9f6e86997bc74"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bdb9f3d865b2dafe7f9ad7f30ef563c80d0ddd2fdc8cc9b8e4f242f475e34d75"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:8185c8497d45164e256376f9da5aed2bb26ff636c798c9dabe313b90e9f25b28"}, + {file = "librt-0.7.7-cp311-cp311-win32.whl", hash = "sha256:44d63ce643f34a903f09ff7ca355aae019a3730c7afd6a3c037d569beeb5d151"}, + {file = "librt-0.7.7-cp311-cp311-win_amd64.whl", hash = "sha256:7d13cc340b3b82134f8038a2bfe7137093693dcad8ba5773da18f95ad6b77a8a"}, + {file = "librt-0.7.7-cp311-cp311-win_arm64.whl", hash = "sha256:983de36b5a83fe9222f4f7dcd071f9b1ac6f3f17c0af0238dadfb8229588f890"}, + {file = "librt-0.7.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a85a1fc4ed11ea0eb0a632459ce004a2d14afc085a50ae3463cd3dfe1ce43fc"}, + {file = "librt-0.7.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c87654e29a35938baead1c4559858f346f4a2a7588574a14d784f300ffba0efd"}, + {file = "librt-0.7.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c9faaebb1c6212c20afd8043cd6ed9de0a47d77f91a6b5b48f4e46ed470703fe"}, + {file = "librt-0.7.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1908c3e5a5ef86b23391448b47759298f87f997c3bd153a770828f58c2bb4630"}, + {file = "librt-0.7.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbc4900e95a98fc0729523be9d93a8fedebb026f32ed9ffc08acd82e3e181503"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7ea4e1fbd253e5c68ea0fe63d08577f9d288a73f17d82f652ebc61fa48d878d"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ef7699b7a5a244b1119f85c5bbc13f152cd38240cbb2baa19b769433bae98e50"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:955c62571de0b181d9e9e0a0303c8bc90d47670a5eff54cf71bf5da61d1899cf"}, + {file = "librt-0.7.7-cp312-cp312-win32.whl", hash = "sha256:1bcd79be209313b270b0e1a51c67ae1af28adad0e0c7e84c3ad4b5cb57aaa75b"}, + {file = "librt-0.7.7-cp312-cp312-win_amd64.whl", hash = "sha256:4353ee891a1834567e0302d4bd5e60f531912179578c36f3d0430f8c5e16b456"}, + {file = "librt-0.7.7-cp312-cp312-win_arm64.whl", hash = "sha256:a76f1d679beccccdf8c1958e732a1dfcd6e749f8821ee59d7bec009ac308c029"}, + {file = "librt-0.7.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f4a0b0a3c86ba9193a8e23bb18f100d647bf192390ae195d84dfa0a10fb6244"}, + {file = "librt-0.7.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5335890fea9f9e6c4fdf8683061b9ccdcbe47c6dc03ab8e9b68c10acf78be78d"}, + {file = "librt-0.7.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b4346b1225be26def3ccc6c965751c74868f0578cbcba293c8ae9168483d811"}, + {file = "librt-0.7.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a10b8eebdaca6e9fdbaf88b5aefc0e324b763a5f40b1266532590d5afb268a4c"}, + {file = "librt-0.7.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:067be973d90d9e319e6eb4ee2a9b9307f0ecd648b8a9002fa237289a4a07a9e7"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:23d2299ed007812cccc1ecef018db7d922733382561230de1f3954db28433977"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6b6f8ea465524aa4c7420c7cc4ca7d46fe00981de8debc67b1cc2e9957bb5b9d"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8df32a99cc46eb0ee90afd9ada113ae2cafe7e8d673686cf03ec53e49635439"}, + {file = "librt-0.7.7-cp313-cp313-win32.whl", hash = 
"sha256:86f86b3b785487c7760247bcdac0b11aa8bf13245a13ed05206286135877564b"}, + {file = "librt-0.7.7-cp313-cp313-win_amd64.whl", hash = "sha256:4862cb2c702b1f905c0503b72d9d4daf65a7fdf5a9e84560e563471e57a56949"}, + {file = "librt-0.7.7-cp313-cp313-win_arm64.whl", hash = "sha256:0996c83b1cb43c00e8c87835a284f9057bc647abd42b5871e5f941d30010c832"}, + {file = "librt-0.7.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:23daa1ab0512bafdd677eb1bfc9611d8ffbe2e328895671e64cb34166bc1b8c8"}, + {file = "librt-0.7.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:558a9e5a6f3cc1e20b3168fb1dc802d0d8fa40731f6e9932dcc52bbcfbd37111"}, + {file = "librt-0.7.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2567cb48dc03e5b246927ab35cbb343376e24501260a9b5e30b8e255dca0d1d2"}, + {file = "librt-0.7.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6066c638cdf85ff92fc6f932d2d73c93a0e03492cdfa8778e6d58c489a3d7259"}, + {file = "librt-0.7.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a609849aca463074c17de9cda173c276eb8fee9e441053529e7b9e249dc8b8ee"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:add4e0a000858fe9bb39ed55f31085506a5c38363e6eb4a1e5943a10c2bfc3d1"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a3bfe73a32bd0bdb9a87d586b05a23c0a1729205d79df66dee65bb2e40d671ba"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0ecce0544d3db91a40f8b57ae26928c02130a997b540f908cefd4d279d6c5848"}, + {file = "librt-0.7.7-cp314-cp314-win32.whl", hash = "sha256:8f7a74cf3a80f0c3b0ec75b0c650b2f0a894a2cec57ef75f6f72c1e82cdac61d"}, + {file = "librt-0.7.7-cp314-cp314-win_amd64.whl", hash = "sha256:3d1fe2e8df3268dd6734dba33ededae72ad5c3a859b9577bc00b715759c5aaab"}, + {file = "librt-0.7.7-cp314-cp314-win_arm64.whl", hash = "sha256:2987cf827011907d3dfd109f1be0d61e173d68b1270107bb0e89f2fca7f2ed6b"}, + {file = "librt-0.7.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8e92c8de62b40bfce91d5e12c6e8b15434da268979b1af1a6589463549d491e6"}, + {file = "librt-0.7.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f683dcd49e2494a7535e30f779aa1ad6e3732a019d80abe1309ea91ccd3230e3"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b15e5d17812d4d629ff576699954f74e2cc24a02a4fc401882dd94f81daba45"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c084841b879c4d9b9fa34e5d5263994f21aea7fd9c6add29194dbb41a6210536"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c8fb9966f84737115513fecbaf257f9553d067a7dd45a69c2c7e5339e6a8dc"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9b5fb1ecb2c35362eab2dbd354fd1efa5a8440d3e73a68be11921042a0edc0ff"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:d1454899909d63cc9199a89fcc4f81bdd9004aef577d4ffc022e600c412d57f3"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7ef28f2e7a016b29792fe0a2dd04dec75725b32a1264e390c366103f834a9c3a"}, + {file = "librt-0.7.7-cp314-cp314t-win32.whl", hash = "sha256:5e419e0db70991b6ba037b70c1d5bbe92b20ddf82f31ad01d77a347ed9781398"}, + {file = "librt-0.7.7-cp314-cp314t-win_amd64.whl", hash = 
"sha256:d6b7d93657332c817b8d674ef6bf1ab7796b4f7ce05e420fd45bd258a72ac804"}, + {file = "librt-0.7.7-cp314-cp314t-win_arm64.whl", hash = "sha256:142c2cd91794b79fd0ce113bd658993b7ede0fe93057668c2f98a45ca00b7e91"}, + {file = "librt-0.7.7.tar.gz", hash = "sha256:81d957b069fed1890953c3b9c3895c7689960f233eea9a1d9607f71ce7f00b2c"}, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +requires_python = ">=3.10" +summary = "Python port of markdown-it. Markdown parsing, done right!" +groups = ["default"] +dependencies = [ + "mdurl~=0.1", +] +files = [ + {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, + {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, ] [[package]] name = "markupsafe" -version = "2.1.3" -requires_python = ">=3.7" +version = "3.0.3" +requires_python = ">=3.9" summary = "Safely add untrusted strings to HTML/XML markup." groups = ["default"] files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", 
hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = 
"sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = 
"markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, ] [[package]] -name = "mslex" -version = 
"1.1.0" -requires_python = ">=3.5" -summary = "shlex for windows" -groups = ["dev"] -marker = "sys_platform == \"win32\"" +name = "mdurl" +version = "0.1.2" +requires_python = ">=3.7" +summary = "Markdown URL utilities" +groups = ["default"] files = [ - {file = "mslex-1.1.0-py2.py3-none-any.whl", hash = "sha256:8826f4bb8d8c63402203d921dc8c2df0c7fec0d9c91d020ddf02fc9d0dce81bd"}, - {file = "mslex-1.1.0.tar.gz", hash = "sha256:7fe305fbdc9721283875e0b737fdb344374b761338a7f41af91875de278568e4"}, + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] [[package]] name = "mypy" -version = "1.9.0" -requires_python = ">=3.8" +version = "1.19.1" +requires_python = ">=3.9" summary = "Optional static typing for Python" groups = ["dev"] dependencies = [ + "librt>=0.6.2; platform_python_implementation != \"PyPy\"", "mypy-extensions>=1.0.0", + "pathspec>=0.9.0", "tomli>=1.1.0; python_version < \"3.11\"", - "typing-extensions>=4.1.0", + "typing-extensions>=4.6.0", ] files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74"}, + {file = "mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1"}, + {file = "mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331"}, + {file = "mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925"}, + {file = "mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1"}, + {file = 
"mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8"}, + {file = "mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a"}, + {file = "mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef"}, + {file = "mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75"}, + {file = "mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045"}, + {file = "mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957"}, + {file = "mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f"}, + {file = "mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247"}, + {file = "mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba"}, ] [[package]] name = "mypy-extensions" -version = "1.0.0" -requires_python = ">=3.5" +version = "1.1.0" +requires_python = ">=3.8" summary = "Type system extensions for programs checked with the mypy type checker." 
groups = ["dev"] files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] name = "packaging" -version = "21.3" -requires_python = ">=3.6" +version = "25.0" +requires_python = ">=3.8" summary = "Core utilities for Python packages" groups = ["dev"] -dependencies = [ - "pyparsing!=3.0.5,>=2.0.2", -] files = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] -name = "pluggy" -version = "1.4.0" +name = "pathspec" +version = "0.12.1" requires_python = ">=3.8" -summary = "plugin and hook calling mechanisms for python" +summary = "Utility library for gitignore style pattern matching of file paths." groups = ["dev"] files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] -name = "psutil" -version = "5.9.7" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -summary = "Cross-platform lib for process and system monitoring in Python." 
+name = "pluggy" +version = "1.6.0" +requires_python = ">=3.9" +summary = "plugin and hook calling mechanisms for python" groups = ["dev"] files = [ - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [[package]] name = "pydantic" -version = "2.6.3" -requires_python = ">=3.8" +version = "2.12.5" +requires_python = ">=3.9" summary = "Data validation using Python type hints" groups = ["default"] dependencies = [ - "annotated-types>=0.4.0", - "pydantic-core==2.16.3", - "typing-extensions>=4.6.1", + "annotated-types>=0.6.0", + "pydantic-core==2.41.5", + "typing-extensions>=4.14.1", + "typing-inspection>=0.4.2", ] files = [ - {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, - {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, ] [[package]] name = "pydantic-core" -version = "2.16.3" -requires_python = ">=3.8" -summary = "" +version = "2.41.5" +requires_python = ">=3.9" +summary = "Core functionality for Pydantic validation and serialization" groups = ["default"] dependencies = [ - "typing-extensions!=4.7.0,>=4.6.0", + "typing-extensions>=4.14.1", ] files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", 
hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = 
"pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = 
"pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, ] [[package]] -name = "pyparsing" -version = "3.1.1" -requires_python = ">=3.6.8" -summary = "pyparsing module - Classes and methods to define and execute parsing grammars" -groups = ["dev"] +name = "pygments" +version = "2.19.2" 
+requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." +groups = ["default", "dev"] files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, ] [[package]] name = "pytest" -version = "8.1.1" -requires_python = ">=3.8" +version = "9.0.2" +requires_python = ">=3.10" summary = "pytest: simple powerful testing with Python" groups = ["dev"] dependencies = [ - "colorama; sys_platform == \"win32\"", - "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", - "iniconfig", - "packaging", - "pluggy<2.0,>=1.4", + "colorama>=0.4; sys_platform == \"win32\"", + "exceptiongroup>=1; python_version < \"3.11\"", + "iniconfig>=1.0.1", + "packaging>=22", + "pluggy<2,>=1.5", + "pygments>=2.7.2", "tomli>=1; python_version < \"3.11\"", ] files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b"}, + {file = "pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11"}, ] [[package]] name = "pytest-cov" -version = "4.1.0" -requires_python = ">=3.7" +version = "7.0.0" +requires_python = ">=3.9" summary = "Pytest plugin for measuring coverage." 
groups = ["dev"] dependencies = [ - "coverage[toml]>=5.2.1", - "pytest>=4.6", + "coverage[toml]>=7.10.6", + "pluggy>=1.2", + "pytest>=7", ] files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, + {file = "pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861"}, + {file = "pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1"}, ] [[package]] name = "pytest-mock" -version = "3.12.0" -requires_python = ">=3.8" +version = "3.15.1" +requires_python = ">=3.9" summary = "Thin-wrapper around the mock package for easier use with pytest" groups = ["dev"] dependencies = [ - "pytest>=5.0", + "pytest>=6.2.5", ] files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d"}, + {file = "pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f"}, ] [[package]] @@ -751,198 +875,67 @@ files = [ [[package]] name = "python-multipart" -version = "0.0.9" -requires_python = ">=3.8" +version = "0.0.21" +requires_python = ">=3.10" summary = "A streaming multipart parser for Python" groups = ["dev"] files = [ - {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, - {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, + {file = "python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090"}, + {file = "python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92"}, ] [[package]] -name = "pyyaml" -version = "6.0.1" -requires_python = ">=3.6" -summary = "YAML parser and emitter for Python" +name = "rich" +version = "14.2.0" +requires_python = ">=3.8.0" +summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" groups = ["default"] -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = 
"PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -requires_python = ">=3.7" -summary = "Python HTTP for Humans." -groups = ["dev"] dependencies = [ - "certifi>=2017.4.17", - "charset-normalizer<4,>=2", - "idna<4,>=2.5", - "urllib3<3,>=1.21.1", + "markdown-it-py>=2.2.0", + "pygments<3.0.0,>=2.13.0", ] files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd"}, + {file = "rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4"}, ] [[package]] name = "ruamel-yaml" -version = "0.18.5" -requires_python = ">=3.7" +version = "0.19.1" +requires_python = ">=3.9" summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -groups = ["dev"] -dependencies = [ - "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"", -] -files = [ - {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, - {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, -] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.8" -requires_python = ">=3.6" -summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -groups = ["dev"] -marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" +groups = ["default"] files = [ - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, + {file = "ruamel_yaml-0.19.1-py3-none-any.whl", hash = "sha256:27592957fedf6e0b62f281e96effd28043345e0e66001f97683aa9a40c667c93"}, + {file = "ruamel_yaml-0.19.1.tar.gz", hash = "sha256:53eb66cd27849eff968ebf8f0bf61f46cdac2da1d1f3576dd4ccee9b25c31993"}, ] [[package]] name = "ruff" -version = "0.3.2" +version = "0.14.10" requires_python = ">=3.7" summary = "An extremely fast Python linter and code formatter, written in Rust." 
groups = ["default"] files = [ - {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"}, - {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb875c6cc87b3703aeda85f01c9aebdce3d217aeaca3c2e52e38077383f7268a"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be75e468a6a86426430373d81c041b7605137a28f7014a72d2fc749e47f572aa"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:967978ac2d4506255e2f52afe70dda023fc602b283e97685c8447d036863a302"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1231eacd4510f73222940727ac927bc5d07667a86b0cbe822024dd00343e77e9"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6d613b19e9a8021be2ee1d0e27710208d1603b56f47203d0abbde906929a9b"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8439338a6303585d27b66b4626cbde89bb3e50fa3cae86ce52c1db7449330a7"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8b480d8379620cbb5ea466a9e53bb467d2fb07c7eca54a4aa8576483c35d36"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b74c3de9103bd35df2bb05d8b2899bf2dbe4efda6474ea9681280648ec4d237d"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f380be9fc15a99765c9cf316b40b9da1f6ad2ab9639e551703e581a5e6da6745"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0ac06a3759c3ab9ef86bbeca665d31ad3aa9a4b1c17684aadb7e61c10baa0df4"}, - {file = "ruff-0.3.2-py3-none-win32.whl", hash = "sha256:9bd640a8f7dd07a0b6901fcebccedadeb1a705a50350fb86b4003b805c81385a"}, - {file = "ruff-0.3.2-py3-none-win_amd64.whl", hash = "sha256:0c1bdd9920cab5707c26c8b3bf33a064a4ca7842d91a99ec0634fec68f9f4037"}, - {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"}, - {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"}, -] - -[[package]] -name = "safety" -version = "2.3.5" -summary = "Checks installed dependencies for known vulnerabilities and licenses." 
-groups = ["dev"] -dependencies = [ - "Click>=8.0.2", - "dparse>=0.6.2", - "packaging<22.0,>=21.0", - "requests", - "ruamel-yaml>=0.17.21", - "setuptools>=19.3", -] -files = [ - {file = "safety-2.3.5-py3-none-any.whl", hash = "sha256:2227fcac1b22b53c1615af78872b48348661691450aa25d6704a5504dbd1f7e2"}, - {file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"}, -] - -[[package]] -name = "setuptools" -version = "69.0.3" -requires_python = ">=3.8" -summary = "Easily download, build, install, upgrade, and uninstall Python packages" -groups = ["dev"] -files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49"}, + {file = "ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f"}, + {file = "ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f"}, + {file = "ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f"}, + {file = "ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d"}, + {file = "ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405"}, + {file = "ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60"}, + {file = "ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830"}, + {file = "ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6"}, + {file = "ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154"}, + {file = "ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6"}, + {file 
= "ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4"}, ] [[package]] @@ -958,68 +951,96 @@ files = [ [[package]] name = "six" -version = "1.16.0" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.17.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Python 2 and 3 compatibility utilities" groups = ["default"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.0" -requires_python = ">=3.7" -summary = "Sniff out which async library your code is running under" -groups = ["default"] -files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] -name = "taskipy" -version = "1.12.2" -requires_python = ">=3.6,<4.0" -summary = "tasks runner for python projects" +name = "syrupy" +version = "5.0.0" +requires_python = ">=3.10" +summary = "Pytest Snapshot Test Utility" groups = ["dev"] dependencies = [ - "colorama<0.5.0,>=0.4.4", - "mslex<2.0.0,>=1.1.0; sys_platform == \"win32\"", - "psutil<6.0.0,>=5.7.2", - "tomli<3.0.0,>=2.0.1; python_version ~= \"3.7\"", + "pytest>=8.0.0", ] files = [ - {file = "taskipy-1.12.2-py3-none-any.whl", hash = "sha256:ffdbb0bb0db54c0ec5c424610a3a087eea22706d4d1f6e3e8b4f12ebba05f98f"}, - {file = "taskipy-1.12.2.tar.gz", hash = "sha256:eadfdc20d6bb94d8018eda32f1dbf584cf4aa6cffb71ba5cc2de20d344f8c4fb"}, + {file = "syrupy-5.0.0-py3-none-any.whl", hash = "sha256:c848e1a980ca52a28715cd2d2b4d434db424699c05653bd1158fb31cf56e9546"}, + {file = "syrupy-5.0.0.tar.gz", hash = "sha256:3282fe963fa5d4d3e47231b16d1d4d0f4523705e8199eeb99a22a1bc9f5942f2"}, ] [[package]] name = "tomli" -version = "2.0.1" -requires_python = ">=3.7" +version = "2.3.0" +requires_python = ">=3.8" summary = "A lil' TOML parser" groups = ["dev"] -marker = "python_version < \"4.0\"" +marker = "python_version < \"3.11\"" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, ] [[package]] name = "typer" -version = "0.9.0" -requires_python = ">=3.6" +version = "0.21.0" +requires_python = ">=3.9" summary = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
groups = ["default"] dependencies = [ - "click<9.0.0,>=7.1.1", + "click>=8.0.0", + "rich>=10.11.0", + "shellingham>=1.3.0", "typing-extensions>=3.7.4.3", ] files = [ - {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, - {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, + {file = "typer-0.21.0-py3-none-any.whl", hash = "sha256:c79c01ca6b30af9fd48284058a7056ba0d3bf5cf10d0ff3d0c5b11b68c258ac6"}, + {file = "typer-0.21.0.tar.gz", hash = "sha256:c87c0d2b6eee3b49c5c64649ec92425492c14488096dfbc8a0c2799b2f6f9c53"}, ] [[package]] @@ -1034,43 +1055,47 @@ files = [ [[package]] name = "types-python-dateutil" -version = "2.8.19.20240106" -requires_python = ">=3.8" +version = "2.9.0.20251115" +requires_python = ">=3.9" summary = "Typing stubs for python-dateutil" groups = ["dev"] files = [ - {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, - {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, + {file = "types_python_dateutil-2.9.0.20251115-py3-none-any.whl", hash = "sha256:9cf9c1c582019753b8639a081deefd7e044b9fa36bd8217f565c6c4e36ee0624"}, + {file = "types_python_dateutil-2.9.0.20251115.tar.gz", hash = "sha256:8a47f2c3920f52a994056b8786309b43143faa5a64d4cbb2722d6addabdf1a58"}, ] [[package]] name = "types-pyyaml" -version = "6.0.12.12" +version = "6.0.12.20250915" +requires_python = ">=3.9" summary = "Typing stubs for PyYAML" groups = ["dev"] files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, + {file = "types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6"}, + {file = "types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3"}, ] [[package]] name = "typing-extensions" -version = "4.10.0" -requires_python = ">=3.8" -summary = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.15.0" +requires_python = ">=3.9" +summary = "Backported and Experimental Type Hints for Python 3.9+" groups = ["default", "dev"] files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] -name = "urllib3" -version = "2.1.0" -requires_python = ">=3.8" -summary = "HTTP library with thread-safe connection pooling, file post, and more." 
-groups = ["dev"] +name = "typing-inspection" +version = "0.4.2" +requires_python = ">=3.9" +summary = "Runtime typing introspection tools" +groups = ["default"] +dependencies = [ + "typing-extensions>=4.12.0", +] files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, ] diff --git a/pdm.minimal.lock b/pdm.minimal.lock new file mode 100644 index 000000000..a413f6a86 --- /dev/null +++ b/pdm.minimal.lock @@ -0,0 +1,971 @@ +# This file is @generated by PDM. +# It is not intended for manual editing. + +[metadata] +groups = ["default", "dev"] +strategy = ["direct_minimal_versions", "inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:f55a39ef7a9c0210dab1151ceda8e273982dbeff1a97cb10553cb7838c8a1f2e" + +[[metadata.targets]] +requires_python = "~=3.10" + +[[package]] +name = "annotated-types" +version = "0.7.0" +requires_python = ">=3.8" +summary = "Reusable constraint types to use with typing.Annotated" +groups = ["default"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "3.7.1" +requires_python = ">=3.7" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["default"] +dependencies = [ + "exceptiongroup; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, +] + +[[package]] +name = "attrs" +version = "22.2.0" +requires_python = ">=3.6" +summary = "Classes Without Boilerplate" +groups = ["default"] +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +requires_python = ">=3.7" +summary = "Python package for providing Mozilla's CA Bundle." 
+groups = ["default"] +files = [ + {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, + {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, +] + +[[package]] +name = "click" +version = "8.3.0" +requires_python = ">=3.10" +summary = "Composable command line interface toolkit" +groups = ["default"] +dependencies = [ + "colorama; platform_system == \"Windows\"", +] +files = [ + {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"}, + {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"}, +] + +[[package]] +name = "colorama" +version = "0.4.3" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +summary = "Cross-platform colored terminal text." +groups = ["default", "dev"] +marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, + {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, +] + +[[package]] +name = "coverage" +version = "7.11.0" +requires_python = ">=3.10" +summary = "Code coverage measurement for Python" +groups = ["dev"] +files = [ + {file = "coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31"}, + {file = "coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075"}, + {file = "coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab"}, + {file = "coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0"}, + {file = "coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785"}, + {file = "coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591"}, + {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088"}, + {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f"}, + {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866"}, + {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841"}, + {file = "coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf"}, + {file = "coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969"}, + {file = "coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847"}, + {file = 
"coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc"}, + {file = "coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0"}, + {file = "coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7"}, + {file = "coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623"}, + {file = "coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287"}, + {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552"}, + {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de"}, + {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601"}, + {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e"}, + {file = "coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c"}, + {file = "coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9"}, + {file = "coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745"}, + {file = "coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1"}, + {file = "coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007"}, + {file = "coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46"}, + {file = "coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893"}, + {file = "coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115"}, + {file = "coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415"}, + {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186"}, + {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d"}, + {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d"}, + {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2"}, + {file = 
"coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5"}, + {file = "coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0"}, + {file = "coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad"}, + {file = "coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1"}, + {file = "coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be"}, + {file = "coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d"}, + {file = "coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82"}, + {file = "coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52"}, + {file = "coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b"}, + {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4"}, + {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd"}, + {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc"}, + {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48"}, + {file = "coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040"}, + {file = "coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05"}, + {file = "coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a"}, + {file = "coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b"}, + {file = "coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37"}, + {file = "coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de"}, + {file = "coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f"}, + {file = "coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c"}, + {file = "coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa"}, + {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", 
hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740"}, + {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef"}, + {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0"}, + {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca"}, + {file = "coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2"}, + {file = "coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268"}, + {file = "coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836"}, + {file = "coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497"}, + {file = "coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e"}, + {file = "coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1"}, + {file = "coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca"}, + {file = "coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd"}, + {file = "coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43"}, + {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777"}, + {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2"}, + {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d"}, + {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4"}, + {file = "coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721"}, + {file = "coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad"}, + {file = "coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479"}, + {file = "coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f"}, + {file = "coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e"}, + {file = "coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44"}, + {file = 
"coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3"}, + {file = "coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b"}, + {file = "coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d"}, + {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2"}, + {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e"}, + {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996"}, + {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11"}, + {file = "coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73"}, + {file = "coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547"}, + {file = "coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3"}, + {file = "coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68"}, + {file = "coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["default", "dev"] +marker = "python_version < \"3.11\"" +dependencies = [ + "typing-extensions>=4.6.0; python_version < \"3.13\"", +] +files = [ + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, +] + +[[package]] +name = "execnet" +version = "2.1.1" +requires_python = ">=3.8" +summary = "execnet: rapid multi-Python deployment" +groups = ["dev"] +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[[package]] +name = "h11" +version = "0.12.0" +requires_python = ">=3.6" +summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +groups = ["default"] +files = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] + +[[package]] +name = "httpcore" +version = "0.15.0" +requires_python = ">=3.7" +summary = "A minimal low-level HTTP client." 
+groups = ["default"] +dependencies = [ + "anyio==3.*", + "certifi", + "h11<0.13,>=0.11", + "sniffio==1.*", +] +files = [ + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, +] + +[[package]] +name = "httpx" +version = "0.23.0" +requires_python = ">=3.7" +summary = "The next generation HTTP client." +groups = ["default"] +dependencies = [ + "certifi", + "httpcore<0.16.0,>=0.15.0", + "rfc3986[idna2008]<2,>=1.3", + "sniffio", +] +files = [ + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, +] + +[[package]] +name = "idna" +version = "3.11" +requires_python = ">=3.8" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +requires_python = ">=3.10" +summary = "brain-dead simple config-ini parsing" +groups = ["dev"] +files = [ + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "jinja2" +version = "3.0.0" +requires_python = ">=3.6" +summary = "A very fast and expressive template engine." +groups = ["default"] +dependencies = [ + "MarkupSafe>=2.0.0rc2", +] +files = [ + {file = "Jinja2-3.0.0-py3-none-any.whl", hash = "sha256:2f2de5285cf37f33d33ecd4a9080b75c87cd0c1994d5a9c6df17131ea1f049c6"}, + {file = "Jinja2-3.0.0.tar.gz", hash = "sha256:ea8d7dd814ce9df6de6a761ec7f1cac98afe305b8cdc4aaae4e114b8d8ce24c5"}, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +requires_python = ">=3.10" +summary = "Python port of markdown-it. Markdown parsing, done right!" +groups = ["default"] +dependencies = [ + "mdurl~=0.1", +] +files = [ + {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, + {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +requires_python = ">=3.9" +summary = "Safely add untrusted strings to HTML/XML markup." 
+groups = ["default"] +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = 
"markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +requires_python = ">=3.7" +summary = "Markdown URL utilities" +groups = ["default"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.13.0" +requires_python = ">=3.8" +summary = "Optional static typing for Python" +groups = ["dev"] +dependencies = [ + "mypy-extensions>=1.0.0", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = 
"mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +requires_python = ">=3.8" +summary = "Type system extensions for programs checked with the mypy type checker." 
+groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "packaging" +version = "25.0" +requires_python = ">=3.8" +summary = "Core utilities for Python packages" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +requires_python = ">=3.9" +summary = "plugin and hook calling mechanisms for python" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[[package]] +name = "py" +version = "1.11.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +summary = "library with cross-python path, ini-parsing, io, code, log facilities" +groups = ["dev"] +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "2.10.0" +requires_python = ">=3.8" +summary = "Data validation using Python type hints" +groups = ["default"] +dependencies = [ + "annotated-types>=0.6.0", + "pydantic-core==2.27.0", + "typing-extensions>=4.12.2", +] +files = [ + {file = "pydantic-2.10.0-py3-none-any.whl", hash = "sha256:5e7807ba9201bdf61b1b58aa6eb690916c40a47acfb114b1b4fef3e7fd5b30fc"}, + {file = "pydantic-2.10.0.tar.gz", hash = "sha256:0aca0f045ff6e2f097f1fe89521115335f15049eeb8a7bef3dafe4b19a74e289"}, +] + +[[package]] +name = "pydantic-core" +version = "2.27.0" +requires_python = ">=3.8" +summary = "Core functionality for Pydantic validation and serialization" +groups = ["default"] +dependencies = [ + "typing-extensions!=4.7.0,>=4.6.0", +] +files = [ + {file = "pydantic_core-2.27.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2ac6b919f7fed71b17fe0b4603c092a4c9b5bae414817c9c81d3c22d1e1bcc"}, + {file = "pydantic_core-2.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e015833384ca3e1a0565a79f5d953b0629d9138021c27ad37c92a9fa1af7623c"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db72e40628967f6dc572020d04b5f800d71264e0531c6da35097e73bdf38b003"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df45c4073bed486ea2f18757057953afed8dd77add7276ff01bccb79982cf46c"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:836a4bfe0cc6d36dc9a9cc1a7b391265bf6ce9d1eb1eac62ac5139f5d8d9a6fa"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bf1340ae507f6da6360b24179c2083857c8ca7644aab65807023cf35404ea8d"}, + {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ab325fc86fbc077284c8d7f996d904d30e97904a87d6fb303dce6b3de7ebba9"}, + {file 
= "pydantic_core-2.27.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1da0c98a85a6c6ed702d5556db3b09c91f9b0b78de37b7593e2de8d03238807a"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b0202ebf2268954090209a84f9897345719e46a57c5f2c9b7b250ca0a9d3e63"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:35380671c3c921fe8adf31ad349dc6f7588b7e928dbe44e1093789734f607399"}, + {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b4c19525c3538fbc0bbda6229f9682fb8199ce9ac37395880e6952798e00373"}, + {file = "pydantic_core-2.27.0-cp310-none-win32.whl", hash = "sha256:333c840a1303d1474f491e7be0b718226c730a39ead0f7dab2c7e6a2f3855555"}, + {file = "pydantic_core-2.27.0-cp310-none-win_amd64.whl", hash = "sha256:99b2863c1365f43f74199c980a3d40f18a218fbe683dd64e470199db426c4d6a"}, + {file = "pydantic_core-2.27.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4523c4009c3f39d948e01962223c9f5538602e7087a628479b723c939fab262d"}, + {file = "pydantic_core-2.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84af1cf7bfdcbc6fcf5a5f70cc9896205e0350306e4dd73d54b6a18894f79386"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e65466b31be1070b4a5b7dbfbd14b247884cb8e8b79c64fb0f36b472912dbaea"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a5c022bb0d453192426221605efc865373dde43b17822a264671c53b068ac20c"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bb69bf3b6500f195c3deb69c1205ba8fc3cb21d1915f1f158a10d6b1ef29b6a"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aa4d1b2eba9a325897308b3124014a142cdccb9f3e016f31d3ebee6b5ea5e75"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e96ca781e0c01e32115912ebdf7b3fb0780ce748b80d7d28a0802fa9fbaf44e"}, + {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b872c86d8d71827235c7077461c502feb2db3f87d9d6d5a9daa64287d75e4fa0"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:82e1ad4ca170e8af4c928b67cff731b6296e6a0a0981b97b2eb7c275cc4e15bd"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:eb40f828bc2f73f777d1eb8fee2e86cd9692a4518b63b6b5aa8af915dfd3207b"}, + {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9a8fbf506fde1529a1e3698198fe64bfbe2e0c09557bc6a7dcf872e7c01fec40"}, + {file = "pydantic_core-2.27.0-cp311-none-win32.whl", hash = "sha256:24f984fc7762ed5f806d9e8c4c77ea69fdb2afd987b4fd319ef06c87595a8c55"}, + {file = "pydantic_core-2.27.0-cp311-none-win_amd64.whl", hash = "sha256:68950bc08f9735306322bfc16a18391fcaac99ded2509e1cc41d03ccb6013cfe"}, + {file = "pydantic_core-2.27.0-cp311-none-win_arm64.whl", hash = "sha256:3eb8849445c26b41c5a474061032c53e14fe92a11a5db969f722a2716cd12206"}, + {file = "pydantic_core-2.27.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8117839a9bdbba86e7f9df57018fe3b96cec934c3940b591b0fd3fbfb485864a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a291d0b4243a259c8ea7e2b84eb9ccb76370e569298875a7c5e3e71baf49057a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:84e35afd9e10b2698e6f2f32256678cb23ca6c1568d02628033a837638b3ed12"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58ab0d979c969983cdb97374698d847a4acffb217d543e172838864636ef10d9"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d06b667e53320332be2bf6f9461f4a9b78092a079b8ce8634c9afaa7e10cd9f"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78f841523729e43e3928a364ec46e2e3f80e6625a4f62aca5c345f3f626c6e8a"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:400bf470e4327e920883b51e255617dfe4496d4e80c3fea0b5a5d0bf2c404dd4"}, + {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:951e71da6c89d354572098bada5ba5b5dc3a9390c933af8a614e37755d3d1840"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a51ce96224eadd1845150b204389623c8e129fde5a67a84b972bd83a85c6c40"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:483c2213a609e7db2c592bbc015da58b6c75af7360ca3c981f178110d9787bcf"}, + {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:359e7951f04ad35111b5ddce184db3391442345d0ab073aa63a95eb8af25a5ef"}, + {file = "pydantic_core-2.27.0-cp312-none-win32.whl", hash = "sha256:ee7d9d5537daf6d5c74a83b38a638cc001b648096c1cae8ef695b0c919d9d379"}, + {file = "pydantic_core-2.27.0-cp312-none-win_amd64.whl", hash = "sha256:2be0ad541bb9f059954ccf8877a49ed73877f862529575ff3d54bf4223e4dd61"}, + {file = "pydantic_core-2.27.0-cp312-none-win_arm64.whl", hash = "sha256:6e19401742ed7b69e51d8e4df3c03ad5ec65a83b36244479fd70edde2828a5d9"}, + {file = "pydantic_core-2.27.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5f2b19b8d6fca432cb3acf48cf5243a7bf512988029b6e6fd27e9e8c0a204d85"}, + {file = "pydantic_core-2.27.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c86679f443e7085ea55a7376462553996c688395d18ef3f0d3dbad7838f857a2"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:510b11e9c3b1a852876d1ccd8d5903684336d635214148637ceb27366c75a467"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb704155e73b833801c247f39d562229c0303f54770ca14fb1c053acb376cf10"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ce048deb1e033e7a865ca384770bccc11d44179cf09e5193a535c4c2f497bdc"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58560828ee0951bb125c6f2862fbc37f039996d19ceb6d8ff1905abf7da0bf3d"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb4785894936d7682635726613c44578c420a096729f1978cd061a7e72d5275"}, + {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2883b260f7a93235488699d39cbbd94fa7b175d3a8063fbfddd3e81ad9988cb2"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6fcb3fa3855d583aa57b94cf146f7781d5d5bc06cb95cb3afece33d31aac39b"}, + {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:e851a051f7260e6d688267eb039c81f05f23a19431bd7dfa4bf5e3cb34c108cd"}, + {file = 
"pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edb1bfd45227dec8d50bc7c7d86463cd8728bcc574f9b07de7369880de4626a3"}, + {file = "pydantic_core-2.27.0-cp313-none-win32.whl", hash = "sha256:678f66462058dd978702db17eb6a3633d634f7aa0deaea61e0a674152766d3fc"}, + {file = "pydantic_core-2.27.0-cp313-none-win_amd64.whl", hash = "sha256:d28ca7066d6cdd347a50d8b725dc10d9a1d6a1cce09836cf071ea6a2d4908be0"}, + {file = "pydantic_core-2.27.0-cp313-none-win_arm64.whl", hash = "sha256:6f4a53af9e81d757756508b57cae1cf28293f0f31b9fa2bfcb416cc7fb230f9d"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4fb49cfdb53af5041aba909be00cccfb2c0d0a2e09281bf542371c5fd36ad04c"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:49633583eb7dc5cba61aaf7cdb2e9e662323ad394e543ee77af265736bcd3eaa"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:153017e3d6cd3ce979de06d84343ca424bb6092727375eba1968c8b4693c6ecb"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff63a92f6e249514ef35bc795de10745be0226eaea06eb48b4bbeaa0c8850a4a"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5982048129f40b082c2654de10c0f37c67a14f5ff9d37cf35be028ae982f26df"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:91bc66f878557313c2a6bcf396e7befcffe5ab4354cfe4427318968af31143c3"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:68ef5377eb582fa4343c9d0b57a5b094046d447b4c73dd9fbd9ffb216f829e7d"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c5726eec789ee38f2c53b10b1821457b82274f81f4f746bb1e666d8741fcfadb"}, + {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0c431e4be5c1a0c6654e0c31c661cd89e0ca956ef65305c3c3fd96f4e72ca39"}, + {file = "pydantic_core-2.27.0.tar.gz", hash = "sha256:f57783fbaf648205ac50ae7d646f27582fc706be3977e87c3c124e7a92407b10"}, +] + +[[package]] +name = "pygments" +version = "2.19.2" +requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." 
+groups = ["default"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[[package]] +name = "pytest" +version = "8.0.1" +requires_python = ">=3.8" +summary = "pytest: simple powerful testing with Python" +groups = ["dev"] +dependencies = [ + "colorama; sys_platform == \"win32\"", + "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", + "iniconfig", + "packaging", + "pluggy<2.0,>=1.3.0", + "tomli>=1.0.0; python_version < \"3.11\"", +] +files = [ + {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, + {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, +] + +[[package]] +name = "pytest-cov" +version = "0.6" +summary = "py.test plugin for coverage reporting with support for both centralised and distributed testing" +groups = ["dev"] +dependencies = [ + "coverage>=3.3.1", + "py>=1.2.2", + "pytest-xdist>=1.2", +] +files = [ + {file = "pytest-cov-0.6.tar.gz", hash = "sha256:ab095420d1845bfb8998e6aa3072141da3b0c97a34475574752bbcf04edbac85"}, +] + +[[package]] +name = "pytest-mock" +version = "3.1.0" +requires_python = ">=3.5" +summary = "Thin-wrapper around the mock package for easier use with pytest" +groups = ["dev"] +dependencies = [ + "pytest>=2.7", +] +files = [ + {file = "pytest-mock-3.1.0.tar.gz", hash = "sha256:ce610831cedeff5331f4e2fc453a5dd65384303f680ab34bee2c6533855b431c"}, + {file = "pytest_mock-3.1.0-py2.py3-none-any.whl", hash = "sha256:997729451dfc36b851a9accf675488c7020beccda15e11c75632ee3d1b1ccd71"}, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +requires_python = ">=3.9" +summary = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +groups = ["dev"] +dependencies = [ + "execnet>=2.1", + "pytest>=7.0.0", +] +files = [ + {file = "pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88"}, + {file = "pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1"}, +] + +[[package]] +name = "python-dateutil" +version = "2.8.1" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Extensions to the standard Python datetime module" +groups = ["default"] +dependencies = [ + "six>=1.5", +] +files = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] + +[[package]] +name = "python-multipart" +version = "0.0.1" +summary = "A streaming multipart parser for Python" +groups = ["dev"] +files = [ + {file = "python-multipart-0.0.1.tar.gz", hash = "sha256:ae940d053341378e53937d6e7f2081d26b4435dbd53dcd901be73ef3d6ff70be"}, +] + +[[package]] +name = "rfc3986" +version = "1.5.0" +summary = "Validating URI References per RFC 3986" +groups = ["default"] +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[[package]] +name = "rfc3986" +version = "1.5.0" +extras = 
["idna2008"] +summary = "Validating URI References per RFC 3986" +groups = ["default"] +dependencies = [ + "idna", + "rfc3986==1.5.0", +] +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[[package]] +name = "rich" +version = "14.2.0" +requires_python = ">=3.8.0" +summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +groups = ["default"] +dependencies = [ + "markdown-it-py>=2.2.0", + "pygments<3.0.0,>=2.13.0", +] +files = [ + {file = "rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd"}, + {file = "rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4"}, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +requires_python = ">=3.7" +summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +groups = ["default"] +dependencies = [ + "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"", +] +files = [ + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, +] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.14" +requires_python = ">=3.9" +summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +groups = ["default"] +marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" +files = [ + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f8b2acb0ffdd2ce8208accbec2dca4a06937d556fdcaefd6473ba1b5daa7e3c4"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:aef953f3b8bd0b50bd52a2e52fb54a6a2171a1889d8dea4a5959d46c6624c451"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a0ac90efbc7a77b0d796c03c8cc4e62fd710b3f1e4c32947713ef2ef52e09543"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bf6b699223afe6c7fe9f2ef76e0bfa6dd892c21e94ce8c957478987ade76cd8"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73a0187718f6eec5b2f729b0f98e4603f7bd9c48aa65d01227d1a5dcdfbe9e8"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81f6d3b19bc703679a5705c6a16dabdc79823c71d791d73c65949be7f3012c02"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b28caeaf3e670c08cb7e8de221266df8494c169bd6ed8875493fab45be9607a4"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:94f3efb718f8f49b031f2071ec7a27dd20cbfe511b4dfd54ecee54c956da2b31"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-win32.whl", hash = "sha256:27c070cf3888e90d992be75dd47292ff9aa17dafd36492812a6a304a1aedc182"}, + {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-win_amd64.whl", hash = "sha256:4f4a150a737fccae13fb51234d41304ff2222e3b7d4c8e9428ed1a6ab48389b8"}, + {file = 
"ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5bae1a073ca4244620425cd3d3aa9746bde590992b98ee8c7c8be8c597ca0d4e"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:0a54e5e40a7a691a426c2703b09b0d61a14294d25cfacc00631aa6f9c964df0d"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:10d9595b6a19778f3269399eff6bab642608e5966183abc2adbe558a42d4efc9"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba72975485f2b87b786075e18a6e5d07dc2b4d8973beb2732b9b2816f1bad70"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29757bdb7c142f9595cc1b62ec49a3d1c83fab9cef92db52b0ccebaad4eafb98"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:557df28dbccf79b152fe2d1b935f6063d9cc431199ea2b0e84892f35c03bb0ee"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:26a8de280ab0d22b6e3ec745b4a5a07151a0f74aad92dd76ab9c8d8d7087720d"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e501c096aa3889133d674605ebd018471bc404a59cbc17da3c5924421c54d97c"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-win32.whl", hash = "sha256:915748cfc25b8cfd81b14d00f4bfdb2ab227a30d6d43459034533f4d1c207a2a"}, + {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:4ccba93c1e5a40af45b2f08e4591969fa4697eae951c708f3f83dcbf9f6c6bb1"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:6aeadc170090ff1889f0d2c3057557f9cd71f975f17535c26a5d37af98f19c27"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5e56ac47260c0eed992789fa0b8efe43404a9adb608608631a948cee4fc2b052"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a911aa73588d9a8b08d662b9484bc0567949529824a55d3885b77e8dd62a127a"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a05ba88adf3d7189a974b2de7a9d56731548d35dc0a822ec3dc669caa7019b29"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb04c5650de6668b853623eceadcdb1a9f2fee381f5d7b6bc842ee7c239eeec4"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df3ec9959241d07bc261f4983d25a1205ff37703faf42b474f15d54d88b4f8c9"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fbc08c02e9b147a11dfcaa1ac8a83168b699863493e183f7c0c8b12850b7d259"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c099cafc1834d3c5dac305865d04235f7c21c167c8dd31ebc3d6bbc357e2f023"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-win32.whl", hash = "sha256:b5b0f7e294700b615a3bcf6d28b26e6da94e8eba63b079f4ec92e9ba6c0d6b54"}, + {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:a37f40a859b503304dd740686359fcf541d6fb3ff7fc10f539af7f7150917c68"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7e4f9da7e7549946e02a6122dcad00b7c1168513acb1f8a726b1aaf504a99d32"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:dd7546c851e59c06197a7c651335755e74aa383a835878ca86d2c650c07a2f85"}, + {file = 
"ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:1c1acc3a0209ea9042cc3cfc0790edd2eddd431a2ec3f8283d081e4d5018571e"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2070bf0ad1540d5c77a664de07ebcc45eebd1ddcab71a7a06f26936920692beb"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd8fe07f49c170e09d76773fb86ad9135e0beee44f36e1576a201b0676d3d1d"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ff86876889ea478b1381089e55cf9e345707b312beda4986f823e1d95e8c0f59"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1f118b707eece8cf84ecbc3e3ec94d9db879d85ed608f95870d39b2d2efa5dca"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b30110b29484adc597df6bd92a37b90e63a8c152ca8136aad100a02f8ba6d1b6"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-win32.whl", hash = "sha256:f4e97a1cf0b7a30af9e1d9dad10a5671157b9acee790d9e26996391f49b965a2"}, + {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-win_amd64.whl", hash = "sha256:090782b5fb9d98df96509eecdbcaffd037d47389a89492320280d52f91330d78"}, + {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:7df6f6e9d0e33c7b1d435defb185095386c469109de723d514142632a7b9d07f"}, + {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:70eda7703b8126f5e52fcf276e6c0f40b0d314674f896fc58c47b0aef2b9ae83"}, + {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a0cb71ccc6ef9ce36eecb6272c81afdc2f565950cdcec33ae8e6cd8f7fc86f27"}, + {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7cb9ad1d525d40f7d87b6df7c0ff916a66bc52cb61b66ac1b2a16d0c1b07640"}, + {file = "ruamel.yaml.clib-0.2.14.tar.gz", hash = "sha256:803f5044b13602d58ea378576dd75aa759f52116a0232608e8fdada4da33752e"}, +] + +[[package]] +name = "ruff" +version = "0.2.0" +requires_python = ">=3.7" +summary = "An extremely fast Python linter and code formatter, written in Rust." 
+groups = ["default"] +files = [ + {file = "ruff-0.2.0-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:638ea3294f800d18bae84a492cb5a245c8d29c90d19a91d8e338937a4c27fca0"}, + {file = "ruff-0.2.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3ff35433fcf4dff6d610738712152df6b7d92351a1bde8e00bd405b08b3d5759"}, + {file = "ruff-0.2.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9faafbdcf4f53917019f2c230766da437d4fd5caecd12ddb68bb6a17d74399"}, + {file = "ruff-0.2.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8153a3e4128ed770871c47545f1ae7b055023e0c222ff72a759f5a341ee06483"}, + {file = "ruff-0.2.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a75a98ae989a27090e9c51f763990ad5bbc92d20626d54e9701c7fe597f399"}, + {file = "ruff-0.2.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:87057dd2fdde297130ff99553be8549ca38a2965871462a97394c22ed2dfc19d"}, + {file = "ruff-0.2.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d232f99d3ab00094ebaf88e0fb7a8ccacaa54cc7fa3b8993d9627a11e6aed7a"}, + {file = "ruff-0.2.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d3c641f95f435fc6754b05591774a17df41648f0daf3de0d75ad3d9f099ab92"}, + {file = "ruff-0.2.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3826fb34c144ef1e171b323ed6ae9146ab76d109960addca730756dc19dc7b22"}, + {file = "ruff-0.2.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:eceab7d85d09321b4de18b62d38710cf296cb49e98979960a59c6b9307c18cfe"}, + {file = "ruff-0.2.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:30ad74687e1f4a9ff8e513b20b82ccadb6bd796fe5697f1e417189c5cde6be3e"}, + {file = "ruff-0.2.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7e3818698f8460bd0f8d4322bbe99db8327e9bc2c93c789d3159f5b335f47da"}, + {file = "ruff-0.2.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:edf23041242c48b0d8295214783ef543847ef29e8226d9f69bf96592dba82a83"}, + {file = "ruff-0.2.0-py3-none-win32.whl", hash = "sha256:e155147199c2714ff52385b760fe242bb99ea64b240a9ffbd6a5918eb1268843"}, + {file = "ruff-0.2.0-py3-none-win_amd64.whl", hash = "sha256:ba918e01cdd21e81b07555564f40d307b0caafa9a7a65742e98ff244f5035c59"}, + {file = "ruff-0.2.0-py3-none-win_arm64.whl", hash = "sha256:3fbaff1ba9564a2c5943f8f38bc221f04bac687cc7485e45237579fee7ccda79"}, + {file = "ruff-0.2.0.tar.gz", hash = "sha256:63856b91837606c673537d2889989733d7dffde553828d3b0f0bacfa6def54be"}, +] + +[[package]] +name = "shellingham" +version = "1.3.2" +requires_python = "!=3.0,!=3.1,!=3.2,!=3.3,>=2.6" +summary = "Tool to Detect Surrounding Shell" +groups = ["default"] +files = [ + {file = "shellingham-1.3.2-py2.py3-none-any.whl", hash = "sha256:7f6206ae169dc1a03af8a138681b3f962ae61cc93ade84d0585cca3aaf770044"}, + {file = "shellingham-1.3.2.tar.gz", hash = "sha256:576c1982bea0ba82fb46c36feb951319d7f42214a82634233f58b40d858a751e"}, +] + +[[package]] +name = "six" +version = "1.17.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Python 2 and 3 compatibility utilities" +groups = ["default"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library 
your code is running under" +groups = ["default"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "syrupy" +version = "4.6.1" +requires_python = ">=3.8.1,<4" +summary = "Pytest Snapshot Test Utility" +groups = ["dev"] +dependencies = [ + "pytest<9.0.0,>=7.0.0", +] +files = [ + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, +] + +[[package]] +name = "tomli" +version = "2.3.0" +requires_python = ">=3.8" +summary = "A lil' TOML parser" +groups = ["dev"] +marker = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = 
"tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = 
"tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, +] + +[[package]] +name = "typer" +version = "0.16.1" +requires_python = ">=3.7" +summary = "Typer, build great CLIs. Easy to code. Based on Python type hints." +groups = ["default"] +dependencies = [ + "click>=8.0.0", + "rich>=10.11.0", + "shellingham>=1.3.0", + "typing-extensions>=3.7.4.3", +] +files = [ + {file = "typer-0.16.1-py3-none-any.whl", hash = "sha256:90ee01cb02d9b8395ae21ee3368421faf21fa138cb2a541ed369c08cec5237c9"}, + {file = "typer-0.16.1.tar.gz", hash = "sha256:d358c65a464a7a90f338e3bb7ff0c74ac081449e53884b12ba658cbd72990614"}, +] + +[[package]] +name = "types-certifi" +version = "2020.4.0" +summary = "Typing stubs for certifi" +groups = ["dev"] +files = [ + {file = "types-certifi-2020.4.0.tar.gz", hash = "sha256:787d1a0c7897a1c658f8f7958ae57141b3fff13acb866e5bcd31cfb45037546f"}, + {file = "types_certifi-2020.4.0-py3-none-any.whl", hash = "sha256:0ffdbe451d3b02f6d2cfd87bcfb2f086a4ff1fa76a35d51cfc3771e261d7a8fd"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.8.0" +summary = "Typing stubs for python-dateutil" +groups = ["dev"] +files = [ + {file = "types-python-dateutil-2.8.0.tar.gz", hash = "sha256:540c6c53c3a52433d7088254e3afdc3f6c86b5ae452aaa1b796c26d01c9fd73c"}, + {file = "types_python_dateutil-2.8.0-py3-none-any.whl", hash = "sha256:9954d87dc982344bb2aad73a7fe505bdca72f89088ef653c4c40f52649183437"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.3" +summary = "Typing stubs for PyYAML" +groups = ["dev"] +files = [ + {file = "types-PyYAML-6.0.3.tar.gz", hash = "sha256:6ea4eefa8579e0ce022f785a62de2bcd647fad4a81df5cf946fd67e4b059920b"}, + {file = "types_PyYAML-6.0.3-py3-none-any.whl", hash = "sha256:8b50294b55a9db89498cdc5a65b1b4545112b6cd1cf4465bd693d828b0282a17"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +requires_python = ">=3.9" +summary = "Backported and Experimental Type Hints for Python 3.9+" +groups = ["default", "dev"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] diff --git a/pyproject.toml b/pyproject.toml index 0d8117d3f..f5027a6c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,22 +3,21 @@ authors = [ { name = "Dylan Anthony", email = "contact@dylananthony.com" }, ] license = { text = "MIT" } -requires-python = ">=3.8,<4.0" +requires-python = ">=3.10,<4.0" dependencies = [ "jinja2>=3.0.0,<4.0.0", - "typer>0.6,<0.10", + "typer>0.16,<0.22", "colorama>=0.4.3; sys_platform == \"win32\"", "shellingham>=1.3.2,<2.0.0", - "pydantic>=2.1.1,<3.0.0", - "attrs>=21.3.0", + "pydantic>=2.10,<3.0.0", + "attrs>=22.2.0", "python-dateutil>=2.8.1,<3.0.0", - "httpx>=0.20.0,<0.28.0", - "PyYAML>=6.0,<7.0", - "ruff>=0.2,<0.4", - "typing-extensions>=4.8.0,<5.0.0", + "httpx>=0.23.0,<0.29.0", + "ruamel.yaml>=0.18.6,<0.20.0", + "ruff>=0.2", ] name = "openapi-python-client" -version = "0.19.0" +version = "0.28.1" description = "Generate modern Python clients from OpenAPI" keywords = [ "OpenAPI", @@ -30,11 +29,11 @@ classifiers = [ "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Programming Language :: Python :: 3", - "Programming Language :: 
Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Topic :: Software Development :: Code Generators", "Typing :: Typed", ] @@ -59,13 +58,14 @@ exclude = [ [tool.ruff.lint] select = ["E", "F", "I", "UP", "B", "PL", "RUF"] -ignore = ["E501", "PLR0913"] +ignore = ["E501", "PLR0913", "PLR2004"] [tool.ruff.lint.per-file-ignores] "openapi_python_client/cli.py" = ["B008"] +"tests/*" = ["PLR2004"] [tool.coverage.run] -omit = ["openapi_python_client/templates/*"] +omit = ["openapi_python_client/__main__.py", "openapi_python_client/templates/*", "end_to_end_tests/*", "integration_tests/*", "tests/*"] [tool.mypy] plugins = ["pydantic.mypy"] @@ -84,19 +84,17 @@ ignore_missing_imports = true [tool.pytest.ini_options] junit_family = "xunit2" - [tool.pdm.dev-dependencies] dev = [ - "pytest", - "pytest-mock", - "mypy", - "taskipy", - "safety", + "pytest>8", + "pytest-mock>3", + "mypy>=1.13", "pytest-cov", "python-multipart", "types-PyYAML<7.0.0,>=6.0.3", "types-certifi<2021.10.9,>=2020.0.0", "types-python-dateutil<3.0.0,>=2.0.0", + "syrupy>=4", ] [tool.pdm.build] @@ -107,18 +105,18 @@ includes = [ ] [tool.pdm.scripts] -lint = "ruff check --fix ." +lint = "ruff check ." format = "ruff format ." -safety_check = { shell = "pdm export -o requirements.txt && safety check -r requirements.txt --bare && rm requirements.txt" } mypy = "mypy openapi_python_client" -check = { composite = ["lint", "format", "safety_check", "mypy", "test"] } +check = { composite = ["lint", "format", "mypy", "test"] } regen = {composite = ["regen_e2e", "regen_integration"]} e2e = "pytest openapi_python_client end_to_end_tests/test_end_to_end.py" -re = {composite = ["regen_e2e", "e2e"]} +re = {composite = ["regen_e2e", "e2e --snapshot-update"]} regen_e2e = "python -m end_to_end_tests.regen_golden_record" +unit_test = "pytest tests" [tool.pdm.scripts.test] -cmd = "pytest tests end_to_end_tests/test_end_to_end.py --basetemp=tests/tmp" +cmd = "pytest tests end_to_end_tests/test_end_to_end.py end_to_end_tests/functional_tests --basetemp=tests/tmp" [tool.pdm.scripts.test.env] "TEST_RELATIVE" = "true" @@ -130,7 +128,7 @@ composite = ["test --cov openapi_python_client tests --cov-report=term-missing"] [tool.pdm.scripts.regen_integration] shell = """ -openapi-python-client update --url https://raw.githubusercontent.com/openapi-generators/openapi-test-server/main/openapi.json --config integration-tests/config.yaml --meta pdm \ +openapi-python-client generate --overwrite --url https://raw.githubusercontent.com/openapi-generators/openapi-test-server/refs/tags/v0.2.1/openapi.yaml --config integration-tests/config.yaml --meta none --output-path integration-tests/integration_tests \ """ [build-system] diff --git a/tests/conftest.py b/tests/conftest.py index 44fbe717c..500ebf911 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,51 +1,68 @@ +from __future__ import annotations + +from collections.abc import Callable from pathlib import Path -from typing import Any, Callable, Dict +from typing import Any import pytest +from mypy.semanal_shared import Protocol -from openapi_python_client import Config, MetaType +from openapi_python_client import Config, MetaType, utils from openapi_python_client import schema as oai from 
openapi_python_client.config import ConfigFile from openapi_python_client.parser.properties import ( AnyProperty, BooleanProperty, + Class, DateProperty, DateTimeProperty, EnumProperty, FileProperty, IntProperty, ListProperty, + LiteralEnumProperty, ModelProperty, NoneProperty, StringProperty, UnionProperty, ) +from openapi_python_client.parser.properties.float import FloatProperty +from openapi_python_client.parser.properties.protocol import PropertyType, Value from openapi_python_client.schema.openapi_schema_pydantic import Parameter from openapi_python_client.schema.parameter_location import ParameterLocation +from openapi_python_client.utils import ClassName, PythonIdentifier @pytest.fixture(scope="session") def config() -> Config: """Create a default config for when it doesn't matter""" return Config.from_sources( - ConfigFile(), MetaType.POETRY, document_source=Path("openapi.yaml"), file_encoding="utf-8" + ConfigFile(), + MetaType.POETRY, + document_source=Path("openapi.yaml"), + file_encoding="utf-8", + overwrite=False, + output_path=None, ) +class ModelFactory(Protocol): + def __call__(self, *args, **kwargs): ... + + @pytest.fixture -def model_property_factory() -> Callable[..., ModelProperty]: +def model_property_factory() -> ModelFactory: """ This fixture surfaces in the test as a function which manufactures ModelProperties with defaults. You can pass the same params into this as the ModelProperty constructor to override defaults. """ - from openapi_python_client.parser.properties import Class def _factory(**kwargs): kwargs = _common_kwargs(kwargs) kwargs = { "description": "", - "class_info": Class(name="MyClass", module_name="my_module"), + "class_info": Class(name=ClassName("MyClass", ""), module_name=PythonIdentifier("my_module", "")), "data": oai.Schema.model_construct(), "roots": set(), "required_properties": None, @@ -62,180 +79,218 @@ def _factory(**kwargs): return _factory +def _simple_factory( + cls: type[PropertyType], default_kwargs: dict | Callable[[dict], dict] | None = None +) -> Callable[..., PropertyType]: + def _factory(**kwargs): + kwargs = _common_kwargs(kwargs) + defaults = default_kwargs + if defaults: + if callable(defaults): + defaults = defaults(kwargs) + kwargs = {**defaults, **kwargs} + rv = cls(**kwargs) + return rv + + return _factory + + +class SimpleFactory(Protocol[PropertyType]): + def __call__( + self, + *, + default: Value | None = None, + name: str | None = None, + required: bool | None = None, + description: str | None = None, + example: str | None = None, + ) -> PropertyType: ... + + +class EnumFactory(Protocol[PropertyType]): + def __call__( + self, + *, + default: Value | None = None, + name: str | None = None, + required: bool | None = None, + values: dict[str, str | int] | None = None, + class_info: Class | None = None, + value_type: type | None = None, + python_name: PythonIdentifier | None = None, + description: str | None = None, + example: str | None = None, + ) -> PropertyType: ... + + @pytest.fixture -def enum_property_factory() -> Callable[..., EnumProperty]: +def enum_property_factory() -> EnumFactory[EnumProperty]: """ This fixture surfaces in the test as a function which manufactures EnumProperties with defaults. You can pass the same params into this as the EnumProperty constructor to override defaults. 
""" - from openapi_python_client.parser.properties import Class - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - kwargs = { + return _simple_factory( + EnumProperty, + lambda kwargs: { "class_info": Class(name=kwargs["name"], module_name=kwargs["name"]), "values": {}, "value_type": str, - **kwargs, - } - return EnumProperty(**kwargs) + }, + ) - return _factory + +@pytest.fixture +def literal_enum_property_factory() -> EnumFactory[LiteralEnumProperty]: + """ + This fixture surfaces in the test as a function which manufactures LiteralEnumProperties with defaults. + + You can pass the same params into this as the LiteralEnumProerty constructor to override defaults. + """ + + return _simple_factory( + LiteralEnumProperty, + lambda kwargs: { + "class_info": Class(name=kwargs["name"], module_name=kwargs["name"]), + "values": set(), + "value_type": str, + }, + ) @pytest.fixture -def any_property_factory() -> Callable[..., AnyProperty]: +def any_property_factory() -> SimpleFactory[AnyProperty]: """ This fixture surfaces in the test as a function which manufactures AnyProperty with defaults. You can pass the same params into this as the AnyProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return AnyProperty(**kwargs) - - return _factory + return _simple_factory(AnyProperty) @pytest.fixture -def string_property_factory() -> Callable[..., StringProperty]: +def string_property_factory() -> SimpleFactory[StringProperty]: """ This fixture surfaces in the test as a function which manufactures StringProperties with defaults. You can pass the same params into this as the StringProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return StringProperty(**kwargs) - - return _factory + return _simple_factory(StringProperty) @pytest.fixture -def int_property_factory() -> Callable[..., IntProperty]: +def int_property_factory() -> SimpleFactory[IntProperty]: """ - This fixture surfaces in the test as a function which manufactures StringProperties with defaults. + This fixture surfaces in the test as a function which manufactures IntProperties with defaults. - You can pass the same params into this as the StringProperty constructor to override defaults. + You can pass the same params into this as the IntProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return IntProperty(**kwargs) - - return _factory + return _simple_factory(IntProperty) @pytest.fixture -def none_property_factory() -> Callable[..., NoneProperty]: +def float_property_factory() -> SimpleFactory[FloatProperty]: """ - This fixture surfaces in the test as a function which manufactures StringProperties with defaults. + This fixture surfaces in the test as a function which manufactures FloatProperties with defaults. - You can pass the same params into this as the StringProperty constructor to override defaults. + You can pass the same params into this as the FloatProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return NoneProperty(**kwargs) - - return _factory + return _simple_factory(FloatProperty) @pytest.fixture -def boolean_property_factory() -> Callable[..., BooleanProperty]: +def none_property_factory() -> SimpleFactory[NoneProperty]: """ - This fixture surfaces in the test as a function which manufactures StringProperties with defaults. 
+ This fixture surfaces in the test as a function which manufactures NoneProperties with defaults. - You can pass the same params into this as the StringProperty constructor to override defaults. + You can pass the same params into this as the NoneProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return BooleanProperty(**kwargs) + return _simple_factory(NoneProperty) - return _factory + +@pytest.fixture +def boolean_property_factory() -> SimpleFactory[BooleanProperty]: + """ + This fixture surfaces in the test as a function which manufactures BooleanProperties with defaults. + + You can pass the same params into this as the BooleanProperty constructor to override defaults. + """ + + return _simple_factory(BooleanProperty) @pytest.fixture -def date_time_property_factory() -> Callable[..., DateTimeProperty]: +def date_time_property_factory() -> SimpleFactory[DateTimeProperty]: """ This fixture surfaces in the test as a function which manufactures DateTimeProperties with defaults. You can pass the same params into this as the DateTimeProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return DateTimeProperty(**kwargs) - - return _factory + return _simple_factory(DateTimeProperty) @pytest.fixture -def date_property_factory() -> Callable[..., DateProperty]: +def date_property_factory() -> SimpleFactory[DateProperty]: """ This fixture surfaces in the test as a function which manufactures DateProperties with defaults. You can pass the same params into this as the DateProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return DateProperty(**kwargs) - - return _factory + return _simple_factory(DateProperty) @pytest.fixture -def file_property_factory() -> Callable[..., FileProperty]: +def file_property_factory() -> SimpleFactory[FileProperty]: """ This fixture surfaces in the test as a function which manufactures FileProperties with defaults. You can pass the same params into this as the FileProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - return FileProperty(**kwargs) - - return _factory + return _simple_factory(FileProperty) @pytest.fixture -def list_property_factory(string_property_factory) -> Callable[..., ListProperty]: +def list_property_factory(string_property_factory) -> SimpleFactory[ListProperty]: """ This fixture surfaces in the test as a function which manufactures ListProperties with defaults. You can pass the same params into this as the ListProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - if "inner_property" not in kwargs: - kwargs["inner_property"] = string_property_factory() - return ListProperty(**kwargs) + return _simple_factory(ListProperty, {"inner_property": string_property_factory()}) - return _factory + +class UnionFactory(SimpleFactory): + def __call__( + self, + *, + default: Value | None = None, + name: str | None = None, + required: bool | None = None, + inner_properties: list[PropertyType] | None = None, + ) -> UnionProperty: ... @pytest.fixture -def union_property_factory(date_time_property_factory, string_property_factory) -> Callable[..., UnionProperty]: +def union_property_factory(date_time_property_factory, string_property_factory) -> UnionFactory: """ This fixture surfaces in the test as a function which manufactures UnionProperties with defaults. 
You can pass the same params into this as the UnionProperty constructor to override defaults. """ - def _factory(**kwargs): - kwargs = _common_kwargs(kwargs) - if "inner_properties" not in kwargs: - kwargs["inner_properties"] = [date_time_property_factory(), string_property_factory()] - return UnionProperty(**kwargs) - - return _factory + return _simple_factory( + UnionProperty, {"inner_properties": [date_time_property_factory(), string_property_factory()]} + ) @pytest.fixture @@ -257,7 +312,7 @@ def _factory(**kwargs): return _factory -def _common_kwargs(kwargs: Dict[str, Any]) -> Dict[str, Any]: +def _common_kwargs(kwargs: dict[str, Any]) -> dict[str, Any]: kwargs = { "name": "test", "required": True, @@ -267,5 +322,5 @@ def _common_kwargs(kwargs: Dict[str, Any]) -> Dict[str, Any]: **kwargs, } if not kwargs.get("python_name"): - kwargs["python_name"] = kwargs["name"] + kwargs["python_name"] = utils.PythonIdentifier(value=kwargs["name"], prefix="") return kwargs diff --git a/tests/test___init__.py b/tests/test___init__.py index c419fdf32..34ad3188f 100644 --- a/tests/test___init__.py +++ b/tests/test___init__.py @@ -1,3 +1,5 @@ +from unittest.mock import MagicMock + import pytest from openapi_python_client import Config, ErrorLevel, Project @@ -7,10 +9,6 @@ def make_project(config: Config) -> Project: - from unittest.mock import MagicMock - - from openapi_python_client import Project - return Project(openapi=MagicMock(title="My Test API"), config=config) @@ -42,23 +40,23 @@ def test__run_post_hooks_reports_missing_commands(self, project_with_dir: Projec assert fake_command_name in error.detail def test__run_post_hooks_reports_stdout_of_commands_that_error_with_no_stderr(self, project_with_dir): - failing_command = "python -c \"print('a message'); exit(1)\"" + failing_command = "python3 -c \"print('a message'); exit(1)\"" project_with_dir.config.post_hooks = [failing_command] project_with_dir._run_post_hooks() assert len(project_with_dir.errors) == 1 error = project_with_dir.errors[0] assert error.level == ErrorLevel.ERROR - assert error.header == "python failed" + assert error.header == "python3 failed" assert "a message" in error.detail def test__run_post_hooks_reports_stderr_of_commands_that_error(self, project_with_dir): - failing_command = "python -c \"print('a message'); raise Exception('some exception')\"" + failing_command = "python3 -c \"print('a message'); raise Exception('some exception')\"" project_with_dir.config.post_hooks = [failing_command] project_with_dir._run_post_hooks() assert len(project_with_dir.errors) == 1 error = project_with_dir.errors[0] assert error.level == ErrorLevel.ERROR - assert error.header == "python failed" + assert error.header == "python3 failed" assert "some exception" in error.detail diff --git a/tests/test___main__.py b/tests/test___main__.py deleted file mode 100644 index 0673c2062..000000000 --- a/tests/test___main__.py +++ /dev/null @@ -1,6 +0,0 @@ -def test_main(mocker): - app = mocker.patch("openapi_python_client.cli.app") - - from openapi_python_client import __main__ # noqa: F401 - - app.assert_called_once() diff --git a/tests/test_cli.py b/tests/test_cli.py index 1b3c21501..9712b2a04 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,157 +1,43 @@ -from unittest.mock import MagicMock - -import pytest from typer.testing import CliRunner -from openapi_python_client.parser.errors import GeneratorError, ParseError +from openapi_python_client.cli import app runner = CliRunner() -def test_version(mocker): - generate = 
mocker.patch("openapi_python_client.cli.generate") - from openapi_python_client.cli import app - +def test_version() -> None: result = runner.invoke(app, ["--version", "generate"]) - generate.assert_not_called() assert result.exit_code == 0 assert "openapi-python-client version: " in result.stdout -@pytest.fixture -def _create_new_client(mocker) -> MagicMock: - return mocker.patch("openapi_python_client.create_new_client", return_value=[]) - - -def test_bad_config(_create_new_client): - from openapi_python_client.cli import app - +def test_bad_config() -> None: config_path = "config/path" path = "cool/path" result = runner.invoke(app, ["generate", f"--config={config_path}", f"--path={path}"]) - assert result.exit_code == 2 # noqa: PLR2004 - assert "Unable to parse config" in result.stdout + assert result.exit_code == 2 + assert "Unable to parse config" in result.output class TestGenerate: - def test_generate_no_params(self, _create_new_client): - from openapi_python_client.cli import app - + def test_generate_no_params(self) -> None: result = runner.invoke(app, ["generate"]) assert result.exit_code == 1, result.output - _create_new_client.assert_not_called() - - def test_generate_url_and_path(self, _create_new_client): - from openapi_python_client.cli import app + def test_generate_url_and_path(self) -> None: result = runner.invoke(app, ["generate", "--path=blah", "--url=otherblah"]) assert result.exit_code == 1 - _create_new_client.assert_not_called() + assert result.output == "Provide either --url or --path, not both\n" - def test_generate_encoding_errors(self, _create_new_client): + def test_generate_encoding_errors(self) -> None: path = "cool/path" file_encoding = "error-file-encoding" - from openapi_python_client.cli import app - result = runner.invoke(app, ["generate", f"--path={path}", f"--file-encoding={file_encoding}"]) assert result.exit_code == 1 assert result.output == f"Unknown encoding : {file_encoding}\n" - - def test_generate_handle_errors(self, _create_new_client): - _create_new_client.return_value = [GeneratorError(detail="this is a message")] - path = "cool/path" - from openapi_python_client.cli import app - - result = runner.invoke(app, ["generate", f"--path={path}"]) - - assert result.exit_code == 1 - assert result.output == ( - "Error(s) encountered while generating, client was not created\n\n" - "Unable to generate the client\n\n" - "this is a message\n\n\n" - "If you believe this was a mistake or this tool is missing a feature you need, please open an issue at " - "https://github.com/openapi-generators/openapi-python-client/issues/new/choose\n" - ) - - def test_generate_handle_multiple_warnings(self, _create_new_client): - error_1 = ParseError(data={"test": "data"}, detail="this is a message") - error_2 = ParseError(data={"other": "data"}, detail="this is another message", header="Custom Header") - _create_new_client.return_value = [error_1, error_2] - path = "cool/path" - from openapi_python_client.cli import app - - result = runner.invoke(app, ["generate", f"--path={path}"]) - - assert result.exit_code == 0 - assert result.output == ( - "Warning(s) encountered while generating. 
Client was generated, but some pieces may be missing\n\n" - "Unable to parse this part of your OpenAPI document: \n\n" - "this is a message\n\n" - "{'test': 'data'}\n\n" - "Custom Header\n\n" - "this is another message\n\n" - "{'other': 'data'}\n\n" - "If you believe this was a mistake or this tool is missing a feature you need, please open an issue at " - "https://github.com/openapi-generators/openapi-python-client/issues/new/choose\n" - ) - - def test_generate_fail_on_warning(self, _create_new_client): - error_1 = ParseError(data={"test": "data"}, detail="this is a message") - error_2 = ParseError(data={"other": "data"}, detail="this is another message", header="Custom Header") - _create_new_client.return_value = [error_1, error_2] - path = "cool/path" - from openapi_python_client.cli import app - - result = runner.invoke(app, ["generate", f"--path={path}", "--fail-on-warning"]) - - assert result.exit_code == 1 - assert result.output == ( - "Warning(s) encountered while generating. Client was generated, but some pieces may be missing\n\n" - "Unable to parse this part of your OpenAPI document: \n\n" - "this is a message\n\n" - "{'test': 'data'}\n\n" - "Custom Header\n\n" - "this is another message\n\n" - "{'other': 'data'}\n\n" - "If you believe this was a mistake or this tool is missing a feature you need, please open an issue at " - "https://github.com/openapi-generators/openapi-python-client/issues/new/choose\n" - ) - - -@pytest.fixture -def _update_existing_client(mocker): - return mocker.patch("openapi_python_client.update_existing_client") - - -class TestUpdate: - def test_update_no_params(self, _update_existing_client): - from openapi_python_client.cli import app - - result = runner.invoke(app, ["update"]) - - assert result.exit_code == 1 - _update_existing_client.assert_not_called() - - def test_update_url_and_path(self, _update_existing_client): - from openapi_python_client.cli import app - - result = runner.invoke(app, ["update", "--path=blah", "--url=otherblah"]) - - assert result.exit_code == 1 - _update_existing_client.assert_not_called() - - def test_update_encoding_errors(self, _update_existing_client): - path = "cool/path" - file_encoding = "error-file-encoding" - from openapi_python_client.cli import app - - result = runner.invoke(app, ["update", f"--path={path}", f"--file-encoding={file_encoding}"]) - - assert result.exit_code == 1 - assert result.output == f"Unknown encoding : {file_encoding}\n" diff --git a/tests/test_config.py b/tests/test_config.py index ea03dda47..be2e8bf59 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,28 +1,40 @@ import json import os +from io import StringIO from pathlib import Path +from typing import Any import pytest -import yaml +from ruamel.yaml import YAML as _YAML from openapi_python_client.config import ConfigFile -def json_with_tabs(d): +class YAML(_YAML): + def dump_to_string(self, data: Any, **kwargs: Any) -> str: + stream = StringIO() + self.dump(data=data, stream=stream, **kwargs) + return stream.getvalue() + + +yaml = YAML(typ=["safe", "string"]) + + +def json_with_tabs(d: dict) -> str: return json.dumps(d, indent=4).replace(" ", "\t") @pytest.mark.parametrize( "filename,dump", [ - ("example.yml", yaml.dump), + ("example.yml", yaml.dump_to_string), ("example.json", json.dumps), - ("example.yaml", yaml.dump), + ("example.yaml", yaml.dump_to_string), ("example.json", json_with_tabs), ], ) @pytest.mark.parametrize("relative", 
(True, False), ids=("relative", "absolute")) -def test_load_from_path(tmp_path: Path, filename, dump, relative): +def test_load_from_path(tmp_path: Path, filename, dump, relative) -> None: yml_file = tmp_path.joinpath(filename) if relative: if not os.getenv("TEST_RELATIVE"): diff --git a/tests/test_parser/test_bodies.py b/tests/test_parser/test_bodies.py index 699ed00cf..0956d11f6 100644 --- a/tests/test_parser/test_bodies.py +++ b/tests/test_parser/test_bodies.py @@ -32,7 +32,9 @@ def test_errors(config): responses={}, ) - errs, _ = body_from_data(data=operation, schemas=Schemas(), config=config, endpoint_name="this will not succeed") + errs, _ = body_from_data( + data=operation, schemas=Schemas(), config=config, endpoint_name="this will not succeed", request_bodies={} + ) assert len(errs) == len(operation.request_body.content) assert all(isinstance(err, ParseError) for err in errs) diff --git a/tests/test_parser/test_openapi.py b/tests/test_parser/test_openapi.py index 43a9e8c42..3bd743804 100644 --- a/tests/test_parser/test_openapi.py +++ b/tests/test_parser/test_openapi.py @@ -4,192 +4,27 @@ import pytest import openapi_python_client.schema as oai -from openapi_python_client import GeneratorError from openapi_python_client.parser.errors import ParseError -from openapi_python_client.parser.openapi import Endpoint, EndpointCollection -from openapi_python_client.parser.properties import IntProperty, Parameters, Schemas +from openapi_python_client.parser.openapi import Endpoint, EndpointCollection, import_string_from_class +from openapi_python_client.parser.properties import Class, IntProperty, Parameters, Schemas from openapi_python_client.schema import DataType MODULE_NAME = "openapi_python_client.parser.openapi" -class TestGeneratorData: - def test_from_dict(self, mocker, model_property_factory, enum_property_factory): - from openapi_python_client.parser.properties import Schemas - - build_schemas = mocker.patch(f"{MODULE_NAME}.build_schemas") - build_parameters = mocker.patch(f"{MODULE_NAME}.build_parameters") - EndpointCollection = mocker.patch(f"{MODULE_NAME}.EndpointCollection") - schemas = mocker.MagicMock() - schemas.classes_by_name = { - "Model": model_property_factory(), - "Enum": enum_property_factory(), - } - parameters = Parameters() - - endpoints_collections_by_tag = mocker.MagicMock() - EndpointCollection.from_data.return_value = (endpoints_collections_by_tag, schemas, parameters) - OpenAPI = mocker.patch(f"{MODULE_NAME}.oai.OpenAPI") - openapi = OpenAPI.model_validate.return_value - openapi.openapi = mocker.MagicMock(major=3) - config = mocker.MagicMock() - in_dict = mocker.MagicMock() - - from openapi_python_client.parser.openapi import GeneratorData - - generator_data = GeneratorData.from_dict(in_dict, config=config) - - OpenAPI.model_validate.assert_called_once_with(in_dict) - build_schemas.assert_called_once_with(components=openapi.components.schemas, config=config, schemas=Schemas()) - build_parameters.assert_called_once_with( - components=openapi.components.parameters, - parameters=parameters, - config=config, - ) - EndpointCollection.from_data.assert_called_once_with( - data=openapi.paths, - schemas=build_schemas.return_value, - parameters=build_parameters.return_value, - config=config, - ) - assert generator_data.title == openapi.info.title - assert generator_data.description == openapi.info.description - assert generator_data.version == openapi.info.version - assert generator_data.endpoint_collections_by_tag == endpoints_collections_by_tag - assert 
generator_data.errors == schemas.errors + parameters.errors - assert list(generator_data.models) == [schemas.classes_by_name["Model"]] - assert list(generator_data.enums) == [schemas.classes_by_name["Enum"]] - - # Test no components - openapi.components = None - build_schemas.reset_mock() - build_parameters.reset_mock() - - GeneratorData.from_dict(in_dict, config=config) - - build_schemas.assert_not_called() - build_parameters.assert_not_called() - - def test_from_dict_invalid_schema(self, mocker): - Schemas = mocker.patch(f"{MODULE_NAME}.Schemas") - config = mocker.MagicMock() - - in_dict = {} - - from openapi_python_client.parser.openapi import GeneratorData - - generator_data = GeneratorData.from_dict(in_dict, config=config) - - assert isinstance(generator_data, GeneratorError) - assert generator_data.header == "Failed to parse OpenAPI document" - keywords = ["3 validation errors for OpenAPI", "info", "paths", "openapi", "Field required"] - assert generator_data.detail and all(keyword in generator_data.detail for keyword in keywords) - - Schemas.build.assert_not_called() - Schemas.assert_not_called() - - def test_swagger_document_invalid_schema(self, mocker): - Schemas = mocker.patch(f"{MODULE_NAME}.Schemas") - config = mocker.MagicMock() - - in_dict = {"swagger": "2.0"} - - from openapi_python_client.parser.openapi import GeneratorData - - generator_data = GeneratorData.from_dict(in_dict, config=config) - - assert isinstance(generator_data, GeneratorError) - assert generator_data.header == "Failed to parse OpenAPI document" - keywords = [ - "You may be trying to use a Swagger document; this is not supported by this project.", - "info", - "paths", - "openapi", - "Field required", - ] - assert generator_data.detail and all(keyword in generator_data.detail for keyword in keywords) - - Schemas.build.assert_not_called() - Schemas.assert_not_called() - - class TestEndpoint: def make_endpoint(self): - from openapi_python_client.parser.openapi import Endpoint - return Endpoint( path="path", method="method", description=None, name="name", requires_security=False, - tag="tag", + tags=["tag"], relative_imports={"import_3"}, ) - @pytest.mark.parametrize("response_status_code", ["not_a_number", 499]) - def test__add_responses_status_code_error(self, response_status_code, mocker): - from openapi_python_client.parser.openapi import Endpoint, Schemas - - schemas = Schemas() - response_1_data = mocker.MagicMock() - data = { - response_status_code: response_1_data, - } - endpoint = self.make_endpoint() - parse_error = ParseError(data=mocker.MagicMock()) - response_from_data = mocker.patch(f"{MODULE_NAME}.response_from_data", return_value=(parse_error, schemas)) - config = MagicMock() - - response, schemas = Endpoint._add_responses(endpoint=endpoint, data=data, schemas=schemas, config=config) - - assert response.errors == [ - ParseError( - detail=f"Invalid response status code {response_status_code} (not a valid HTTP status code), " - "response will be ommitted from generated client" - ) - ] - response_from_data.assert_not_called() - - def test__add_responses_error(self, mocker): - from openapi_python_client.parser.openapi import Endpoint, Schemas - - schemas = Schemas() - response_1_data = mocker.MagicMock() - response_2_data = mocker.MagicMock() - data = { - "200": response_1_data, - "404": response_2_data, - } - endpoint = self.make_endpoint() - parse_error = ParseError(data=mocker.MagicMock(), detail="some problem") - response_from_data = mocker.patch(f"{MODULE_NAME}.response_from_data", 
return_value=(parse_error, schemas)) - config = MagicMock() - - response, schemas = Endpoint._add_responses(endpoint=endpoint, data=data, schemas=schemas, config=config) - - response_from_data.assert_has_calls( - [ - mocker.call(status_code=200, data=response_1_data, schemas=schemas, parent_name="name", config=config), - mocker.call(status_code=404, data=response_2_data, schemas=schemas, parent_name="name", config=config), - ] - ) - assert response.errors == [ - ParseError( - detail="Cannot parse response for status code 200 (some problem), " - "response will be ommitted from generated client", - data=parse_error.data, - ), - ParseError( - detail="Cannot parse response for status code 404 (some problem), " - "response will be ommitted from generated client", - data=parse_error.data, - ), - ] - def test_add_parameters_handles_no_params(self): - from openapi_python_client.parser.openapi import Endpoint, Schemas - endpoint = self.make_endpoint() schemas = Schemas() parameters = Parameters() @@ -205,8 +40,6 @@ def test_add_parameters_handles_no_params(self): ) == (endpoint, schemas, parameters) def test_add_parameters_parse_error(self, mocker): - from openapi_python_client.parser.openapi import Endpoint - endpoint = self.make_endpoint() initial_schemas = mocker.MagicMock() initial_parameters = mocker.MagicMock() @@ -246,8 +79,6 @@ def test_add_parameters_parse_error(self, mocker): ], ) def test_add_parameters_header_types(self, data_type, allowed, config): - from openapi_python_client.parser.openapi import Endpoint - endpoint = self.make_endpoint() initial_schemas = Schemas() parameters = Parameters() @@ -406,7 +237,7 @@ def test__add_parameters_query_optionality(self, config): endpoint=endpoint, data=data, schemas=Schemas(), parameters=Parameters(), config=config ) - assert len(endpoint.query_parameters) == 2, "Not all query params were added" # noqa: PLR2004 + assert len(endpoint.query_parameters) == 2, "Not all query params were added" for param in endpoint.query_parameters: if param.name == "required": assert param.required @@ -414,8 +245,6 @@ def test__add_parameters_query_optionality(self, config): assert not param.required def test_add_parameters_duplicate_properties(self, config): - from openapi_python_client.parser.openapi import Endpoint, Schemas - endpoint = self.make_endpoint() param = oai.Parameter.model_construct( name="test", required=True, param_schema=oai.Schema.model_construct(type="string"), param_in="path" @@ -439,8 +268,6 @@ def test_add_parameters_duplicate_properties(self, config): ) def test_add_parameters_duplicate_properties_different_location(self, config): - from openapi_python_client.parser.openapi import Endpoint, Schemas - endpoint = self.make_endpoint() path_param = oai.Parameter.model_construct( name="test", required=True, param_schema=oai.Schema.model_construct(type="string"), param_in="path" @@ -463,8 +290,6 @@ def test_add_parameters_duplicate_properties_different_location(self, config): assert result.query_parameters[0].name == "test" def test_sort_parameters(self, string_property_factory): - from openapi_python_client.parser.openapi import Endpoint - endpoint = self.make_endpoint() endpoint.path = "/multiple-path-parameters/{param4}/{param2}/{param1}/{param3}" @@ -479,8 +304,6 @@ def test_sort_parameters(self, string_property_factory): assert result_names == expected_names def test_sort_parameters_missing_param(self, string_property_factory): - from openapi_python_client.parser.openapi import Endpoint - endpoint = self.make_endpoint() endpoint.path = 
"/multiple-path-parameters/{param1}/{param2}" param = string_property_factory(name="param1") @@ -493,8 +316,6 @@ def test_sort_parameters_missing_param(self, string_property_factory): assert endpoint.path in result.detail def test_sort_parameters_extra_param(self, string_property_factory): - from openapi_python_client.parser.openapi import Endpoint - endpoint = self.make_endpoint() endpoint.path = "/multiple-path-parameters" param = string_property_factory(name="param1") @@ -507,8 +328,6 @@ def test_sort_parameters_extra_param(self, string_property_factory): assert endpoint.path in result.detail def test_from_data_bad_params(self, mocker, config): - from openapi_python_client.parser.openapi import Endpoint - path = mocker.MagicMock() method = mocker.MagicMock() parse_error = ParseError(data=mocker.MagicMock()) @@ -528,17 +347,17 @@ def test_from_data_bad_params(self, mocker, config): data=data, path=path, method=method, - tag="default", + tags=["default"], schemas=initial_schemas, + responses={}, parameters=parameters, config=config, + request_bodies={}, ) assert result == (parse_error, return_schemas, return_parameters) def test_from_data_bad_responses(self, mocker, config): - from openapi_python_client.parser.openapi import Endpoint - path = mocker.MagicMock() method = mocker.MagicMock() parse_error = ParseError(data=mocker.MagicMock()) @@ -562,17 +381,17 @@ def test_from_data_bad_responses(self, mocker, config): data=data, path=path, method=method, - tag="default", + tags=["default"], schemas=initial_schemas, + responses={}, parameters=initial_parameters, config=config, + request_bodies={}, ) assert result == (parse_error, response_schemas, return_parameters) def test_from_data_standard(self, mocker, config): - from openapi_python_client.parser.openapi import Endpoint - path = mocker.MagicMock() method = mocker.MagicMock() param_schemas = mocker.MagicMock() @@ -601,10 +420,12 @@ def test_from_data_standard(self, mocker, config): data=data, path=path, method=method, - tag="default", + tags=["default"], schemas=initial_schemas, + responses={}, parameters=initial_parameters, config=config, + request_bodies={}, ) add_parameters.assert_called_once_with( @@ -615,7 +436,7 @@ def test_from_data_standard(self, mocker, config): summary="", name=data.operationId, requires_security=True, - tag="default", + tags=["default"], ), data=data, schemas=initial_schemas, @@ -623,12 +444,10 @@ def test_from_data_standard(self, mocker, config): config=config, ) _add_responses.assert_called_once_with( - endpoint=param_endpoint, data=data.responses, schemas=param_schemas, config=config + endpoint=param_endpoint, data=data.responses, schemas=param_schemas, responses={}, config=config ) def test_from_data_no_operation_id(self, mocker, config): - from openapi_python_client.parser.openapi import Endpoint - path = "/path/with/{param}/" method = "get" add_parameters = mocker.patch.object( @@ -647,8 +466,16 @@ def test_from_data_no_operation_id(self, mocker, config): mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=data.description) parameters = mocker.MagicMock() - endpoint, return_schemas, return_params = Endpoint.from_data( - data=data, path=path, method=method, tag="default", schemas=schemas, parameters=parameters, config=config + Endpoint.from_data( + data=data, + path=path, + method=method, + tags=["default"], + schemas=schemas, + responses={}, + parameters=parameters, + config=config, + request_bodies={}, ) add_parameters.assert_called_once_with( @@ -659,7 +486,7 @@ def 
test_from_data_no_operation_id(self, mocker, config): summary="", name="get_path_with_param", requires_security=True, - tag="default", + tags=["default"], ), data=data, schemas=schemas, @@ -670,12 +497,11 @@ def test_from_data_no_operation_id(self, mocker, config): endpoint=add_parameters.return_value[0], data=data.responses, schemas=add_parameters.return_value[1], + responses={}, config=config, ) def test_from_data_no_security(self, mocker, config): - from openapi_python_client.parser.openapi import Endpoint - data = oai.Operation.model_construct( description=mocker.MagicMock(), operationId=mocker.MagicMock(), @@ -695,7 +521,15 @@ def test_from_data_no_security(self, mocker, config): parameters = mocker.MagicMock() Endpoint.from_data( - data=data, path=path, method=method, tag="a", schemas=schemas, parameters=parameters, config=config + data=data, + path=path, + method=method, + tags=["a"], + schemas=schemas, + responses={}, + parameters=parameters, + config=config, + request_bodies={}, ) add_parameters.assert_called_once_with( @@ -706,7 +540,7 @@ def test_from_data_no_security(self, mocker, config): summary="", name=data.operationId, requires_security=False, - tag="a", + tags=["a"], ), data=data, parameters=parameters, @@ -717,6 +551,7 @@ def test_from_data_no_security(self, mocker, config): endpoint=add_parameters.return_value[0], data=data.responses, schemas=add_parameters.return_value[1], + responses={}, config=config, ) @@ -732,11 +567,13 @@ def test_from_data_some_bad_bodies(self, config): ), ), schemas=Schemas(), + responses={}, config=config, parameters=Parameters(), - tag="tag", + tags=["tag"], path="/", method="get", + request_bodies={}, ) assert isinstance(endpoint, Endpoint) @@ -754,43 +591,39 @@ def test_from_data_all_bodies_bad(self, config): ), ), schemas=Schemas(), + responses={}, config=config, parameters=Parameters(), - tag="tag", + tags=["tag"], path="/", method="get", + request_bodies={}, ) assert isinstance(endpoint, ParseError) @pytest.mark.parametrize( "response_types, expected", - (([], "Any"), (["Something"], "Something"), (["First", "Second", "Second"], "Union[First, Second]")), + (([], "Any"), (["Something"], "Something"), (["First", "Second", "Second"], "First | Second")), ) def test_response_type(self, response_types, expected): endpoint = self.make_endpoint() for response_type in response_types: mock_response = MagicMock() mock_response.prop.get_type_string.return_value = response_type - endpoint.responses.append(mock_response) + endpoint.responses.patterns.append(mock_response) assert endpoint.response_type() == expected class TestImportStringFromReference: def test_import_string_from_reference_no_prefix(self, mocker): - from openapi_python_client.parser.openapi import import_string_from_class - from openapi_python_client.parser.properties import Class - class_ = mocker.MagicMock(autospec=Class) result = import_string_from_class(class_) assert result == f"from .{class_.module_name} import {class_.name}" def test_import_string_from_reference_with_prefix(self, mocker): - from openapi_python_client.parser.openapi import import_string_from_class - from openapi_python_client.parser.properties import Class - prefix = mocker.MagicMock(autospec=str) class_ = mocker.MagicMock(autospec=Class) result = import_string_from_class(class_=class_, prefix=prefix) @@ -799,79 +632,6 @@ def test_import_string_from_reference_with_prefix(self, mocker): class TestEndpointCollection: - def test_from_data(self, mocker, config): - from openapi_python_client.parser.openapi import 
Endpoint, EndpointCollection - - path_1_put = oai.Operation.model_construct() - path_1_post = oai.Operation.model_construct(tags=["tag_2", "tag_3"]) - path_2_get = oai.Operation.model_construct() - data = { - "path_1": oai.PathItem.model_construct(post=path_1_post, put=path_1_put), - "path_2": oai.PathItem.model_construct(get=path_2_get), - } - endpoint_1 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"1", "2"}, path="path_1") - endpoint_2 = mocker.MagicMock(autospec=Endpoint, tag="tag_2", relative_imports={"2"}, path="path_1") - endpoint_3 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"2", "3"}, path="path_2") - schemas_1 = mocker.MagicMock() - schemas_2 = mocker.MagicMock() - schemas_3 = mocker.MagicMock() - parameters_1 = mocker.MagicMock() - parameters_2 = mocker.MagicMock() - parameters_3 = mocker.MagicMock() - endpoint_from_data = mocker.patch.object( - Endpoint, - "from_data", - side_effect=[ - (endpoint_1, schemas_1, parameters_1), - (endpoint_2, schemas_2, parameters_2), - (endpoint_3, schemas_3, parameters_3), - ], - ) - schemas = mocker.MagicMock() - parameters = mocker.MagicMock() - - result = EndpointCollection.from_data(data=data, schemas=schemas, parameters=parameters, config=config) - - endpoint_from_data.assert_has_calls( - [ - mocker.call( - data=path_1_put, - path="path_1", - method="put", - tag="default", - schemas=schemas, - parameters=parameters, - config=config, - ), - mocker.call( - data=path_1_post, - path="path_1", - method="post", - tag="tag_2", - schemas=schemas_1, - parameters=parameters_1, - config=config, - ), - mocker.call( - data=path_2_get, - path="path_2", - method="get", - tag="default", - schemas=schemas_2, - parameters=parameters_2, - config=config, - ), - ], - ) - assert result == ( - { - "default": EndpointCollection("default", endpoints=[endpoint_1, endpoint_3]), - "tag_2": EndpointCollection("tag_2", endpoints=[endpoint_2]), - }, - schemas_3, - parameters_3, - ) - def test_from_data_overrides_path_item_params_with_operation_params(self, config): data = { "/": oai.PathItem.model_construct( @@ -891,157 +651,13 @@ def test_from_data_overrides_path_item_params_with_operation_params(self, config ) } - collections, schemas, parameters = EndpointCollection.from_data( + collections, _schemas, _parameters = EndpointCollection.from_data( data=data, schemas=Schemas(), parameters=Parameters(), config=config, + request_bodies={}, + responses={}, ) collection: EndpointCollection = collections["default"] assert isinstance(collection.endpoints[0].query_parameters[0], IntProperty) - - def test_from_data_errors(self, mocker, config): - from openapi_python_client.parser.openapi import ParseError - - path_1_put = oai.Operation.model_construct() - path_1_post = oai.Operation.model_construct(tags=["tag_2", "tag_3"]) - path_2_get = oai.Operation.model_construct() - data = { - "path_1": oai.PathItem.model_construct(post=path_1_post, put=path_1_put), - "path_2": oai.PathItem.model_construct(get=path_2_get), - } - schemas_1 = mocker.MagicMock() - schemas_2 = mocker.MagicMock() - schemas_3 = mocker.MagicMock() - parameters_1 = mocker.MagicMock() - parameters_2 = mocker.MagicMock() - parameters_3 = mocker.MagicMock() - endpoint_from_data = mocker.patch.object( - Endpoint, - "from_data", - side_effect=[ - (ParseError(data="1"), schemas_1, parameters_1), - (ParseError(data="2"), schemas_2, parameters_2), - (mocker.MagicMock(errors=[ParseError(data="3")], path="path_2"), schemas_3, parameters_3), - ], - ) - schemas = 
mocker.MagicMock() - parameters = mocker.MagicMock() - - result, result_schemas, result_parameters = EndpointCollection.from_data( - data=data, schemas=schemas, config=config, parameters=parameters - ) - - endpoint_from_data.assert_has_calls( - [ - mocker.call( - data=path_1_put, - path="path_1", - method="put", - tag="default", - schemas=schemas, - parameters=parameters, - config=config, - ), - mocker.call( - data=path_1_post, - path="path_1", - method="post", - tag="tag_2", - schemas=schemas_1, - parameters=parameters_1, - config=config, - ), - mocker.call( - data=path_2_get, - path="path_2", - method="get", - tag="default", - schemas=schemas_2, - parameters=parameters_2, - config=config, - ), - ], - ) - assert result["default"].parse_errors[0].data == "1" - assert result["default"].parse_errors[1].data == "3" - assert result["tag_2"].parse_errors[0].data == "2" - assert result_schemas == schemas_3 - - def test_from_data_tags_snake_case_sanitizer(self, mocker, config): - from openapi_python_client.parser.openapi import Endpoint, EndpointCollection - - path_1_put = oai.Operation.model_construct() - path_1_post = oai.Operation.model_construct(tags=["AMF Subscription Info (Document)", "tag_3"]) - path_2_get = oai.Operation.model_construct(tags=["3. ABC"]) - data = { - "path_1": oai.PathItem.model_construct(post=path_1_post, put=path_1_put), - "path_2": oai.PathItem.model_construct(get=path_2_get), - } - endpoint_1 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"1", "2"}, path="path_1") - endpoint_2 = mocker.MagicMock( - autospec=Endpoint, tag="AMFSubscriptionInfo (Document)", relative_imports={"2"}, path="path_1" - ) - endpoint_3 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"2", "3"}, path="path_2") - schemas_1 = mocker.MagicMock() - schemas_2 = mocker.MagicMock() - schemas_3 = mocker.MagicMock() - parameters_1 = mocker.MagicMock() - parameters_2 = mocker.MagicMock() - parameters_3 = mocker.MagicMock() - endpoint_from_data = mocker.patch.object( - Endpoint, - "from_data", - side_effect=[ - (endpoint_1, schemas_1, parameters_1), - (endpoint_2, schemas_2, parameters_2), - (endpoint_3, schemas_3, parameters_3), - ], - ) - schemas = mocker.MagicMock() - parameters = mocker.MagicMock() - - result = EndpointCollection.from_data(data=data, schemas=schemas, parameters=parameters, config=config) - - endpoint_from_data.assert_has_calls( - [ - mocker.call( - data=path_1_put, - path="path_1", - method="put", - tag="default", - schemas=schemas, - parameters=parameters, - config=config, - ), - mocker.call( - data=path_1_post, - path="path_1", - method="post", - tag="amf_subscription_info_document", - schemas=schemas_1, - parameters=parameters_1, - config=config, - ), - mocker.call( - data=path_2_get, - path="path_2", - method="get", - tag="tag3_abc", - schemas=schemas_2, - parameters=parameters_2, - config=config, - ), - ], - ) - assert result == ( - { - "default": EndpointCollection("default", endpoints=[endpoint_1]), - "amf_subscription_info_document": EndpointCollection( - "amf_subscription_info_document", endpoints=[endpoint_2] - ), - "tag3_abc": EndpointCollection("tag3_abc", endpoints=[endpoint_3]), - }, - schemas_3, - parameters_3, - ) diff --git a/tests/test_parser/test_properties/test_any.py b/tests/test_parser/test_properties/test_any.py deleted file mode 100644 index 7738a24f9..000000000 --- a/tests/test_parser/test_properties/test_any.py +++ /dev/null @@ -1,12 +0,0 @@ -from openapi_python_client.parser.properties import AnyProperty - - -def 
test_default(): - AnyProperty.build( - name="test", - required=True, - default=42, - python_name="test", - description="test", - example="test", - ) diff --git a/tests/test_parser/test_properties/test_boolean.py b/tests/test_parser/test_properties/test_boolean.py deleted file mode 100644 index 0c4abf0f3..000000000 --- a/tests/test_parser/test_properties/test_boolean.py +++ /dev/null @@ -1,54 +0,0 @@ -import pytest - -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import BooleanProperty - - -def test_invalid_default_value(): - err = BooleanProperty.build( - default="not a boolean", - description=None, - example=None, - required=False, - python_name="not_a_boolean", - name="not_a_boolean", - ) - - assert isinstance(err, PropertyError) - - -@pytest.mark.parametrize( - ("value", "expected"), - ( - ("true", "True"), - ("True", "True"), - ("false", "False"), - ("False", "False"), - ), -) -def test_string_default(value, expected): - prop = BooleanProperty.build( - default=value, - description=None, - example=None, - required=False, - python_name="not_a_boolean", - name="not_a_boolean", - ) - - assert isinstance(prop, BooleanProperty) - assert prop.default == expected - - -def test_bool_default(): - prop = BooleanProperty.build( - default=True, - description=None, - example=None, - required=False, - python_name="not_a_boolean", - name="not_a_boolean", - ) - - assert isinstance(prop, BooleanProperty) - assert prop.default == "True" diff --git a/tests/test_parser/test_properties/test_const.py b/tests/test_parser/test_properties/test_const.py deleted file mode 100644 index 6d2ad0bfe..000000000 --- a/tests/test_parser/test_properties/test_const.py +++ /dev/null @@ -1,55 +0,0 @@ -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import ConstProperty -from openapi_python_client.parser.properties.protocol import Value - - -def test_default_doesnt_match_const(): - err = ConstProperty.build( - name="test", - required=True, - default="not the value", - python_name="test", - description=None, - const="the value", - ) - - assert isinstance(err, PropertyError) - - -def test_non_string_const(): - prop = ConstProperty.build( - name="test", - required=True, - default=123, - python_name="test", - description=None, - const=123, - ) - - assert isinstance(prop, ConstProperty) - - -def test_const_already_converted(): - prop = ConstProperty.build( - name="test", - required=True, - default=123, - python_name="test", - description=None, - const=Value("123"), - ) - - assert isinstance(prop, ConstProperty) - - -def test_default_already_converted(): - prop = ConstProperty.build( - name="test", - required=True, - default=Value("123"), - python_name="test", - description=None, - const=123, - ) - - assert isinstance(prop, ConstProperty) diff --git a/tests/test_parser/test_properties/test_date.py b/tests/test_parser/test_properties/test_date.py deleted file mode 100644 index 0c70b5c30..000000000 --- a/tests/test_parser/test_properties/test_date.py +++ /dev/null @@ -1,33 +0,0 @@ -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import DateProperty -from openapi_python_client.parser.properties.protocol import Value - - -def test_invalid_default_value(): - err = DateProperty.build( - default="not a date", - description=None, - example=None, - required=False, - python_name="not_a_date", - name="not_a_date", - ) - - assert isinstance(err, PropertyError) - - 
-def test_default_with_bad_type(): - err = DateProperty.build( - default=123, - description=None, - example=None, - required=False, - python_name="not_a_date", - name="not_a_date", - ) - - assert isinstance(err, PropertyError) - - -def test_dont_recheck_value(): - DateProperty.convert_value(Value("not a date but trust me")) diff --git a/tests/test_parser/test_properties/test_datetime.py b/tests/test_parser/test_properties/test_datetime.py deleted file mode 100644 index 7853208d7..000000000 --- a/tests/test_parser/test_properties/test_datetime.py +++ /dev/null @@ -1,33 +0,0 @@ -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import DateTimeProperty -from openapi_python_client.parser.properties.protocol import Value - - -def test_invalid_default_value(): - err = DateTimeProperty.build( - default="not a date", - description=None, - example=None, - required=False, - python_name="not_a_date", - name="not_a_date", - ) - - assert isinstance(err, PropertyError) - - -def test_default_with_bad_type(): - err = DateTimeProperty.build( - default=123, - description=None, - example=None, - required=False, - python_name="not_a_date", - name="not_a_date", - ) - - assert isinstance(err, PropertyError) - - -def test_dont_recheck_value(): - DateTimeProperty.convert_value(Value("not a date but trust me")) diff --git a/tests/test_parser/test_properties/test_enum_property.py b/tests/test_parser/test_properties/test_enum_property.py deleted file mode 100644 index 704f48b3b..000000000 --- a/tests/test_parser/test_properties/test_enum_property.py +++ /dev/null @@ -1,68 +0,0 @@ -import openapi_python_client.schema as oai -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import EnumProperty, Schemas - - -def test_conflict(config): - schemas = Schemas() - - _, schemas = EnumProperty.build( - data=oai.Schema(enum=["a"]), name="Existing", required=True, schemas=schemas, parent_name="", config=config - ) - err, new_schemas = EnumProperty.build( - data=oai.Schema(enum=["a", "b"]), - name="Existing", - required=True, - schemas=schemas, - parent_name="", - config=config, - ) - - assert schemas == new_schemas - assert err.detail == "Found conflicting enums named Existing with incompatible values." 
- - -def test_bad_default_value(config): - data = oai.Schema(default="B", enum=["A"]) - schemas = Schemas() - - err, new_schemas = EnumProperty.build( - data=data, name="Existing", required=True, schemas=schemas, parent_name="parent", config=config - ) - - assert schemas == new_schemas - assert err == PropertyError(detail="Value B is not valid for enum Existing", data=data) - - -def test_bad_default_type(config): - data = oai.Schema(default=123, enum=["A"]) - schemas = Schemas() - - err, new_schemas = EnumProperty.build( - data=data, name="Existing", required=True, schemas=schemas, parent_name="parent", config=config - ) - - assert schemas == new_schemas - assert isinstance(err, PropertyError) - - -def test_mixed_types(config): - data = oai.Schema(enum=["A", 1]) - schemas = Schemas() - - err, _ = EnumProperty.build( - data=data, name="Enum", required=True, schemas=schemas, parent_name="parent", config=config - ) - - assert isinstance(err, PropertyError) - - -def test_unsupported_type(config): - data = oai.Schema(enum=[1.4, 1.5]) - schemas = Schemas() - - err, _ = EnumProperty.build( - data=data, name="Enum", required=True, schemas=schemas, parent_name="parent", config=config - ) - - assert isinstance(err, PropertyError) diff --git a/tests/test_parser/test_properties/test_file.py b/tests/test_parser/test_properties/test_file.py index 87298ba03..f399e8278 100644 --- a/tests/test_parser/test_properties/test_file.py +++ b/tests/test_parser/test_properties/test_file.py @@ -3,6 +3,8 @@ def test_no_default_allowed(): + # currently this is testing an unused code path: + # https://github.com/openapi-generators/openapi-python-client/issues/1162 err = FileProperty.build( default="not none", description=None, diff --git a/tests/test_parser/test_properties/test_float.py b/tests/test_parser/test_properties/test_float.py deleted file mode 100644 index 9d1159409..000000000 --- a/tests/test_parser/test_properties/test_float.py +++ /dev/null @@ -1,43 +0,0 @@ -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import FloatProperty -from openapi_python_client.parser.properties.protocol import Value - - -def test_invalid_default(): - err = FloatProperty.build( - default="not a float", - description=None, - example=None, - required=False, - python_name="not_a_float", - name="not_a_float", - ) - - assert isinstance(err, PropertyError) - - -def test_convert_from_string(): - val = FloatProperty.convert_value("1.0") - assert isinstance(val, Value) - assert val == "1.0" - assert FloatProperty.convert_value("1") == "1.0" - - -def test_convert_from_float(): - val = FloatProperty.convert_value(1.0) - assert isinstance(val, Value) - assert val == "1.0" - assert FloatProperty.convert_value(1) == "1.0" - - -def test_invalid_type_default(): - err = FloatProperty.build( - default=True, - description=None, - example=None, - required=False, - python_name="not_a_float", - name="not_a_float", - ) - - assert isinstance(err, PropertyError) diff --git a/tests/test_parser/test_properties/test_init.py b/tests/test_parser/test_properties/test_init.py index 3290dcd39..1aa0d1432 100644 --- a/tests/test_parser/test_properties/test_init.py +++ b/tests/test_parser/test_properties/test_init.py @@ -1,210 +1,39 @@ -from unittest.mock import MagicMock, call +from unittest.mock import call -import attr import pytest import openapi_python_client.schema as oai from openapi_python_client.parser.errors import ParameterError, PropertyError from 
openapi_python_client.parser.properties import ( - ListProperty, + Class, + Parameters, + ReferencePath, Schemas, - StringProperty, - UnionProperty, + _create_schemas, + _process_model_errors, + _process_models, + _propogate_removal, + build_parameters, + build_schemas, + property_from_data, ) -from openapi_python_client.parser.properties.protocol import ModelProperty -from openapi_python_client.schema import DataType +from openapi_python_client.schema import Parameter, Reference, Schema from openapi_python_client.utils import ClassName, PythonIdentifier MODULE_NAME = "openapi_python_client.parser.properties" -class TestStringProperty: - def test_is_base_type(self, string_property_factory): - assert string_property_factory().is_base_type is True - - @pytest.mark.parametrize( - "required, expected", - ( - (True, "str"), - (False, "Union[Unset, str]"), - ), - ) - def test_get_type_string(self, string_property_factory, required, expected): - p = string_property_factory(required=required) - - assert p.get_type_string() == expected - - -class TestDateTimeProperty: - def test_is_base_type(self, date_time_property_factory): - assert date_time_property_factory().is_base_type is True - - @pytest.mark.parametrize("required", (True, False)) - def test_get_imports(self, date_time_property_factory, required): - p = date_time_property_factory(required=required) - - expected = { - "import datetime", - "from typing import cast", - "from dateutil.parser import isoparse", - } - if not required: - expected |= { - "from typing import Union", - "from ...types import UNSET, Unset", - } - - assert p.get_imports(prefix="...") == expected - - -class TestDateProperty: - def test_is_base_type(self, date_property_factory): - assert date_property_factory().is_base_type is True - - @pytest.mark.parametrize("required", (True, False)) - def test_get_imports(self, date_property_factory, required): - p = date_property_factory(required=required) - - expected = { - "import datetime", - "from typing import cast", - "from dateutil.parser import isoparse", - } - if not required: - expected |= { - "from typing import Union", - "from ...types import UNSET, Unset", - } - - assert p.get_imports(prefix="...") == expected - - class TestFileProperty: - def test_is_base_type(self, file_property_factory): - assert file_property_factory().is_base_type is True - @pytest.mark.parametrize("required", (True, False)) def test_get_imports(self, file_property_factory, required): p = file_property_factory(required=required) expected = { "from io import BytesIO", - "from ...types import File, FileJsonType", + "from ...types import File, FileTypes", } if not required: expected |= { - "from typing import Union", - "from ...types import UNSET, Unset", - } - - assert p.get_imports(prefix="...") == expected - - -class TestNoneProperty: - def test_is_base_type(self, none_property_factory): - assert none_property_factory().is_base_type is True - - -class TestBooleanProperty: - def test_is_base_type(self, boolean_property_factory): - assert boolean_property_factory().is_base_type is True - - -class TestAnyProperty: - def test_is_base_type(self, any_property_factory): - assert any_property_factory().is_base_type is True - - -class TestIntProperty: - def test_is_base_type(self, int_property_factory): - assert int_property_factory().is_base_type is True - - -class TestListProperty: - def test_is_base_type(self, list_property_factory): - assert list_property_factory().is_base_type is False - - @pytest.mark.parametrize("quoted", (True, False)) - def 
test_get_base_json_type_string_base_inner(self, list_property_factory, quoted): - p = list_property_factory() - assert p.get_base_json_type_string(quoted=quoted) == "List[str]" - - @pytest.mark.parametrize("quoted", (True, False)) - def test_get_base_json_type_string_model_inner(self, list_property_factory, model_property_factory, quoted): - m = model_property_factory() - p = list_property_factory(inner_property=m) - assert p.get_base_json_type_string(quoted=quoted) == "List[Dict[str, Any]]" - - def test_get_lazy_import_base_inner(self, list_property_factory): - p = list_property_factory() - assert p.get_lazy_imports(prefix="..") == set() - - def test_get_lazy_import_model_inner(self, list_property_factory, model_property_factory): - m = model_property_factory() - p = list_property_factory(inner_property=m) - assert p.get_lazy_imports(prefix="..") == {"from ..models.my_module import MyClass"} - - @pytest.mark.parametrize( - "required, expected", - ( - (True, "List[str]"), - (False, "Union[Unset, List[str]]"), - ), - ) - def test_get_type_string_base_inner(self, list_property_factory, required, expected): - p = list_property_factory(required=required) - - assert p.get_type_string() == expected - - @pytest.mark.parametrize( - "required, expected", - ( - (True, "List['MyClass']"), - (False, "Union[Unset, List['MyClass']]"), - ), - ) - def test_get_type_string_model_inner(self, list_property_factory, model_property_factory, required, expected): - m = model_property_factory() - p = list_property_factory(required=required, inner_property=m) - - assert p.get_type_string() == expected - - @pytest.mark.parametrize( - "quoted,expected", - [ - (False, "List[str]"), - (True, "List[str]"), - ], - ) - def test_get_base_type_string_base_inner(self, list_property_factory, quoted, expected): - p = list_property_factory() - assert p.get_base_type_string(quoted=quoted) == expected - - @pytest.mark.parametrize( - "quoted,expected", - [ - (False, "List['MyClass']"), - (True, "List['MyClass']"), - ], - ) - def test_get_base_type_string_model_inner(self, list_property_factory, model_property_factory, quoted, expected): - m = model_property_factory() - p = list_property_factory(inner_property=m) - assert p.get_base_type_string(quoted=quoted) == expected - - @pytest.mark.parametrize("required", (True, False)) - def test_get_type_imports(self, list_property_factory, date_time_property_factory, required): - inner_property = date_time_property_factory() - p = list_property_factory(inner_property=inner_property, required=required) - expected = { - "import datetime", - "from typing import cast", - "from dateutil.parser import isoparse", - "from typing import cast, List", - } - if not required: - expected |= { - "from typing import Union", "from ...types import UNSET, Unset", } @@ -212,9 +41,6 @@ def test_get_type_imports(self, list_property_factory, date_time_property_factor class TestUnionProperty: - def test_is_base_type(self, union_property_factory): - assert union_property_factory().is_base_type is False - def test_get_lazy_import_base_inner(self, union_property_factory): p = union_property_factory() assert p.get_lazy_imports(prefix="..") == set() @@ -227,11 +53,11 @@ def test_get_lazy_import_model_inner(self, union_property_factory, model_propert @pytest.mark.parametrize( "required,no_optional,json,expected", [ - (False, False, False, "Union[Unset, datetime.datetime, str]"), - (False, True, False, "Union[datetime.datetime, str]"), - (True, False, False, "Union[datetime.datetime, str]"), - (True, True, False, 
"Union[datetime.datetime, str]"), - (False, False, True, "Union[Unset, str]"), + (False, False, False, "datetime.datetime | str | Unset"), + (False, True, False, "datetime.datetime | str"), + (True, False, False, "datetime.datetime | str"), + (True, True, False, "datetime.datetime | str"), + (False, False, True, "str | Unset"), (False, True, True, "str"), (True, False, True, "str"), (True, True, True, "str"), @@ -252,7 +78,7 @@ def test_get_type_string( inner_properties=[date_time_property_factory(), string_property_factory()], ) - assert p.get_base_type_string() == "Union[datetime.datetime, str]" + assert p.get_base_type_string() == "datetime.datetime | str" assert p.get_type_string(no_optional=no_optional, json=json) == expected @@ -261,7 +87,7 @@ def test_get_base_type_string_base_inners( ): p = union_property_factory(inner_properties=[date_time_property_factory(), string_property_factory()]) - assert p.get_base_type_string() == "Union[datetime.datetime, str]" + assert p.get_base_type_string() == "datetime.datetime | str" def test_get_base_type_string_one_base_inner(self, union_property_factory, date_time_property_factory): p = union_property_factory( @@ -275,14 +101,14 @@ def test_get_base_type_string_one_model_inner(self, union_property_factory, mode inner_properties=[model_property_factory()], ) - assert p.get_base_type_string() == "'MyClass'" + assert p.get_base_type_string() == "MyClass" def test_get_base_type_string_model_inners( self, union_property_factory, date_time_property_factory, model_property_factory ): p = union_property_factory(inner_properties=[date_time_property_factory(), model_property_factory()]) - assert p.get_base_type_string() == "Union['MyClass', datetime.datetime]" + assert p.get_base_type_string() == "datetime.datetime | MyClass" def test_get_base_json_type_string(self, union_property_factory, date_time_property_factory): p = union_property_factory( @@ -301,282 +127,28 @@ def test_get_type_imports(self, union_property_factory, date_time_property_facto "import datetime", "from typing import cast", "from dateutil.parser import isoparse", - "from typing import cast, Union", } if not required: expected |= { - "from typing import Union", "from ...types import UNSET, Unset", } assert p.get_imports(prefix="...") == expected -class TestEnumProperty: - def test_is_base_type(self, enum_property_factory): - assert enum_property_factory().is_base_type is True - - @pytest.mark.parametrize( - "required, expected", - ( - (False, "Union[Unset, {}]"), - (True, "{}"), - ), - ) - def test_get_type_string(self, mocker, enum_property_factory, required, expected): - fake_class = mocker.MagicMock() - fake_class.name = "MyTestEnum" - - p = enum_property_factory(class_info=fake_class, required=required) - - assert p.get_type_string() == expected.format(fake_class.name) - assert p.get_type_string(no_optional=True) == fake_class.name - assert p.get_type_string(json=True) == expected.format("str") - - def test_get_imports(self, mocker, enum_property_factory): - fake_class = mocker.MagicMock(module_name="my_test_enum") - fake_class.name = "MyTestEnum" - prefix = "..." 
- - enum_property = enum_property_factory(class_info=fake_class, required=False) - - assert enum_property.get_imports(prefix=prefix) == { - f"from {prefix}models.{fake_class.module_name} import {fake_class.name}", - "from typing import Union", # Makes sure unset is handled via base class - "from ...types import UNSET, Unset", - } - - def test_values_from_list(self): - from openapi_python_client.parser.properties import EnumProperty - - data = ["abc", "123", "a23", "1bc", 4, -3, "a Thing WIth spaces", ""] - - result = EnumProperty.values_from_list(data) - - assert result == { - "ABC": "abc", - "VALUE_1": "123", - "A23": "a23", - "VALUE_3": "1bc", - "VALUE_4": 4, - "VALUE_NEGATIVE_3": -3, - "A_THING_WITH_SPACES": "a Thing WIth spaces", - "VALUE_7": "", - } - - def test_values_from_list_duplicate(self): - from openapi_python_client.parser.properties import EnumProperty - - data = ["abc", "123", "a23", "abc"] - - with pytest.raises(ValueError): - EnumProperty.values_from_list(data) - - class TestPropertyFromData: - def test_property_from_data_str_enum(self, enum_property_factory, config): - from openapi_python_client.parser.properties import Class, Schemas, property_from_data - from openapi_python_client.schema import Schema - - existing = enum_property_factory() - data = Schema(title="AnEnum", enum=["A", "B", "C"], default="B") - name = "my_enum" - required = True - - schemas = Schemas(classes_by_name={"AnEnum": existing}) - - prop, new_schemas = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="parent", config=config - ) - - assert prop == enum_property_factory( - name=name, - required=required, - values={"A": "A", "B": "B", "C": "C"}, - class_info=Class(name="ParentAnEnum", module_name="parent_an_enum"), - value_type=str, - default="ParentAnEnum.B", - ) - assert schemas != new_schemas, "Provided Schemas was mutated" - assert new_schemas.classes_by_name == { - "AnEnum": existing, - "ParentAnEnum": prop, - } - - def test_property_from_data_str_enum_with_null( - self, enum_property_factory, union_property_factory, none_property_factory, config - ): - from openapi_python_client.parser.properties import Class, Schemas, property_from_data - from openapi_python_client.schema import Schema - - existing = enum_property_factory() - data = Schema(title="AnEnum", enum=["A", "B", "C", None], default="B") - name = "my_enum" - required = True - - schemas = Schemas(classes_by_name={"AnEnum": existing}) - - prop, new_schemas = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="parent", config=config - ) - - # None / null is removed from enum, and property is now nullable - assert isinstance(prop, UnionProperty), "Enums with None should be converted to UnionProperties" - enum_prop = enum_property_factory( - name="my_enum_type_1", - required=required, - values={"A": "A", "B": "B", "C": "C"}, - class_info=Class(name="ParentAnEnum", module_name="parent_an_enum"), - value_type=str, - default="ParentAnEnum.B", - ) - none_property = none_property_factory(name="my_enum_type_0", required=required) - assert prop == union_property_factory( - name=name, default="ParentAnEnum.B", inner_properties=[none_property, enum_prop] - ) - assert schemas != new_schemas, "Provided Schemas was mutated" - assert new_schemas.classes_by_name == { - "AnEnum": existing, - "ParentAnEnum": enum_prop, - } - - def test_property_from_data_null_enum(self, enum_property_factory, none_property_factory, config): - from openapi_python_client.parser.properties 
import Schemas, property_from_data - from openapi_python_client.schema import Schema - - data = Schema(title="AnEnumWithOnlyNull", enum=[None], default=None) - name = "my_enum" - required = True - - schemas = Schemas() - - prop, new_schemas = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="parent", config=config - ) - - assert prop == none_property_factory(name="my_enum", required=required, default="None") - - def test_property_from_data_int_enum(self, enum_property_factory, config): - from openapi_python_client.parser.properties import Class, Schemas, property_from_data - from openapi_python_client.schema import Schema - - name = "my_enum" - required = True - data = Schema.model_construct(title="anEnum", enum=[1, 2, 3], default=3) - - existing = enum_property_factory() - schemas = Schemas(classes_by_name={"AnEnum": existing}) - - prop, new_schemas = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="parent", config=config - ) - - assert prop == enum_property_factory( - name=name, - required=required, - values={"VALUE_1": 1, "VALUE_2": 2, "VALUE_3": 3}, - class_info=Class(name="ParentAnEnum", module_name="parent_an_enum"), - value_type=int, - default="ParentAnEnum.VALUE_3", - ) - assert schemas != new_schemas, "Provided Schemas was mutated" - assert new_schemas.classes_by_name == { - "AnEnum": existing, - "ParentAnEnum": prop, - } - - def test_property_from_data_ref_enum(self, enum_property_factory, config): - from openapi_python_client.parser.properties import Class, Schemas, property_from_data - - name = "some_enum" - data = oai.Reference.model_construct(ref="#/components/schemas/MyEnum") - existing_enum = enum_property_factory( - name="an_enum", - required=False, - values={"A": "a"}, - class_info=Class(name="MyEnum", module_name="my_enum"), - ) - schemas = Schemas(classes_by_reference={"/components/schemas/MyEnum": existing_enum}) - - prop, new_schemas = property_from_data( - name=name, required=False, data=data, schemas=schemas, parent_name="", config=config - ) - - assert prop == enum_property_factory( - name="some_enum", - required=False, - values={"A": "a"}, - class_info=Class(name="MyEnum", module_name="my_enum"), - ) - assert schemas == new_schemas - - def test_property_from_data_ref_enum_with_overridden_default(self, enum_property_factory, config): - from openapi_python_client.parser.properties import Class, Schemas, property_from_data - - name = "some_enum" - required = False - data = oai.Schema.model_construct( - default="b", allOf=[oai.Reference.model_construct(ref="#/components/schemas/MyEnum")] - ) - existing_enum = enum_property_factory( - name="an_enum", - default="MyEnum.A", - required=required, - values={"A": "a", "B": "b"}, - class_info=Class(name="MyEnum", module_name="my_enum"), - ) - schemas = Schemas(classes_by_reference={"/components/schemas/MyEnum": existing_enum}) - - prop, new_schemas = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="", config=config - ) - - assert prop == enum_property_factory( - name="some_enum", - default="MyEnum.B", - required=required, - values={"A": "a", "B": "b"}, - class_info=Class(name="MyEnum", module_name="my_enum"), - ) - assert schemas == new_schemas - - def test_property_from_data_ref_enum_with_invalid_default(self, enum_property_factory, config): - from openapi_python_client.parser.properties import Class, Schemas, property_from_data - - name = "some_enum" - data = oai.Schema.model_construct( - 
default="x", allOf=[oai.Reference.model_construct(ref="#/components/schemas/MyEnum")] - ) - existing_enum = enum_property_factory( - name="an_enum", - default="MyEnum.A", - values={"A": "a", "B": "b"}, - class_info=Class(name="MyEnum", module_name="my_enum"), - python_name="an_enum", - ) - schemas = Schemas(classes_by_reference={"/components/schemas/MyEnum": existing_enum}) - - prop, new_schemas = property_from_data( - name=name, required=False, data=data, schemas=schemas, parent_name="", config=config - ) - - assert schemas == new_schemas - assert prop == PropertyError(data=data, detail="Value x is not valid for enum an_enum") - def test_property_from_data_ref_model(self, model_property_factory, config): - from openapi_python_client.parser.properties import Class, Schemas, property_from_data - name = "new_name" required = False - class_name = "MyModel" + class_name = ClassName("MyModel", "") data = oai.Reference.model_construct(ref=f"#/components/schemas/{class_name}") - class_info = Class(name=class_name, module_name="my_model") + class_info = Class(name=class_name, module_name=PythonIdentifier("my_model", "")) existing_model = model_property_factory( name="old_name", class_info=class_info, ) - schemas = Schemas(classes_by_reference={f"/components/schemas/{class_name}": existing_model}) + schemas = Schemas(classes_by_reference={ReferencePath(f"/components/schemas/{class_name}"): existing_model}) prop, new_schemas = property_from_data( name=name, required=required, data=data, schemas=schemas, parent_name="", config=config @@ -590,8 +162,6 @@ def test_property_from_data_ref_model(self, model_property_factory, config): assert schemas == new_schemas def test_property_from_data_ref_not_found(self, mocker): - from openapi_python_client.parser.properties import PropertyError, Schemas, property_from_data - data = oai.Reference.model_construct(ref="a/b/c") parse_reference_path = mocker.patch(f"{MODULE_NAME}.parse_reference_path") schemas = Schemas() @@ -607,8 +177,6 @@ def test_property_from_data_ref_not_found(self, mocker): @pytest.mark.parametrize("references_exist", (True, False)) def test_property_from_data_ref(self, any_property_factory, references_exist, config): - from openapi_python_client.parser.properties import Schemas, property_from_data - name = "new_name" required = False ref_path = "/components/schemas/RefName" @@ -628,8 +196,6 @@ def test_property_from_data_ref(self, any_property_factory, references_exist, co assert schemas.dependencies == {ref_path: {*roots, *references.get(ref_path, set())}} def test_property_from_data_invalid_ref(self, mocker): - from openapi_python_client.parser.properties import PropertyError, Schemas, property_from_data - name = mocker.MagicMock() required = mocker.MagicMock() data = oai.Reference.model_construct(ref=mocker.MagicMock()) @@ -646,175 +212,9 @@ def test_property_from_data_invalid_ref(self, mocker): assert prop == PropertyError(data=data, detail="bad stuff") assert schemas == new_schemas - def test_property_from_data_array(self, config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - name = "a_list_prop" - required = True - data = oai.Schema( - type=DataType.ARRAY, - items=oai.Schema(type=DataType.STRING), - ) - schemas = Schemas() - - response = property_from_data( - name=name, - required=required, - data=data, - schemas=schemas, - parent_name="parent", - config=config, - )[0] - - assert isinstance(response, ListProperty) - assert isinstance(response.inner_property, StringProperty) - - def 
test_property_from_data_union(self, config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - name = "union_prop" - required = True - data = oai.Schema( - anyOf=[oai.Schema(type=DataType.NUMBER)], - oneOf=[ - oai.Schema(type=DataType.INTEGER), - ], - ) - schemas = Schemas() - - response = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="parent", config=config - )[0] - - assert isinstance(response, UnionProperty) - assert len(response.inner_properties) == 2 # noqa: PLR2004 - - def test_property_from_data_list_of_types(self, config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - name = "union_prop" - required = True - data = oai.Schema( - type=[DataType.NUMBER, DataType.NULL], - ) - schemas = Schemas() - - response = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="parent", config=config - )[0] - - assert isinstance(response, UnionProperty) - assert len(response.inner_properties) == 2 # noqa: PLR2004 - - def test_property_from_data_union_of_one_element(self, model_property_factory, config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - name = "new_name" - required = False - class_name = "MyModel" - existing_model: ModelProperty = model_property_factory() - schemas = Schemas(classes_by_reference={f"/{class_name}": existing_model}) - - data = oai.Schema.model_construct( - allOf=[oai.Reference.model_construct(ref=f"#/{class_name}")], - ) - - prop, schemas = property_from_data( - name=name, required=required, data=data, schemas=schemas, parent_name="parent", config=config - ) - - assert prop == attr.evolve(existing_model, name=name, required=required, python_name=PythonIdentifier(name, "")) - - def test_property_from_data_no_valid_props_in_data(self, any_property_factory): - from openapi_python_client.parser.properties import Schemas, property_from_data - - schemas = Schemas() - data = oai.Schema() - name = "blah" - - prop, new_schemas = property_from_data( - name=name, required=True, data=data, schemas=schemas, parent_name="parent", config=MagicMock() - ) - - assert prop == any_property_factory(name=name, required=True, default=None) - assert new_schemas == schemas - class TestStringBasedProperty: - @pytest.mark.parametrize("required", (True, False)) - def test_no_format(self, string_property_factory, required, config): - from openapi_python_client.parser.properties import property_from_data - - name = "some_prop" - data = oai.Schema.model_construct(type="string", default='"hello world"', pattern="abcdef") - - p, _ = property_from_data( - name=name, required=required, data=data, parent_name=None, config=config, schemas=Schemas() - ) - - assert p == string_property_factory( - name=name, required=required, default="'\\\\\"hello world\\\\\"'", pattern=data.pattern - ) - - def test_datetime_format(self, date_time_property_factory, config): - from openapi_python_client.parser.properties import property_from_data - - name = "datetime_prop" - required = True - data = oai.Schema.model_construct(type="string", schema_format="date-time", default="2020-11-06T12:00:00") - - p, _ = property_from_data( - name=name, required=required, data=data, schemas=Schemas(), config=config, parent_name="" - ) - - assert p == date_time_property_factory(name=name, required=required, default=f"isoparse('{data.default}')") - - def test_datetime_bad_default(self, config): - from openapi_python_client.parser.properties import 
property_from_data - - name = "datetime_prop" - required = True - data = oai.Schema.model_construct(type="string", schema_format="date-time", default="a") - - result, _ = property_from_data( - name=name, required=required, data=data, schemas=Schemas(), config=config, parent_name="" - ) - - assert isinstance(result, PropertyError) - assert result.detail.startswith("Invalid datetime") - - def test_date_format(self, date_property_factory, config): - from openapi_python_client.parser.properties import property_from_data - - name = "date_prop" - required = True - - data = oai.Schema.model_construct(type="string", schema_format="date", default="2020-11-06") - - p, _ = property_from_data( - name=name, required=required, data=data, schemas=Schemas(), config=config, parent_name="" - ) - - assert p == date_property_factory(name=name, required=required, default=f"isoparse('{data.default}').date()") - - def test_date_format_bad_default(self, config): - from openapi_python_client.parser.properties import property_from_data - - name = "date_prop" - required = True - - data = oai.Schema.model_construct(type="string", schema_format="date", default="a") - - p, _ = property_from_data( - name=name, required=required, data=data, schemas=Schemas(), config=config, parent_name="" - ) - - assert isinstance(p, PropertyError) - assert p.detail.startswith("Invalid date") - def test__string_based_property_binary_format(self, file_property_factory, config): - from openapi_python_client.parser.properties import property_from_data - name = "file_prop" required = True data = oai.Schema.model_construct(type="string", schema_format="binary", default="a") @@ -824,47 +224,28 @@ def test__string_based_property_binary_format(self, file_property_factory, confi ) assert p == file_property_factory(name=name, required=required) - def test__string_based_property_unsupported_format(self, string_property_factory, config): - from openapi_python_client.parser.properties import property_from_data - - name = "unknown" - required = True - data = oai.Schema.model_construct(type="string", schema_format="blah") - - p, _ = property_from_data( - name=name, required=required, data=data, schemas=Schemas(), config=config, parent_name="" - ) - - assert p == string_property_factory(name=name, required=required) - class TestCreateSchemas: - def test_skips_references_and_keeps_going(self, mocker, config): - from openapi_python_client.parser.properties import Schemas, _create_schemas - from openapi_python_client.schema import Reference, Schema - - components = {"a_ref": Reference.model_construct(), "a_schema": Schema.model_construct()} + def test_dereference_references(self, mocker, config): + components = {"a_ref": Reference(ref="#/components/schemas/a_schema"), "a_schema": Schema.model_construct()} update_schemas_with_data = mocker.patch(f"{MODULE_NAME}.update_schemas_with_data") parse_reference_path = mocker.patch(f"{MODULE_NAME}.parse_reference_path") schemas = Schemas() result = _create_schemas(components=components, schemas=schemas, config=config) - # Should not even try to parse a path for the Reference - parse_reference_path.assert_called_once_with("#/components/schemas/a_schema") - update_schemas_with_data.assert_called_once_with( + + parse_reference_path.assert_has_calls( + [call("#/components/schemas/a_ref"), call("#/components/schemas/a_schema")] + ) + update_schemas_with_data.assert_called_with( ref_path=parse_reference_path.return_value, config=config, data=components["a_schema"], - schemas=Schemas( - 
errors=[PropertyError(detail="Reference schemas are not supported.", data=components["a_ref"])] - ), + schemas=result, ) assert result == update_schemas_with_data.return_value def test_records_bad_uris_and_keeps_going(self, mocker, config): - from openapi_python_client.parser.properties import Schemas, _create_schemas - from openapi_python_client.schema import Schema - components = {"first": Schema.model_construct(), "second": Schema.model_construct()} update_schemas_with_data = mocker.patch(f"{MODULE_NAME}.update_schemas_with_data") parse_reference_path = mocker.patch( @@ -888,9 +269,6 @@ def test_records_bad_uris_and_keeps_going(self, mocker, config): assert result == update_schemas_with_data.return_value def test_retries_failing_properties_while_making_progress(self, mocker, config): - from openapi_python_client.parser.properties import Schemas, _create_schemas - from openapi_python_client.schema import Schema - components = {"first": Schema.model_construct(), "second": Schema.model_construct()} update_schemas_with_data = mocker.patch( f"{MODULE_NAME}.update_schemas_with_data", side_effect=[PropertyError(), Schemas(), PropertyError()] @@ -906,58 +284,26 @@ def test_retries_failing_properties_while_making_progress(self, mocker, config): call("#/components/schemas/first"), ] ) - assert update_schemas_with_data.call_count == 3 # noqa: PLR2004 + assert update_schemas_with_data.call_count == 3 assert result.errors == [PropertyError()] class TestProcessModels: - def test_retries_failing_models_while_making_progress( - self, mocker, model_property_factory, any_property_factory, config - ): - from openapi_python_client.parser.properties import _process_models - - first_model = model_property_factory() - second_class_name = ClassName("second", "") - schemas = Schemas( - classes_by_name={ - ClassName("first", ""): first_model, - second_class_name: model_property_factory(), - ClassName("non-model", ""): any_property_factory(), - } - ) - process_model = mocker.patch( - f"{MODULE_NAME}.process_model", side_effect=[PropertyError(), Schemas(), PropertyError()] - ) - process_model_errors = mocker.patch(f"{MODULE_NAME}._process_model_errors", return_value=["error"]) - - result = _process_models(schemas=schemas, config=config) - - process_model.assert_has_calls( - [ - call(first_model, schemas=schemas, config=config), - call(schemas.classes_by_name[second_class_name], schemas=schemas, config=config), - call(first_model, schemas=result, config=config), - ] - ) - assert process_model_errors.was_called_once_with([(first_model, PropertyError())]) - assert all(error in result.errors for error in process_model_errors.return_value) - def test_detect_recursive_allof_reference_no_retry(self, mocker, model_property_factory, config): - from openapi_python_client.parser.properties import Class, _process_models - from openapi_python_client.schema import Reference - class_name = ClassName("class_name", "") recursive_model = model_property_factory( class_info=Class(name=class_name, module_name=PythonIdentifier("module_name", "")) ) + second_model = model_property_factory() schemas = Schemas( classes_by_name={ "recursive": recursive_model, - "second": model_property_factory(), - } + "second": second_model, + }, + models_to_process=[recursive_model, second_model], ) recursion_error = PropertyError(data=Reference.model_construct(ref=f"#/{class_name}")) - process_model = mocker.patch(f"{MODULE_NAME}.process_model", side_effect=[recursion_error, Schemas()]) + process_model = mocker.patch(f"{MODULE_NAME}.process_model", 
side_effect=[recursion_error, schemas]) process_model_errors = mocker.patch(f"{MODULE_NAME}._process_model_errors", return_value=["error"]) result = _process_models(schemas=schemas, config=config) @@ -972,12 +318,60 @@ def test_detect_recursive_allof_reference_no_retry(self, mocker, model_property_ assert all(error in result.errors for error in process_model_errors.return_value) assert "\n\nRecursive allOf reference found" in recursion_error.detail + def test_resolve_reference_to_single_allof_reference(self, config, model_property_factory): + # test for https://github.com/openapi-generators/openapi-python-client/issues/1091 + + components = { + "Model1": oai.Schema.model_construct( + type="object", + properties={ + "prop1": oai.Schema.model_construct(type="string"), + }, + ), + "Model2": oai.Schema.model_construct( + allOf=[ + oai.Reference.model_construct(ref="#/components/schemas/Model1"), + ] + ), + "Model3": oai.Schema.model_construct( + allOf=[ + oai.Reference.model_construct(ref="#/components/schemas/Model2"), + oai.Schema.model_construct( + type="object", + properties={ + "prop2": oai.Schema.model_construct(type="string"), + }, + ), + ], + ), + } + schemas = Schemas() + + result = build_schemas(components=components, schemas=schemas, config=config) + + assert result.errors == [] + assert result.models_to_process == [] + + # Classes should only be generated for Model1 and Model3 + assert result.classes_by_name.keys() == {"Model1", "Model3"} + + # References to Model2 should be resolved to the same class as Model1 + assert result.classes_by_reference.keys() == { + "/components/schemas/Model1", + "/components/schemas/Model2", + "/components/schemas/Model3", + } + assert ( + result.classes_by_reference["/components/schemas/Model2"].class_info + == result.classes_by_reference["/components/schemas/Model1"].class_info + ) + + # Verify that Model3 extended the properties from Model1 + assert [p.name for p in result.classes_by_name["Model3"].optional_properties] == ["prop1", "prop2"] + class TestPropogateRemoval: def test_propogate_removal_class_name(self): - from openapi_python_client.parser.properties import ReferencePath, _propogate_removal - from openapi_python_client.utils import ClassName - root = ClassName("ClassName", "") ref_path = ReferencePath("/reference") other_class_name = ClassName("OtherClassName", "") @@ -995,9 +389,6 @@ def test_propogate_removal_class_name(self): assert not error.detail def test_propogate_removal_ref_path(self): - from openapi_python_client.parser.properties import ReferencePath, _propogate_removal - from openapi_python_client.utils import ClassName - root = ReferencePath("/root/reference") class_name = ClassName("ClassName", "") ref_path = ReferencePath("/ref/path") @@ -1015,9 +406,6 @@ def test_propogate_removal_ref_path(self): assert error.detail == f"\n{root}\n{ref_path}" def test_propogate_removal_ref_path_no_refs(self): - from openapi_python_client.parser.properties import ReferencePath, _propogate_removal - from openapi_python_client.utils import ClassName - root = ReferencePath("/root/reference") class_name = ClassName("ClassName", "") ref_path = ReferencePath("/ref/path") @@ -1031,9 +419,6 @@ def test_propogate_removal_ref_path_no_refs(self): assert error.detail == f"\n{root}" def test_propogate_removal_ref_path_already_removed(self): - from openapi_python_client.parser.properties import ReferencePath, _propogate_removal - from openapi_python_client.utils import ClassName - root = 
ReferencePath("/root/reference") class_name = ClassName("ClassName", "") ref_path = ReferencePath("/ref/path") @@ -1052,8 +437,6 @@ def test_propogate_removal_ref_path_already_removed(self): def test_process_model_errors(mocker, model_property_factory): - from openapi_python_client.parser.properties import _process_model_errors - propogate_removal = mocker.patch(f"{MODULE_NAME}._propogate_removal") model_errors = [ (model_property_factory(roots={"root1", "root2"}), PropertyError(detail="existing detail")), @@ -1073,9 +456,6 @@ def test_process_model_errors(mocker, model_property_factory): class TestBuildParameters: def test_skips_references_and_keeps_going(self, mocker, config): - from openapi_python_client.parser.properties import Parameters, build_parameters - from openapi_python_client.schema import Parameter, Reference - parameters = { "reference": Reference(ref="#/components/parameters/another_parameter"), "defined": Parameter( @@ -1105,9 +485,6 @@ def test_skips_references_and_keeps_going(self, mocker, config): assert result == update_parameters_with_data.return_value def test_records_bad_uris_and_keeps_going(self, mocker, config): - from openapi_python_client.parser.properties import Parameters, build_parameters - from openapi_python_client.schema import Parameter - parameters = {"first": Parameter.model_construct(), "second": Parameter.model_construct()} update_parameters_with_data = mocker.patch(f"{MODULE_NAME}.update_parameters_with_data") parse_reference_path = mocker.patch( @@ -1130,9 +507,6 @@ def test_records_bad_uris_and_keeps_going(self, mocker, config): assert result == update_parameters_with_data.return_value def test_retries_failing_parameters_while_making_progress(self, mocker, config): - from openapi_python_client.parser.properties import Parameters, build_parameters - from openapi_python_client.schema import Parameter - parameters = {"first": Parameter.model_construct(), "second": Parameter.model_construct()} update_parameters_with_data = mocker.patch( f"{MODULE_NAME}.update_parameters_with_data", side_effect=[ParameterError(), Parameters(), ParameterError()] @@ -1147,14 +521,11 @@ def test_retries_failing_parameters_while_making_progress(self, mocker, config): call("#/components/parameters/first"), ] ) - assert update_parameters_with_data.call_count == 3 # noqa: PLR2004 + assert update_parameters_with_data.call_count == 3 assert result.errors == [ParameterError()] def test_build_schemas(mocker, config): - from openapi_python_client.parser.properties import Schemas, build_schemas - from openapi_python_client.schema import Reference, Schema - create_schemas = mocker.patch(f"{MODULE_NAME}._create_schemas") process_models = mocker.patch(f"{MODULE_NAME}._process_models") diff --git a/tests/test_parser/test_properties/test_int.py b/tests/test_parser/test_properties/test_int.py deleted file mode 100644 index e50166e4a..000000000 --- a/tests/test_parser/test_properties/test_int.py +++ /dev/null @@ -1,35 +0,0 @@ -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import IntProperty -from openapi_python_client.parser.properties.protocol import Value - - -def test_invalid_default(): - err = IntProperty.build( - default="not a float", - description=None, - example=None, - required=False, - python_name="not_a_float", - name="not_a_float", - ) - - assert isinstance(err, PropertyError) - - -def test_convert_from_string(): - val = IntProperty.convert_value("1") - assert isinstance(val, Value) - assert val == "1" - - -def 
test_invalid_type_default(): - err = IntProperty.build( - default=True, - description=None, - example=None, - required=False, - python_name="not_a_float", - name="not_a_float", - ) - - assert isinstance(err, PropertyError) diff --git a/tests/test_parser/test_properties/test_list_property.py b/tests/test_parser/test_properties/test_list_property.py deleted file mode 100644 index bac87e669..000000000 --- a/tests/test_parser/test_properties/test_list_property.py +++ /dev/null @@ -1,85 +0,0 @@ -import attr - -import openapi_python_client.schema as oai -from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import ListProperty -from openapi_python_client.schema import DataType - - -def test_build_list_property_no_items(config): - from openapi_python_client.parser import properties - - name = "list_prop" - required = True - data = oai.Schema(type=DataType.ARRAY) - schemas = properties.Schemas() - - p, new_schemas = ListProperty.build( - name=name, - required=required, - data=data, - schemas=schemas, - parent_name="parent", - config=config, - process_properties=True, - roots={"root"}, - ) - - assert p == PropertyError(data=data, detail="type array must have items defined") - assert new_schemas == schemas - - -def test_build_list_property_invalid_items(config): - from openapi_python_client.parser import properties - - name = "name" - required = True - data = oai.Schema( - type=DataType.ARRAY, - items=oai.Reference(ref="doesnt exist"), - ) - schemas = properties.Schemas(errors=["error"]) - process_properties = False - roots = {"root"} - - p, new_schemas = ListProperty.build( - name=name, - required=required, - data=data, - schemas=attr.evolve(schemas), - parent_name="parent", - config=config, - roots=roots, - process_properties=process_properties, - ) - - assert isinstance(p, PropertyError) - assert p.data == data.items - assert p.header.startswith(f"invalid data in items of array {name}") - assert new_schemas == schemas - - -def test_build_list_property(any_property_factory, config): - from openapi_python_client.parser import properties - - name = "prop" - data = oai.Schema( - type=DataType.ARRAY, - items=oai.Schema(), - ) - schemas = properties.Schemas(errors=["error"]) - - p, new_schemas = ListProperty.build( - name=name, - required=True, - data=data, - schemas=schemas, - parent_name="parent", - config=config, - roots={"root"}, - process_properties=True, - ) - - assert isinstance(p, properties.ListProperty) - assert p.inner_property == any_property_factory(name=f"{name}_item") - assert new_schemas == schemas diff --git a/tests/test_parser/test_properties/test_merge_properties.py b/tests/test_parser/test_properties/test_merge_properties.py new file mode 100644 index 000000000..819f9ec26 --- /dev/null +++ b/tests/test_parser/test_properties/test_merge_properties.py @@ -0,0 +1,291 @@ +from itertools import permutations + +import pytest +from attr import evolve + +from openapi_python_client.parser.errors import PropertyError +from openapi_python_client.parser.properties.float import FloatProperty +from openapi_python_client.parser.properties.int import IntProperty +from openapi_python_client.parser.properties.merge_properties import merge_properties +from openapi_python_client.parser.properties.protocol import Value +from openapi_python_client.parser.properties.schemas import Class +from openapi_python_client.parser.properties.string import StringProperty + +MODULE_NAME = "openapi_python_client.parser.properties.merge_properties" + + +def 
test_merge_basic_attributes_same_type( + boolean_property_factory, + int_property_factory, + float_property_factory, + string_property_factory, + list_property_factory, + model_property_factory, +): + basic_props = [ + boolean_property_factory(default=Value(python_code="True", raw_value="True")), + int_property_factory(default=Value("1", 1)), + float_property_factory(default=Value("1.5", 1.5)), + string_property_factory(default=StringProperty.convert_value("x")), + list_property_factory(), + model_property_factory(), + ] + for basic_prop in basic_props: + with_required = evolve(basic_prop, required=True) + assert merge_properties(basic_prop, with_required) == with_required + assert merge_properties(with_required, basic_prop) == with_required + without_default = evolve(basic_prop, default=None) + assert merge_properties(basic_prop, without_default) == basic_prop + assert merge_properties(without_default, basic_prop) == basic_prop + with_desc1 = evolve(basic_prop, description="desc1") + with_desc2 = evolve(basic_prop, description="desc2") + assert merge_properties(basic_prop, with_desc1) == with_desc1 + assert merge_properties(with_desc1, basic_prop) == with_desc1 + assert merge_properties(with_desc1, with_desc2) == with_desc2 + + +def test_incompatible_types( + boolean_property_factory, + int_property_factory, + float_property_factory, + string_property_factory, + list_property_factory, + model_property_factory, +): + props = [ + boolean_property_factory(default=True), + int_property_factory(default=1), + float_property_factory(default=1.5), + string_property_factory(default="x"), + list_property_factory(), + model_property_factory(), + ] + + for prop1, prop2 in permutations(props, 2): + if {prop1.__class__, prop2.__class__} == {IntProperty, FloatProperty}: + continue # the int+float case is covered in another test + error = merge_properties(prop1, prop2) + assert isinstance(error, PropertyError), f"Expected {type(prop1)} and {type(prop2)} to be incompatible" + + +def test_merge_int_with_float(int_property_factory, float_property_factory): + int_prop = int_property_factory(description="desc1") + float_prop = float_property_factory(default=Value("2", 2), description="desc2") + + assert merge_properties(int_prop, float_prop) == ( + evolve(int_prop, default=Value("2", 2), description=float_prop.description) + ) + assert merge_properties(float_prop, int_prop) == evolve(int_prop, default=Value("2", 2)) + + float_prop_with_non_int_default = evolve(float_prop, default=Value("2.5", 2.5)) + error = merge_properties(int_prop, float_prop_with_non_int_default) + assert isinstance(error, PropertyError), "Expected invalid default to error" + assert error.detail == "Invalid int value: 2.5" + + +def test_merge_with_any( + any_property_factory, + boolean_property_factory, + int_property_factory, + float_property_factory, + string_property_factory, + model_property_factory, +): + original_desc = "description" + props = [ + boolean_property_factory(default=Value("True", "True"), description=original_desc), + int_property_factory(default=Value("1", "1"), description=original_desc), + float_property_factory(default=Value("1.5", "1.5"), description=original_desc), + string_property_factory(default=StringProperty.convert_value("x"), description=original_desc), + model_property_factory(description=original_desc), + ] + any_prop = any_property_factory() + for prop in props: + assert merge_properties(any_prop, prop) == prop + assert merge_properties(prop, any_prop) == prop + + 
+@pytest.mark.parametrize("literal_enums", (False, True)) +def test_merge_enums(literal_enums, enum_property_factory, literal_enum_property_factory, config): + if literal_enums: + enum_with_fewer_values = literal_enum_property_factory( + description="desc1", + values={"A", "B"}, + value_type=str, + ) + enum_with_more_values = literal_enum_property_factory( + example="example2", + values={"A", "B", "C"}, + value_type=str, + ) + else: + enum_with_fewer_values = enum_property_factory( + description="desc1", + values={"A": "A", "B": "B"}, + value_type=str, + ) + enum_with_more_values = enum_property_factory( + example="example2", + values={"A": "A", "B": "B", "C": "C"}, + value_type=str, + ) + + # Setting class_info separately because it doesn't get initialized by the constructor - we want + # to make sure the right enum class name gets used in the merged property + enum_with_fewer_values.class_info = Class.from_string(string="FewerValuesEnum", config=config) + enum_with_more_values.class_info = Class.from_string(string="MoreValuesEnum", config=config) + + assert merge_properties(enum_with_fewer_values, enum_with_more_values) == evolve( + enum_with_more_values, + values=enum_with_fewer_values.values, + class_info=enum_with_fewer_values.class_info, + description=enum_with_fewer_values.description, + ) + assert merge_properties(enum_with_more_values, enum_with_fewer_values) == evolve( + enum_with_fewer_values, + example=enum_with_more_values.example, + ) + + +@pytest.mark.parametrize("literal_enums", (False, True)) +def test_merge_string_with_string_enum( + literal_enums, string_property_factory, enum_property_factory, literal_enum_property_factory +): + string_prop = string_property_factory(default=Value("A", "A"), description="desc1", example="example1") + enum_prop = ( + literal_enum_property_factory( + default=Value("'B'", "B"), + description="desc2", + example="example2", + values={"A", "B"}, + value_type=str, + ) + if literal_enums + else enum_property_factory( + default=Value("test.B", "B"), + description="desc2", + example="example2", + values={"A": "A", "B": "B"}, + value_type=str, + ) + ) + + assert merge_properties(string_prop, enum_prop) == evolve(enum_prop, required=True) + assert merge_properties(enum_prop, string_prop) == evolve( + enum_prop, + required=True, + default=Value("'A'" if literal_enums else "test.A", "A"), + description=string_prop.description, + example=string_prop.example, + ) + + +@pytest.mark.parametrize("literal_enums", (False, True)) +def test_merge_int_with_int_enum( + literal_enums, int_property_factory, enum_property_factory, literal_enum_property_factory +): + int_prop = int_property_factory(default=Value("1", 1), description="desc1", example="example1") + enum_prop = ( + literal_enum_property_factory( + default=Value("1", 1), + description="desc2", + example="example2", + values={1, 2}, + value_type=int, + ) + if literal_enums + else enum_property_factory( + default=Value("test.VALUE_1", 1), + description="desc2", + example="example2", + values={"VALUE_1": 1, "VALUE_2": 2}, + value_type=int, + ) + ) + + assert merge_properties(int_prop, enum_prop) == evolve(enum_prop, required=True) + assert merge_properties(enum_prop, int_prop) == evolve( + enum_prop, required=True, description=int_prop.description, example=int_prop.example + ) + + +@pytest.mark.parametrize("literal_enums", (False, True)) +def test_merge_with_incompatible_enum( + literal_enums, + boolean_property_factory, + int_property_factory, + float_property_factory, + string_property_factory, + 
enum_property_factory, + literal_enum_property_factory, + model_property_factory, +): + props = [ + boolean_property_factory(), + int_property_factory(), + float_property_factory(), + string_property_factory(), + model_property_factory(), + enum_property_factory(values={"INCOMPATIBLE": "INCOMPATIBLE"}), + literal_enum_property_factory(values={"INCOMPATIBLE"}), + ] + string_enum_prop = ( + literal_enum_property_factory(value_type=str, values={"A"}) + if literal_enums + else enum_property_factory(value_type=str, values={"A": "A"}) + ) + int_enum_prop = ( + literal_enum_property_factory(value_type=int, values={1}) + if literal_enums + else enum_property_factory(value_type=int, values={"VALUE_1": 1}) + ) + for prop in props: + if not isinstance(prop, StringProperty): + assert isinstance(merge_properties(prop, string_enum_prop), PropertyError) + assert isinstance(merge_properties(string_enum_prop, prop), PropertyError) + if not isinstance(prop, IntProperty): + assert isinstance(merge_properties(prop, int_enum_prop), PropertyError) + assert isinstance(merge_properties(int_enum_prop, prop), PropertyError) + + +def test_merge_string_with_formatted_string( + date_property_factory, + date_time_property_factory, + file_property_factory, + string_property_factory, +): + string_prop = string_property_factory(description="a plain string") + string_prop_with_invalid_default = string_property_factory( + default=StringProperty.convert_value("plain string value") + ) + formatted_props = [ + date_property_factory(description="a date"), + date_time_property_factory(description="a datetime"), + file_property_factory(description="a file"), + ] + for formatted_prop in formatted_props: + merged1 = merge_properties(string_prop, formatted_prop) + assert isinstance(merged1, formatted_prop.__class__) + assert merged1.description == formatted_prop.description + + merged2 = merge_properties(formatted_prop, string_prop) + assert isinstance(merged2, formatted_prop.__class__) + assert merged2.description == string_prop.description + + assert isinstance(merge_properties(string_prop_with_invalid_default, formatted_prop), PropertyError) + assert isinstance(merge_properties(formatted_prop, string_prop_with_invalid_default), PropertyError) + + +def test_merge_lists(int_property_factory, list_property_factory, string_property_factory): + string_prop_1 = string_property_factory(description="desc1") + string_prop_2 = string_property_factory(example="desc2") + int_prop = int_property_factory() + list_prop_1 = list_property_factory(inner_property=string_prop_1) + list_prop_2 = list_property_factory(inner_property=string_prop_2) + list_prop_3 = list_property_factory(inner_property=int_prop) + + assert merge_properties(list_prop_1, list_prop_2) == evolve( + list_prop_1, inner_property=merge_properties(string_prop_1, string_prop_2) + ) + + assert isinstance(merge_properties(list_prop_1, list_prop_3), PropertyError) diff --git a/tests/test_parser/test_properties/test_model_property.py b/tests/test_parser/test_properties/test_model_property.py index 917582042..f84a31a17 100644 --- a/tests/test_parser/test_properties/test_model_property.py +++ b/tests/test_parser/test_properties/test_model_property.py @@ -1,46 +1,42 @@ -from typing import Optional - import pytest from attr import evolve import openapi_python_client.schema as oai from openapi_python_client.parser.errors import PropertyError -from openapi_python_client.parser.properties import Schemas, StringProperty -from openapi_python_client.parser.properties.model_property import 
_process_properties +from openapi_python_client.parser.properties import Class, ModelProperty, Schemas, StringProperty +from openapi_python_client.parser.properties.model_property import ( + ANY_ADDITIONAL_PROPERTY, + _process_properties, + _PropertyData, + process_model, +) MODULE_NAME = "openapi_python_client.parser.properties.model_property" class TestModelProperty: @pytest.mark.parametrize( - "no_optional,required,json,quoted,expected", + "no_optional,required,json,expected", [ - (False, False, False, False, "Union[Unset, MyClass]"), - (False, True, False, False, "MyClass"), - (True, False, False, False, "MyClass"), - (True, True, False, False, "MyClass"), - (False, True, True, False, "Dict[str, Any]"), - (False, False, False, True, "Union[Unset, 'MyClass']"), - (False, True, False, True, "'MyClass'"), - (True, False, False, True, "'MyClass'"), - (True, True, False, True, "'MyClass'"), - (False, True, True, True, "Dict[str, Any]"), + (False, False, False, "MyClass | Unset"), + (False, True, False, "MyClass"), + (True, False, False, "MyClass"), + (True, True, False, "MyClass"), + (False, True, True, "dict[str, Any]"), ], ) - def test_get_type_string(self, no_optional, required, json, expected, model_property_factory, quoted): + def test_get_type_string(self, no_optional, required, json, expected, model_property_factory): prop = model_property_factory( required=required, ) - assert prop.get_type_string(no_optional=no_optional, json=json, quoted=quoted) == expected + assert prop.get_type_string(no_optional=no_optional, json=json) == expected def test_get_imports(self, model_property_factory): prop = model_property_factory(required=False) assert prop.get_imports(prefix="..") == { - "from typing import Union", "from ..types import UNSET, Unset", - "from typing import Dict", "from typing import cast", } @@ -51,29 +47,19 @@ def test_get_lazy_imports(self, model_property_factory): "from ..models.my_module import MyClass", } - def test_is_base_type(self, model_property_factory): - assert model_property_factory().is_base_type is False - - @pytest.mark.parametrize( - "quoted,expected", - [ - (False, "MyClass"), - (True, '"MyClass"'), - ], - ) - def test_get_base_type_string(self, quoted, expected, model_property_factory): + def test_get_base_type_string(self, model_property_factory): m = model_property_factory() - assert m.get_base_type_string(quoted=quoted) == expected + assert m.get_base_type_string() == "MyClass" class TestBuild: @pytest.mark.parametrize( "additional_properties_schema, expected_additional_properties", [ - (True, True), - (oai.Schema.model_construct(), True), - (None, True), - (False, False), + (True, ANY_ADDITIONAL_PROPERTY), + (oai.Schema.model_construct(), ANY_ADDITIONAL_PROPERTY), + (None, ANY_ADDITIONAL_PROPERTY), + (False, None), ( oai.Schema.model_construct(type="string"), StringProperty( @@ -88,8 +74,6 @@ class TestBuild: ], ) def test_additional_schemas(self, additional_properties_schema, expected_additional_properties, config): - from openapi_python_client.parser.properties import ModelProperty, Schemas - data = oai.Schema.model_construct( additionalProperties=additional_properties_schema, ) @@ -108,8 +92,6 @@ def test_additional_schemas(self, additional_properties_schema, expected_additio assert model.additional_properties == expected_additional_properties def test_happy_path(self, model_property_factory, string_property_factory, date_time_property_factory, config): - from openapi_python_client.parser.properties import Class, ModelProperty, Schemas - name = "prop" 
required = True @@ -160,15 +142,12 @@ def test_happy_path(self, model_property_factory, string_property_factory, date_ "from typing import cast", "import datetime", "from ..types import UNSET, Unset", - "from typing import Union", }, lazy_imports=set(), - additional_properties=True, + additional_properties=ANY_ADDITIONAL_PROPERTY, ) def test_model_name_conflict(self, config): - from openapi_python_client.parser.properties import ModelProperty - data = oai.Schema.model_construct() schemas = Schemas(classes_by_name={"OtherModel": None}) @@ -208,14 +187,12 @@ def test_model_name_conflict(self, config): def test_model_naming( self, name: str, - title: Optional[str], - parent_name: Optional[str], + title: str | None, + parent_name: str | None, use_title_prefixing: bool, expected: str, config, ): - from openapi_python_client.parser.properties import ModelProperty - data = oai.Schema( title=title, properties={}, @@ -234,8 +211,6 @@ def test_model_naming( assert result.class_info.name == expected def test_model_bad_properties(self, config): - from openapi_python_client.parser.properties import ModelProperty - data = oai.Schema( properties={ "bad": oai.Reference.model_construct(ref="#/components/schema/NotExist"), @@ -254,8 +229,6 @@ def test_model_bad_properties(self, config): assert isinstance(result, PropertyError) def test_model_bad_additional_properties(self, config): - from openapi_python_client.parser.properties import ModelProperty - additional_properties = oai.Schema( type="object", properties={ @@ -276,8 +249,6 @@ def test_model_bad_additional_properties(self, config): assert isinstance(result, PropertyError) def test_process_properties_false(self, model_property_factory, config): - from openapi_python_client.parser.properties import Class, ModelProperty - name = "prop" required = True @@ -325,7 +296,7 @@ def test_process_properties_false(self, model_property_factory, config): class TestProcessProperties: def test_conflicting_properties_different_types( - self, model_property_factory, string_property_factory, date_time_property_factory, config + self, model_property_factory, string_property_factory, int_property_factory, config ): data = oai.Schema.model_construct( allOf=[oai.Reference.model_construct(ref="#/First"), oai.Reference.model_construct(ref="#/Second")] @@ -335,9 +306,7 @@ def test_conflicting_properties_different_types( "/First": model_property_factory( required_properties=[], optional_properties=[string_property_factory()] ), - "/Second": model_property_factory( - required_properties=[], optional_properties=[date_time_property_factory()] - ), + "/Second": model_property_factory(required_properties=[], optional_properties=[int_property_factory()]), } ) @@ -403,25 +372,6 @@ def test_reference_not_processed(self, model_property_factory, config): assert isinstance(result, PropertyError) - def test_conflicting_properties_same_types(self, model_property_factory, string_property_factory, config): - data = oai.Schema.model_construct( - allOf=[oai.Reference.model_construct(ref="#/First"), oai.Reference.model_construct(ref="#/Second")] - ) - schemas = Schemas( - classes_by_reference={ - "/First": model_property_factory( - required_properties=[], optional_properties=[string_property_factory(default="abc")] - ), - "/Second": model_property_factory( - required_properties=[], optional_properties=[string_property_factory()] - ), - } - ) - - result = _process_properties(data=data, schemas=schemas, class_name="", config=config, roots={"root"}) - - assert isinstance(result, PropertyError) - def 
test_allof_string_and_string_enum( self, model_property_factory, enum_property_factory, string_property_factory, config ): @@ -593,6 +543,42 @@ def test_duplicate_properties(self, model_property_factory, string_property_fact assert result.optional_props == [prop], "There should only be one copy of duplicate properties" + def test_allof_required_override(self, model_property_factory, string_property_factory, config): + """Test that required field can be overridden in allOf schemas""" + # Simulates: + # FooBase: + # type: object + # properties: + # bar: {type: string} + # baz: {type: string} + # FooCreate: + # allOf: + # - $ref: '#/components/schemas/FooBase' + # - type: object + # required: [bar] + bar_prop = string_property_factory(name="bar", required=False) + baz_prop = string_property_factory(name="baz", required=False) + + data = oai.Schema.model_construct( + allOf=[ + oai.Reference.model_construct(ref="#/FooBase"), + oai.Schema.model_construct(type="object", required=["bar"]), + ] + ) + schemas = Schemas( + classes_by_reference={ + "/FooBase": model_property_factory(required_properties=[], optional_properties=[bar_prop, baz_prop]), + } + ) + + result = _process_properties(data=data, schemas=schemas, class_name="FooCreate", config=config, roots={"root"}) + + # bar should now be required, baz should remain optional + assert len(result.required_props) == 1 + assert result.required_props[0].name == "bar" + assert len(result.optional_props) == 1 + assert result.optional_props[0].name == "baz" + @pytest.mark.parametrize("first_required", [True, False]) @pytest.mark.parametrize("second_required", [True, False]) def test_mixed_requirements( @@ -663,12 +649,36 @@ def test_conflicting_property_names(self, config): result = _process_properties(data=data, schemas=schemas, class_name="", config=config, roots={"root"}) assert isinstance(result, PropertyError) + def test_merge_inline_objects(self, model_property_factory, enum_property_factory, config): + data = oai.Schema.model_construct( + allOf=[ + oai.Schema.model_construct( + type="object", + properties={ + "prop1": oai.Schema.model_construct(type="string", default="a"), + }, + ), + oai.Schema.model_construct( + type="object", + properties={ + "prop1": oai.Schema.model_construct(type="string", description="desc"), + }, + ), + ] + ) + schemas = Schemas() + + result = _process_properties(data=data, schemas=schemas, class_name="", config=config, roots={"root"}) + assert not isinstance(result, PropertyError) + assert len(result.optional_props) == 1 + prop1 = result.optional_props[0] + assert isinstance(prop1, StringProperty) + assert prop1.description == "desc" + assert prop1.default == StringProperty.convert_value("a") + class TestProcessModel: def test_process_model_error(self, mocker, model_property_factory, config): - from openapi_python_client.parser.properties import Schemas - from openapi_python_client.parser.properties.model_property import process_model - model_prop = model_property_factory() schemas = Schemas() process_property_data = mocker.patch(f"{MODULE_NAME}._process_property_data") @@ -683,9 +693,6 @@ def test_process_model_error(self, mocker, model_property_factory, config): assert model_prop.additional_properties is None def test_process_model(self, mocker, model_property_factory, config): - from openapi_python_client.parser.properties import Schemas - from openapi_python_client.parser.properties.model_property import _PropertyData, process_model - model_prop = model_property_factory() schemas = Schemas() property_data = 
_PropertyData( @@ -710,11 +717,9 @@ def test_process_model(self, mocker, model_property_factory, config): def test_set_relative_imports(model_property_factory): - from openapi_python_client.parser.properties import Class - class_info = Class("ClassName", module_name="module_name") - relative_imports = {"from typing import List", f"from ..models.{class_info.module_name} import {class_info.name}"} + relative_imports = {f"from ..models.{class_info.module_name} import {class_info.name}"} model_property = model_property_factory(class_info=class_info, relative_imports=relative_imports) - assert model_property.relative_imports == {"from typing import List"} + assert model_property.relative_imports == set() diff --git a/tests/test_parser/test_properties/test_none.py b/tests/test_parser/test_properties/test_none.py index 500d078e9..b6289cdb8 100644 --- a/tests/test_parser/test_properties/test_none.py +++ b/tests/test_parser/test_properties/test_none.py @@ -1,9 +1,12 @@ from openapi_python_client.parser.errors import PropertyError from openapi_python_client.parser.properties import NoneProperty from openapi_python_client.parser.properties.protocol import Value +from openapi_python_client.utils import PythonIdentifier def test_default(): + # currently this is testing an unused code path: + # https://github.com/openapi-generators/openapi-python-client/issues/1162 err = NoneProperty.build( default="not None", description=None, example=None, required=False, @@ -18,11 +21,11 @@ def test_default(): def test_dont_retest_values(): prop = NoneProperty.build( - default=Value("not None"), + default=Value("not None", "not None"), description=None, example=None, required=False, - python_name="not_none", + python_name=PythonIdentifier("not_none", ""), name="not_none", ) diff --git a/tests/test_parser/test_properties/test_protocol.py b/tests/test_parser/test_properties/test_protocol.py index a110f4ed9..625556c5b 100644 --- a/tests/test_parser/test_properties/test_protocol.py +++ b/tests/test_parser/test_properties/test_protocol.py @@ -1,45 +1,45 @@ -import pytest +from __future__ import annotations +import pytest -def test_is_base_type(any_property_factory): - assert any_property_factory().is_base_type is True +from openapi_python_client.parser.properties import AnyProperty +from openapi_python_client.parser.properties.protocol import Value @pytest.mark.parametrize( - "required,no_optional,json,quoted,expected", + "required,no_optional,json,expected", [ - (False, False, False, False, "Union[Unset, TestType]"), - (False, True, False, False, "TestType"), - (True, False, False, False, "TestType"), - (True, True, False, False, "TestType"), - (False, False, True, False, "Union[Unset, str]"), - (False, True, True, False, "str"), - (True, False, True, False, "str"), - (True, True, True, False, "str"), + (False, False, False, "TestType | Unset"), + (False, True, False, "TestType"), + (True, False, False, "TestType"), + (True, True, False, "TestType"), + (False, False, True, "str | Unset"), + (False, True, True, "str"), + (True, False, True, "str"), + (True, True, True, "str"), ], ) -def test_get_type_string(any_property_factory, mocker, required, no_optional, json, expected, quoted): - from openapi_python_client.parser.properties import AnyProperty - +def test_get_type_string(any_property_factory, mocker, required, no_optional, json, expected): mocker.patch.object(AnyProperty, "_type_string", "TestType") mocker.patch.object(AnyProperty, "_json_type_string", "str") p = any_property_factory(required=required) - 
assert p.get_type_string(no_optional=no_optional, json=json, quoted=quoted) == expected + assert p.get_type_string(no_optional=no_optional, json=json) == expected @pytest.mark.parametrize( "default,required,expected", [ - (None, False, "test: Union[Unset, TestType] = UNSET"), - (None, True, "test: TestType"), - ("Test", False, "test: Union[Unset, TestType] = Test"), - ("Test", True, "test: TestType = Test"), + (None, False, "test: Any | Unset = UNSET"), + (None, True, "test: Any"), + ("Test", False, "test: Any | Unset = Test"), + ("Test", True, "test: Any = Test"), ], ) -def test_to_string(mocker, default, required, expected, any_property_factory): +def test_to_string(default: str | None, required: bool, expected: str, any_property_factory): name = "test" - mocker.patch("openapi_python_client.parser.properties.AnyProperty._type_string", "TestType") - p = any_property_factory(name=name, required=required, default=default) + p = any_property_factory( + name=name, required=required, default=Value(default, default) if default is not None else None + ) assert p.to_string() == expected @@ -49,34 +49,4 @@ def test_get_imports(any_property_factory): assert p.get_imports(prefix="") == set() p = any_property_factory(name="test", required=False, default=None) - assert p.get_imports(prefix="") == {"from types import UNSET, Unset", "from typing import Union"} - - -@pytest.mark.parametrize( - "quoted,expected", - [ - (False, "TestType"), - (True, "TestType"), - ], -) -def test_get_base_type_string(quoted, expected, any_property_factory, mocker): - from openapi_python_client.parser.properties import AnyProperty - - mocker.patch.object(AnyProperty, "_type_string", "TestType") - p = any_property_factory() - assert p.get_base_type_string(quoted=quoted) is expected - - -@pytest.mark.parametrize( - "quoted,expected", - [ - (False, "str"), - (True, "str"), - ], -) -def test_get_base_json_type_string(quoted, expected, any_property_factory, mocker): - from openapi_python_client.parser.properties import AnyProperty - - mocker.patch.object(AnyProperty, "_json_type_string", "str") - p = any_property_factory() - assert p.get_base_json_type_string(quoted=quoted) is expected + assert p.get_imports(prefix="") == {"from types import UNSET, Unset"} diff --git a/tests/test_parser/test_properties/test_schemas.py b/tests/test_parser/test_properties/test_schemas.py index 5560795cf..7e7af8514 100644 --- a/tests/test_parser/test_properties/test_schemas.py +++ b/tests/test_parser/test_properties/test_schemas.py @@ -1,18 +1,21 @@ import pytest from attr import evolve +from openapi_python_client.config import ClassOverride from openapi_python_client.parser.errors import ParameterError from openapi_python_client.parser.properties import Class, Parameters -from openapi_python_client.parser.properties.schemas import parameter_from_reference -from openapi_python_client.schema import Parameter, Reference +from openapi_python_client.parser.properties.schemas import ( + parameter_from_data, + parameter_from_reference, + update_parameters_with_data, +) +from openapi_python_client.schema import Parameter, ParameterLocation, Reference, Schema from openapi_python_client.utils import ClassName MODULE_NAME = "openapi_python_client.parser.properties.schemas" def test_class_from_string_default_config(config): - from openapi_python_client.parser.properties import Class - class_ = Class.from_string(string="#/components/schemas/PingResponse", config=config) assert class_.name == "PingResponse" @@ -29,9 +32,6 @@ def 
test_class_from_string_default_config(config): ), ) def test_class_from_string(class_override, module_override, expected_class, expected_module, config): - from openapi_python_client.config import ClassOverride - from openapi_python_client.parser.properties import Class - ref = "#/components/schemas/MyResponse" config = evolve( config, class_overrides={"MyResponse": ClassOverride(class_name=class_override, module_name=module_override)} @@ -44,9 +44,6 @@ def test_class_from_string(class_override, module_override, expected_class, expe class TestParameterFromData: def test_cannot_parse_parameters_by_reference(self, config): - from openapi_python_client.parser.properties import Parameters - from openapi_python_client.parser.properties.schemas import parameter_from_data - ref = Reference.model_construct(ref="#/components/parameters/a_param") parameters = Parameters() param_or_error, new_parameters = parameter_from_data( @@ -56,10 +53,6 @@ def test_cannot_parse_parameters_by_reference(self, config): assert new_parameters == parameters def test_parameters_without_schema_are_ignored(self, config): - from openapi_python_client.parser.properties import Parameters - from openapi_python_client.parser.properties.schemas import parameter_from_data - from openapi_python_client.schema import ParameterLocation - param = Parameter(name="a_schemaless_param", param_in=ParameterLocation.QUERY) parameters = Parameters() param_or_error, new_parameters = parameter_from_data( @@ -69,10 +62,6 @@ def test_parameters_without_schema_are_ignored(self, config): assert new_parameters == parameters def test_registers_new_parameters(self, config): - from openapi_python_client.parser.properties import Parameters - from openapi_python_client.parser.properties.schemas import parameter_from_data - from openapi_python_client.schema import ParameterLocation, Schema - param = Parameter.model_construct( name="a_param", param_in=ParameterLocation.QUERY, param_schema=Schema.model_construct() ) @@ -119,9 +108,6 @@ def test_returns_reference_from_registry(self): class TestUpdateParametersFromData: def test_reports_parameters_with_errors(self, mocker, config): - from openapi_python_client.parser.properties.schemas import update_parameters_with_data - from openapi_python_client.schema import ParameterLocation, Schema - parameters = Parameters() param = Parameter.model_construct( name="a_param", param_in=ParameterLocation.QUERY, param_schema=Schema.model_construct() @@ -141,9 +127,6 @@ def test_reports_parameters_with_errors(self, mocker, config): ) def test_records_references_to_parameters(self, mocker, config): - from openapi_python_client.parser.properties.schemas import update_parameters_with_data - from openapi_python_client.schema import ParameterLocation, Schema - parameters = Parameters() param = Parameter.model_construct( name="a_param", param_in=ParameterLocation.QUERY, param_schema=Schema.model_construct() diff --git a/tests/test_parser/test_properties/test_union.py b/tests/test_parser/test_properties/test_union.py index d8a5d762c..902603f2e 100644 --- a/tests/test_parser/test_properties/test_union.py +++ b/tests/test_parser/test_properties/test_union.py @@ -1,62 +1,9 @@ import openapi_python_client.schema as oai -from openapi_python_client.parser.errors import ParseError, PropertyError -from openapi_python_client.parser.properties import Schemas, UnionProperty +from openapi_python_client.parser.errors import ParseError +from openapi_python_client.parser.properties import Schemas, UnionProperty, property_from_data from 
openapi_python_client.schema import DataType, ParameterLocation -def test_property_from_data_union(union_property_factory, date_time_property_factory, string_property_factory, config): - from openapi_python_client.parser.properties import Schemas, property_from_data - - name = "union_prop" - required = True - data = oai.Schema( - anyOf=[oai.Schema(type=DataType.STRING, default="a")], - oneOf=[ - oai.Schema(type=DataType.STRING, schema_format="date-time"), - ], - ) - expected = union_property_factory( - name=name, - required=required, - inner_properties=[ - string_property_factory(name=f"{name}_type_0", default="'a'"), - date_time_property_factory(name=f"{name}_type_1"), - ], - ) - - p, s = property_from_data( - name=name, required=required, data=data, schemas=Schemas(), parent_name="parent", config=config - ) - - assert p == expected - assert s == Schemas() - - -def test_build_union_property_invalid_property(config): - name = "bad_union" - required = True - reference = oai.Reference.model_construct(ref="#/components/schema/NotExist") - data = oai.Schema(anyOf=[reference]) - - p, s = UnionProperty.build( - name=name, required=required, data=data, schemas=Schemas(), parent_name="parent", config=config - ) - assert p == PropertyError(detail=f"Invalid property in union {name}", data=reference) - - -def test_invalid_default(config): - data = oai.Schema( - type=[DataType.NUMBER, DataType.INTEGER], - default="a", - ) - - err, _ = UnionProperty.build( - data=data, required=True, schemas=Schemas(), parent_name="parent", name="name", config=config - ) - - assert isinstance(err, PropertyError) - - def test_invalid_location(config): data = oai.Schema( type=[DataType.NUMBER, DataType.NULL], @@ -81,3 +28,55 @@ def test_not_required_in_path(config): err = prop.validate_location(ParameterLocation.PATH) assert isinstance(err, ParseError) + + +def test_union_oneOf_descriptive_type_name( + union_property_factory, + date_time_property_factory, + string_property_factory, + boolean_property_factory, + date_property_factory, + int_property_factory, + float_property_factory, + config, +): + nested_schema_variant_A = oai.Schema(type=DataType.STRING, title="A") + nested_schema_variant_B = oai.Schema(type=DataType.INTEGER, title="B") + nested_schema_variant_2 = oai.Schema(type=DataType.NUMBER) + nested_schema_variant_C = oai.Schema(type=DataType.BOOLEAN, title="C") + + name = "union_prop" + required = True + data = oai.Schema( + anyOf=[ + # AnyOf retains the old naming convention + nested_schema_variant_C, + oai.Schema(type=DataType.STRING, schema_format="date"), + ], + oneOf=[ + # OneOf fields that define their own titles will have those titles as their Type names + nested_schema_variant_A, + nested_schema_variant_B, + nested_schema_variant_2, + oai.Schema(type=DataType.STRING, schema_format="date-time"), + ], + ) + expected = union_property_factory( + name=name, + required=required, + inner_properties=[ + boolean_property_factory(name=f"{name}_C"), + date_property_factory(name=f"{name}_type_1"), + string_property_factory(name=f"{name}_A"), + int_property_factory(name=f"{name}_B"), + float_property_factory(name=f"{name}_type_4"), + date_time_property_factory(name=f"{name}_type_5"), + ], + ) + + p, s = property_from_data( + name=name, required=required, data=data, schemas=Schemas(), parent_name="parent", config=config + ) + + assert p == expected + assert s == Schemas() diff --git a/tests/test_parser/test_responses.py b/tests/test_parser/test_responses.py index 0342112c5..6c0f60e2c 100644 --- 
a/tests/test_parser/test_responses.py +++ b/tests/test_parser/test_responses.py @@ -1,28 +1,36 @@ from unittest.mock import MagicMock +import pytest + import openapi_python_client.schema as oai +from openapi_python_client.parser import responses from openapi_python_client.parser.errors import ParseError, PropertyError from openapi_python_client.parser.properties import Schemas -from openapi_python_client.parser.responses import JSON_SOURCE, NONE_SOURCE +from openapi_python_client.parser.responses import ( + JSON_SOURCE, + NONE_SOURCE, + HTTPStatusPattern, + Response, + response_from_data, +) MODULE_NAME = "openapi_python_client.parser.responses" def test_response_from_data_no_content(any_property_factory): - from openapi_python_client.parser.responses import Response, response_from_data - data = oai.Response.model_construct(description="") - response, schemas = response_from_data( - status_code=200, + response, _schemas = response_from_data( + status_code=HTTPStatusPattern(pattern="200", code_range=(200, 200)), data=data, schemas=Schemas(), + responses={}, parent_name="parent", config=MagicMock(), ) assert response == Response( - status_code=200, + status_code=HTTPStatusPattern(pattern="200", code_range=(200, 200)), prop=any_property_factory( name="response_200", default=None, @@ -34,63 +42,43 @@ def test_response_from_data_no_content(any_property_factory): ) -def test_response_from_data_reference(any_property_factory): - from openapi_python_client.parser.responses import Response, response_from_data - - data = oai.Reference.model_construct() - - response, schemas = response_from_data( - status_code=200, - data=data, - schemas=Schemas(), - parent_name="parent", - config=MagicMock(), - ) - - assert response == Response( - status_code=200, - prop=any_property_factory( - name="response_200", - default=None, - required=True, - ), - source=NONE_SOURCE, - data=data, - ) +status_code = HTTPStatusPattern(pattern="200", code_range=(200, 200)) def test_response_from_data_unsupported_content_type(): - from openapi_python_client.parser.responses import response_from_data - data = oai.Response.model_construct(description="", content={"blah": None}) - response, schemas = response_from_data( - status_code=200, + config = MagicMock() + config.content_type_overrides = {} + response, _schemas = response_from_data( + status_code=status_code, data=data, schemas=Schemas(), + responses={}, parent_name="parent", - config=MagicMock(), + config=config, ) assert response == ParseError(data=data, detail="Unsupported content_type {'blah': None}") def test_response_from_data_no_content_schema(any_property_factory): - from openapi_python_client.parser.responses import Response, response_from_data - data = oai.Response.model_construct( description="", content={"application/vnd.api+json; version=2.2": oai.MediaType.model_construct()}, ) - response, schemas = response_from_data( - status_code=200, + config = MagicMock() + config.content_type_overrides = {} + response, _schemas = response_from_data( + status_code=status_code, data=data, schemas=Schemas(), + responses={}, parent_name="parent", - config=MagicMock(), + config=config, ) assert response == Response( - status_code=200, + status_code=status_code, prop=any_property_factory( name="response_200", default=None, @@ -103,19 +91,19 @@ def test_response_from_data_no_content_schema(any_property_factory): def test_response_from_data_property_error(mocker): - from openapi_python_client.parser import responses - property_from_data = mocker.patch.object(responses, 
"property_from_data", return_value=(PropertyError(), Schemas())) data = oai.Response.model_construct( description="", content={"application/json": oai.MediaType.model_construct(media_type_schema="something")}, ) config = MagicMock() + config.content_type_overrides = {} - response, schemas = responses.response_from_data( - status_code=400, + response, _schemas = responses.response_from_data( + status_code=HTTPStatusPattern(pattern="400", code_range=(400, 400)), data=data, schemas=Schemas(), + responses={}, parent_name="parent", config=config, ) @@ -132,8 +120,6 @@ def test_response_from_data_property_error(mocker): def test_response_from_data_property(mocker, any_property_factory): - from openapi_python_client.parser import responses - prop = any_property_factory() property_from_data = mocker.patch.object(responses, "property_from_data", return_value=(prop, Schemas())) data = oai.Response.model_construct( @@ -141,17 +127,20 @@ def test_response_from_data_property(mocker, any_property_factory): content={"application/json": oai.MediaType.model_construct(media_type_schema="something")}, ) config = MagicMock() + config.content_type_overrides = {} + status_code = HTTPStatusPattern(pattern="400", code_range=(400, 400)) - response, schemas = responses.response_from_data( - status_code=400, + response, _schemas = responses.response_from_data( + status_code=status_code, data=data, schemas=Schemas(), + responses={}, parent_name="parent", config=config, ) assert response == responses.Response( - status_code=400, + status_code=status_code, prop=prop, source=JSON_SOURCE, data=data, @@ -164,3 +153,139 @@ def test_response_from_data_property(mocker, any_property_factory): parent_name="parent", config=config, ) + + +def test_response_from_data_reference(mocker, any_property_factory): + prop = any_property_factory() + mocker.patch.object(responses, "property_from_data", return_value=(prop, Schemas())) + predefined_response_data = oai.Response.model_construct( + description="", + content={"application/json": oai.MediaType.model_construct(media_type_schema="something")}, + ) + config = MagicMock() + config.content_type_overrides = {} + + response, _schemas = responses.response_from_data( + status_code=HTTPStatusPattern(pattern="400", code_range=(400, 400)), + data=oai.Reference.model_construct(ref="#/components/responses/ErrorResponse"), + schemas=Schemas(), + responses={"ErrorResponse": predefined_response_data}, + parent_name="parent", + config=config, + ) + + assert response == responses.Response( + status_code=HTTPStatusPattern(pattern="400", code_range=(400, 400)), + prop=prop, + source=JSON_SOURCE, + data=predefined_response_data, + ) + + +@pytest.mark.parametrize( + "ref_string,expected_error_string", + [ + ("#/components/responses/Nonexistent", "Could not find"), + ("https://remote-reference", "Remote references"), + ("#/components/something-that-isnt-responses/ErrorResponse", "not allowed in responses"), + ], +) +def test_response_from_data_invalid_reference(ref_string, expected_error_string, mocker, any_property_factory): + prop = any_property_factory() + mocker.patch.object(responses, "property_from_data", return_value=(prop, Schemas())) + predefined_response_data = oai.Response.model_construct( + description="", + content={"application/json": oai.MediaType.model_construct(media_type_schema="something")}, + ) + config = MagicMock() + config.content_type_overrides = {} + + response, _schemas = responses.response_from_data( + 
status_code=HTTPStatusPattern(pattern="400", code_range=(400, 400)), + data=oai.Reference.model_construct(ref=ref_string), + schemas=Schemas(), + responses={"ErrorResponse": predefined_response_data}, + parent_name="parent", + config=config, + ) + + assert isinstance(response, ParseError) + assert expected_error_string in response.detail + + +def test_response_from_data_ref_to_response_that_is_a_ref(mocker, any_property_factory): + prop = any_property_factory() + mocker.patch.object(responses, "property_from_data", return_value=(prop, Schemas())) + predefined_response_base_data = oai.Response.model_construct( + description="", + content={"application/json": oai.MediaType.model_construct(media_type_schema="something")}, + ) + predefined_response_data = oai.Reference.model_construct( + ref="#/components/references/BaseResponse", + ) + config = MagicMock() + config.content_type_overrides = {} + + response, _schemas = responses.response_from_data( + status_code=HTTPStatusPattern(pattern="400", code_range=(400, 400)), + data=oai.Reference.model_construct(ref="#/components/responses/ErrorResponse"), + schemas=Schemas(), + responses={ + "BaseResponse": predefined_response_base_data, + "ErrorResponse": predefined_response_data, + }, + parent_name="parent", + config=config, + ) + + assert isinstance(response, ParseError) + assert response.detail is not None and "Top-level $ref" in response.detail + + +def test_response_from_data_content_type_overrides(any_property_factory): + data = oai.Response.model_construct( + description="", + content={"application/zip": oai.MediaType.model_construct()}, + ) + config = MagicMock() + config.content_type_overrides = {"application/zip": "application/octet-stream"} + response, _schemas = response_from_data( + status_code=HTTPStatusPattern(pattern="200", code_range=(200, 200)), + data=data, + schemas=Schemas(), + responses={}, + parent_name="parent", + config=config, + ) + + assert response == Response( + status_code=HTTPStatusPattern(pattern="200", code_range=(200, 200)), + prop=any_property_factory( + name="response_200", + default=None, + required=True, + description=data.description, + ), + source=NONE_SOURCE, + data=data, + ) + + +@pytest.mark.parametrize( + "pattern1, pattern2, result", + [ + ("400", "401", True), + ("503", "500", False), + ("default", "400", False), + ("400", "default", True), + ("2XX", "3XX", True), + ("3XX", "2XX", False), + ("2XX", "400", False), + ], +) +def test_http_status_pattern_lt(pattern1: str, pattern2: str, result: bool) -> None: + first = HTTPStatusPattern.parse(pattern1) + second = HTTPStatusPattern.parse(pattern2) + assert isinstance(first, HTTPStatusPattern) + assert isinstance(second, HTTPStatusPattern) + assert (first < second) == result diff --git a/tests/test_schema/test_data_type.py b/tests/test_schema/test_data_type.py index 19aa256eb..0a4d9681d 100644 --- a/tests/test_schema/test_data_type.py +++ b/tests/test_schema/test_data_type.py @@ -1,3 +1,4 @@ +import pydantic import pytest import openapi_python_client.schema as oai @@ -5,8 +6,6 @@ class TestDataType: def test_schema_bad_types(self): - import pydantic - with pytest.raises(pydantic.ValidationError): oai.Schema(type="bad_type") diff --git a/tests/test_schema/test_noisy_refs.py b/tests/test_schema/test_noisy_refs.py new file mode 100644 index 000000000..ff5a7cf1c --- /dev/null +++ b/tests/test_schema/test_noisy_refs.py @@ -0,0 +1,89 @@ +# If a field may be reference (`Union[Reference, OtherType]`) and the dictionary +# being processed for it contains "$ref", it 
seems like it should preferentially +# be parsed as a `Reference`[1]. Since the models are defined with +# `extra="allow"`, Pydantic won't guarantee this parse if the dictionary is in +# an unspecified sense a "better match" for `OtherType`[2], e.g., perhaps if it +# has several more fields matching that type versus the single match for `$ref`. +# +# We can use a discriminated union to force parsing these dictionaries as +# `Reference`s. +# +# References: +# [1] https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#reference-object +# [2] https://docs.pydantic.dev/latest/concepts/unions/#smart-mode +from typing import Annotated, TypeVar, get_args, get_origin + +import pytest +from pydantic import TypeAdapter + +from openapi_python_client.schema.openapi_schema_pydantic import ( + Callback, + Example, + Header, + Link, + Parameter, + PathItem, + Reference, + RequestBody, + Response, + Schema, + SecurityScheme, +) + +try: + from openapi_python_client.schema.openapi_schema_pydantic.reference import ReferenceOr +except ImportError: + T = TypeVar("T") + ReferenceOr = Reference | T + + +def get_example(base_type): + schema = base_type.model_json_schema() + if "examples" in schema: + return schema["examples"][0] + if "$defs" in schema: + return schema["$defs"][base_type.__name__]["examples"][0] + raise TypeError(f"No example found for {base_type.__name__}") + + +def deannotate_type(t): + while get_origin(t) is Annotated: + t = get_args(t)[0] + return t + + +# The following types occur in various models, so we want to make sure they +# parse properly. They are verified to /fail/ to parse as of commit 3bd12f86. + + +@pytest.mark.parametrize( + ("ref_or_type", "get_example_fn"), + [ + (ReferenceOr[Callback], lambda t: {"test1": get_example(PathItem), "test2": get_example(PathItem)}), + (ReferenceOr[Example], get_example), + (ReferenceOr[Header], get_example), + (ReferenceOr[Link], get_example), + (ReferenceOr[Parameter], get_example), + (ReferenceOr[RequestBody], get_example), + (ReferenceOr[Response], get_example), + (ReferenceOr[Schema], get_example), + (ReferenceOr[SecurityScheme], get_example), + ], +) +def test_type(ref_or_type, get_example_fn): + base_type = None + for maybe_annotated_type in get_args(deannotate_type(ref_or_type)): + each_type = deannotate_type(maybe_annotated_type) + if each_type is not Reference: + base_type = each_type + break + assert base_type is not None + + example = get_example_fn(base_type) + + parsed = TypeAdapter(ref_or_type).validate_python(example) + assert type(parsed) is get_origin(base_type) or base_type + + example["$ref"] = "ref" + parsed = TypeAdapter(ref_or_type).validate_python(example) + assert type(parsed) is Reference diff --git a/tests/test_schema/test_schema.py b/tests/test_schema/test_schema.py index 4b93f2c42..0aa892af1 100644 --- a/tests/test_schema/test_schema.py +++ b/tests/test_schema/test_schema.py @@ -12,6 +12,11 @@ def test_nullable_with_allof(): assert schema.allOf == [] +def test_constant_bool(): + schema = Schema.model_validate_json('{"type":"boolean", "enum":[true], "const":true, "default":true}') + assert schema.const is True + + def test_nullable_with_type_list(): schema = Schema.model_validate_json('{"type": ["string", "number"], "nullable": true}') assert schema.type == [DataType.STRING, DataType.NUMBER, DataType.NULL] @@ -25,3 +30,39 @@ def test_nullable_with_any_of(): def test_nullable_with_one_of(): schema = 
Schema.model_validate_json('{"oneOf": [{"type": "string"}], "nullable": true}') assert schema.oneOf == [Schema(type=DataType.STRING), Schema(type=DataType.NULL)] + + +def test_exclusive_minimum_as_boolean(): + schema = Schema.model_validate_json('{"minimum": 10, "exclusiveMinimum": true}') + assert schema.exclusiveMinimum == 10 + assert schema.minimum is None + + +def test_exclusive_maximum_as_boolean(): + schema = Schema.model_validate_json('{"maximum": 100, "exclusiveMaximum": true}') + assert schema.exclusiveMaximum == 100 + assert schema.maximum is None + + +def test_exclusive_minimum_as_number(): + schema = Schema.model_validate_json('{"exclusiveMinimum": 5}') + assert schema.exclusiveMinimum == 5 + assert schema.minimum is None + + +def test_exclusive_maximum_as_number(): + schema = Schema.model_validate_json('{"exclusiveMaximum": 50}') + assert schema.exclusiveMaximum == 50 + assert schema.maximum is None + + +def test_exclusive_minimum_as_false_boolean(): + schema = Schema.model_validate_json('{"minimum": 10, "exclusiveMinimum": false}') + assert schema.exclusiveMinimum is None + assert schema.minimum == 10 + + +def test_exclusive_maximum_as_false_boolean(): + schema = Schema.model_validate_json('{"maximum": 100, "exclusiveMaximum": false}') + assert schema.exclusiveMaximum is None + assert schema.maximum == 100 diff --git a/tests/test_templates/test_property_templates/test_date_property/date_property_template.py b/tests/test_templates/test_property_templates/test_date_property/date_property_template.py.jinja similarity index 90% rename from tests/test_templates/test_property_templates/test_date_property/date_property_template.py rename to tests/test_templates/test_property_templates/test_date_property/date_property_template.py.jinja index 3709963fe..66ed7a9fc 100644 --- a/tests/test_templates/test_property_templates/test_date_property/date_property_template.py +++ b/tests/test_templates/test_property_templates/test_date_property/date_property_template.py.jinja @@ -1,5 +1,4 @@ from datetime import date -from typing import cast, Union from dateutil.parser import isoparse {% from "property_templates/date_property.py.jinja" import transform, construct %} diff --git a/tests/test_templates/test_property_templates/test_date_property/required_not_null.py b/tests/test_templates/test_property_templates/test_date_property/required_not_null.py index ad4f380a4..c2a190fab 100644 --- a/tests/test_templates/test_property_templates/test_date_property/required_not_null.py +++ b/tests/test_templates/test_property_templates/test_date_property/required_not_null.py @@ -1,5 +1,4 @@ from datetime import date -from typing import cast, Union from dateutil.parser import isoparse some_source = date(2020, 10, 12) diff --git a/tests/test_templates/test_property_templates/test_date_property/test_date_property.py b/tests/test_templates/test_property_templates/test_date_property/test_date_property.py index 98999b910..89944994c 100644 --- a/tests/test_templates/test_property_templates/test_date_property/test_date_property.py +++ b/tests/test_templates/test_property_templates/test_date_property/test_date_property.py @@ -27,7 +27,7 @@ def test_required(): lstrip_blocks=True ) - template = env.get_template("date_property_template.py") + template = env.get_template("date_property_template.py.jinja") content = template.render(property=prop) expected = here / "required_not_null.py" assert content == expected.read_text() diff --git 
a/tests/test_templates/test_property_templates/test_datetime_property/datetime_property_template.py b/tests/test_templates/test_property_templates/test_datetime_property/datetime_property_template.py.jinja similarity index 90% rename from tests/test_templates/test_property_templates/test_datetime_property/datetime_property_template.py rename to tests/test_templates/test_property_templates/test_datetime_property/datetime_property_template.py.jinja index 85fa1548d..c922fa469 100644 --- a/tests/test_templates/test_property_templates/test_datetime_property/datetime_property_template.py +++ b/tests/test_templates/test_property_templates/test_datetime_property/datetime_property_template.py.jinja @@ -1,5 +1,4 @@ from datetime import date -from typing import cast, Union from dateutil.parser import isoparse {% from "property_templates/datetime_property.py.jinja" import transform, construct %} diff --git a/tests/test_templates/test_property_templates/test_datetime_property/required_not_null.py b/tests/test_templates/test_property_templates/test_datetime_property/required_not_null.py index 8253828e3..ab016a8a1 100644 --- a/tests/test_templates/test_property_templates/test_datetime_property/required_not_null.py +++ b/tests/test_templates/test_property_templates/test_datetime_property/required_not_null.py @@ -1,5 +1,4 @@ from datetime import date -from typing import cast, Union from dateutil.parser import isoparse some_source = date(2020, 10, 12) diff --git a/tests/test_templates/test_property_templates/test_datetime_property/test_datetime_property.py b/tests/test_templates/test_property_templates/test_datetime_property/test_datetime_property.py index 83d91ff3a..bb9a3bd10 100644 --- a/tests/test_templates/test_property_templates/test_datetime_property/test_datetime_property.py +++ b/tests/test_templates/test_property_templates/test_datetime_property/test_datetime_property.py @@ -27,7 +27,7 @@ def test_required(): lstrip_blocks=True ) - template = env.get_template("datetime_property_template.py") + template = env.get_template("datetime_property_template.py.jinja") content = template.render(property=prop) expected = here / "required_not_null.py" assert content == expected.read_text() diff --git a/tests/test_templates/test_property_templates/test_uuid_property/__init__.py b/tests/test_templates/test_property_templates/test_uuid_property/__init__.py new file mode 100644 index 000000000..839bd41fd --- /dev/null +++ b/tests/test_templates/test_property_templates/test_uuid_property/__init__.py @@ -0,0 +1 @@ +"""Tests for UUID property templates.""" diff --git a/tests/test_templates/test_property_templates/test_uuid_property/test_uuid_multipart.py b/tests/test_templates/test_property_templates/test_uuid_property/test_uuid_multipart.py new file mode 100644 index 000000000..fe7cd7f85 --- /dev/null +++ b/tests/test_templates/test_property_templates/test_uuid_property/test_uuid_multipart.py @@ -0,0 +1,70 @@ +"""Tests for UUID property multipart macro functionality.""" + +from pathlib import Path +from typing import Any +from uuid import UUID + +import jinja2 +import pytest + +from openapi_python_client.parser.properties import UuidProperty +from openapi_python_client.utils import PythonIdentifier + + +def uuid_property(required: bool = True, default: Any = None) -> UuidProperty: + """Helper to create a UuidProperty for testing.""" + return UuidProperty( + name="test_uuid", + required=required, + default=default, + python_name=PythonIdentifier(value="test_uuid", prefix=""), + description="A test UUID property", + 
example="550e8400-e29b-41d4-a716-446655440000", + ) + + +@pytest.fixture +def jinja_env() -> jinja2.Environment: + """Create a Jinja2 environment with the property templates loaded.""" + templates_dir = Path(__file__).parent.parent.parent.parent.parent / "openapi_python_client" / "templates" + env = jinja2.Environment( + loader=jinja2.FileSystemLoader(templates_dir), + trim_blocks=True, + lstrip_blocks=True, + ) + return env + + +def test_multipart_macro_generates_syntactically_correct_code_for_required_uuid(jinja_env: jinja2.Environment) -> None: + """Test that the multipart macro generates syntactically correct Python code for required UUID properties.""" + prop = uuid_property(required=True) + + template = jinja_env.get_template("property_templates/uuid_property.py.jinja") + + # Render the multipart macro + multipart_code = template.module.multipart(prop, "test_uuid", '"test_uuid"') # type: ignore[attr-defined] + + # Verify the generated code is syntactically correct + expected = 'files.append(("test_uuid", (None, str(test_uuid), "text/plain")))' + assert multipart_code.strip() == expected + + # Verify it compiles as valid Python + compile(multipart_code, "", "exec") + + +def test_multipart_macro_generates_syntactically_correct_code_for_optional_uuid(jinja_env: jinja2.Environment) -> None: + """Test that the multipart macro generates syntactically correct Python code for optional UUID properties.""" + prop = uuid_property(required=False) + + template = jinja_env.get_template("property_templates/uuid_property.py.jinja") + + # Render the multipart macro + multipart_code = template.module.multipart(prop, "test_uuid", '"test_uuid"') # type: ignore[attr-defined] + + # Verify the generated code is syntactically correct + expected = 'files.append(("test_uuid", (None, str(test_uuid), "text/plain")))' + assert multipart_code.strip() == expected + + # Verify it compiles as valid Python + compile(multipart_code, "", "exec") + \ No newline at end of file diff --git a/tests/test_utils.py b/tests/test_utils.py index 3cd213488..fafa61805 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -99,7 +99,7 @@ def test_no_string_escapes(): ("int", "int_"), ("dict", "dict_"), ("not_reserved", "not_reserved"), - ("type", "type"), + ("type", "type_"), ("id", "id"), ("None", "None_"), ], @@ -132,5 +132,5 @@ def test_pascalcase(before, after): pytest.param("application/vnd.api+json;charset=utf-8", "application/vnd.api+json"), ], ) -def test_get_content_type(content_type: str, expected: str) -> None: - assert utils.get_content_type(content_type) == expected +def test_get_content_type(content_type: str, expected: str, config) -> None: + assert utils.get_content_type(content_type, config) == expected
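
For context, the parametrized cases in test_http_status_pattern_lt above imply an ordering for status patterns: exact codes ("400") sort before wildcard ranges ("2XX"), which sort before "default", and within the same kind lower codes come first. The real HTTPStatusPattern.parse and __lt__ may be implemented differently; the sketch below is only a minimal ordering consistent with those cases, and _StatusPatternSketch plus its code_range values are hypothetical.

from dataclasses import dataclass


@dataclass
class _StatusPatternSketch:
    pattern: str
    code_range: tuple[int, int]

    def _rank(self) -> tuple[int, int]:
        if self.pattern == "default":
            return (2, 0)  # "default" always sorts last
        if "X" in self.pattern.upper():
            return (1, self.code_range[0])  # wildcard ranges sort after exact codes
        return (0, self.code_range[0])  # exact codes first, ordered by value

    def __lt__(self, other: "_StatusPatternSketch") -> bool:
        return self._rank() < other._rank()


# Matches the parametrized table above (code_range values assumed for illustration):
assert _StatusPatternSketch("400", (400, 400)) < _StatusPatternSketch("default", (100, 599))
assert not _StatusPatternSketch("2XX", (200, 299)) < _StatusPatternSketch("400", (400, 400))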
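
The comment at the top of test_noisy_refs.py describes forcing any dict containing "$ref" to parse as a Reference via a discriminated union. A minimal sketch of that technique using Pydantic v2's callable discriminator follows; _Reference, _Example, _tag, and _RefOrExample are stand-ins for illustration and may not match how the real ReferenceOr alias is defined.

from typing import Annotated, Any, Optional, Union

from pydantic import BaseModel, ConfigDict, Discriminator, Field, Tag, TypeAdapter


class _Reference(BaseModel):
    ref: str = Field(alias="$ref")


class _Example(BaseModel):
    model_config = ConfigDict(extra="allow")  # mirrors the permissive spec models
    summary: Optional[str] = None
    value: Any = None


def _tag(value: Any) -> str:
    # Anything carrying "$ref" is routed to the Reference branch.
    return "ref" if isinstance(value, dict) and "$ref" in value else "other"


_RefOrExample = Annotated[
    Union[Annotated[_Reference, Tag("ref")], Annotated[_Example, Tag("other")]],
    Discriminator(_tag),
]

adapter = TypeAdapter(_RefOrExample)
assert isinstance(adapter.validate_python({"$ref": "#/components/examples/Foo"}), _Reference)
assert isinstance(adapter.validate_python({"summary": "s", "value": 1}), _Example)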
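
The exclusiveMinimum/exclusiveMaximum tests added to test_schema.py encode the OpenAPI 3.0-to-3.1 normalisation: a boolean exclusiveMinimum of true combined with minimum becomes a numeric exclusiveMinimum (and minimum is dropped), while false simply keeps minimum. The real Schema model may handle this differently; below is a minimal before-validator sketch with a hypothetical _SchemaSketch model that reproduces the asserted behaviour.

from typing import Any, Optional

from pydantic import BaseModel, model_validator


class _SchemaSketch(BaseModel):
    minimum: Optional[float] = None
    maximum: Optional[float] = None
    exclusiveMinimum: Optional[float] = None
    exclusiveMaximum: Optional[float] = None

    @model_validator(mode="before")
    @classmethod
    def _normalize_exclusive_bounds(cls, data: Any) -> Any:
        if not isinstance(data, dict):
            return data
        data = dict(data)  # avoid mutating the caller's dict
        if data.get("exclusiveMinimum") is True:
            data["exclusiveMinimum"] = data.pop("minimum", None)
        elif data.get("exclusiveMinimum") is False:
            data.pop("exclusiveMinimum")
        if data.get("exclusiveMaximum") is True:
            data["exclusiveMaximum"] = data.pop("maximum", None)
        elif data.get("exclusiveMaximum") is False:
            data.pop("exclusiveMaximum")
        return data


assert _SchemaSketch.model_validate({"minimum": 10, "exclusiveMinimum": True}).exclusiveMinimum == 10
assert _SchemaSketch.model_validate({"maximum": 100, "exclusiveMaximum": False}).maximum == 100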