pax_global_header00006660000000000000000000000064145525125020014513gustar00rootroot0000000000000052 comment=3ddb509786089f9df0658303545ae4b66db2d47c pydantic-1.10.14/000077500000000000000000000000001455251250200134725ustar00rootroot00000000000000pydantic-1.10.14/.github/000077500000000000000000000000001455251250200150325ustar00rootroot00000000000000pydantic-1.10.14/.github/FUNDING.yml000066400000000000000000000000251455251250200166440ustar00rootroot00000000000000github: samuelcolvin pydantic-1.10.14/.github/ISSUE_TEMPLATE/000077500000000000000000000000001455251250200172155ustar00rootroot00000000000000pydantic-1.10.14/.github/ISSUE_TEMPLATE/bug.yml000066400000000000000000000075661455251250200205330ustar00rootroot00000000000000name: 🐛 Bug description: Report a bug or unexpected behavior in pydantic labels: [bug, unconfirmed] body: - type: markdown attributes: value: Thank you for contributing to pydantic! ✊ - type: checkboxes id: checks attributes: label: Initial Checks description: | Just a few checks to make sure you need to create a bug report. _Sorry to sound so draconian 👿; but every second spent replying to issues is time not spent improving pydantic 🙇._ options: - label: I have searched GitHub for a duplicate issue and I'm sure this is something new required: true - label: I have searched Google & StackOverflow for a solution and couldn't find anything required: true - label: I have read and followed [the docs](https://pydantic-docs.helpmanual.io) and still think this is a bug required: true - label: > I am confident that the issue is with pydantic (not my code, or another library in the ecosystem like [FastAPI](https://fastapi.tiangolo.com) or [mypy](https://mypy.readthedocs.io/en/stable)) required: true - type: textarea id: description attributes: label: Description description: | Please explain what you're seeing and what you would expect to see. Please provide as much detail as possible to make understanding and solving your problem as quick as possible. 🙏 validations: required: true - type: textarea id: example attributes: label: Example Code description: > If applicable, please add a self-contained, [minimal, reproducible, example](https://stackoverflow.com/help/minimal-reproducible-example) demonstrating the bug. placeholder: | import pydantic ... render: Python - type: textarea id: version attributes: label: Python, Pydantic & OS Version description: | Which version of Python & Pydantic are you using, and which Operating System? Please run the following command and copy the output below: ```bash python -c "import pydantic.utils; print(pydantic.utils.version_info())" ``` render: Text validations: required: true - type: checkboxes id: affected-components attributes: label: Affected Components description: Which of the following parts of pydantic does this bug affect? 
# keep this lis in sync with feature_request.yml options: - label: '[Compatibility between releases](https://pydantic-docs.helpmanual.io/changelog/)' - label: '[Data validation/parsing](https://pydantic-docs.helpmanual.io/usage/models/#basic-model-usage)' - label: '[Data serialization](https://pydantic-docs.helpmanual.io/usage/exporting_models/) - `.dict()` and `.json()`' - label: '[JSON Schema](https://pydantic-docs.helpmanual.io/usage/schema/)' - label: '[Dataclasses](https://pydantic-docs.helpmanual.io/usage/dataclasses/)' - label: '[Model Config](https://pydantic-docs.helpmanual.io/usage/model_config/)' - label: '[Field Types](https://pydantic-docs.helpmanual.io/usage/types/) - adding or changing a particular data type' - label: '[Function validation decorator](https://pydantic-docs.helpmanual.io/usage/validation_decorator/)' - label: '[Generic Models](https://pydantic-docs.helpmanual.io/usage/models/#generic-models)' - label: '[Other Model behaviour](https://pydantic-docs.helpmanual.io/usage/models/) - `construct()`, pickling, private attributes, ORM mode' - label: '[Settings Management](https://pydantic-docs.helpmanual.io/usage/settings/)' - label: '[Plugins](https://pydantic-docs.helpmanual.io/) and integration with other tools - mypy, FastAPI, python-devtools, Hypothesis, VS Code, PyCharm, etc.' pydantic-1.10.14/.github/ISSUE_TEMPLATE/config.yml000066400000000000000000000003471455251250200212110ustar00rootroot00000000000000blank_issues_enabled: true contact_links: - name: 🤔 Ask a Question url: 'https://github.com/pydantic/pydantic/discussions/new?category=question' about: Ask a question about how to use pydantic using github discussions pydantic-1.10.14/.github/ISSUE_TEMPLATE/feature_request.yml000066400000000000000000000055601455251250200231510ustar00rootroot00000000000000name: 🚀 Feature request description: Suggest a new feature or change to pydantic labels: [feature request] body: - type: markdown attributes: value: Thank you for contributing to pydantic! ✊ - type: checkboxes id: searched attributes: label: Initial Checks description: | Just a few checks to make sure you need to create a feature request. _Sorry to sound so draconian 👿; but every second spent replying to issues is time not spent improving pydantic 🙇._ options: - label: I have searched Google & GitHub for similar requests and couldn't find anything required: true - label: I have read and followed [the docs](https://pydantic-docs.helpmanual.io) and still think this feature is missing required: true - type: textarea id: description attributes: label: Description description: | Please give as much detail as possible about the feature you would like to suggest. 🙏 You might like to add: * A demo of how code might look when using the feature * Your use case(s) for the feature * Why the feature should be added to pydantic (as opposed to another library or just implemented in your code) validations: required: true - type: checkboxes id: affected-components attributes: label: Affected Components description: Which of the following parts of pydantic does this feature affect? 
# keep this lis in sync with bug.yml options: - label: '[Compatibility between releases](https://pydantic-docs.helpmanual.io/changelog/)' - label: '[Data validation/parsing](https://pydantic-docs.helpmanual.io/usage/models/#basic-model-usage)' - label: '[Data serialization](https://pydantic-docs.helpmanual.io/usage/exporting_models/) - `.dict()` and `.json()`' - label: '[JSON Schema](https://pydantic-docs.helpmanual.io/usage/schema/)' - label: '[Dataclasses](https://pydantic-docs.helpmanual.io/usage/dataclasses/)' - label: '[Model Config](https://pydantic-docs.helpmanual.io/usage/model_config/)' - label: '[Field Types](https://pydantic-docs.helpmanual.io/usage/types/) - adding or changing a particular data type' - label: '[Function validation decorator](https://pydantic-docs.helpmanual.io/usage/validation_decorator/)' - label: '[Generic Models](https://pydantic-docs.helpmanual.io/usage/models/#generic-models)' - label: '[Other Model behaviour](https://pydantic-docs.helpmanual.io/usage/models/) - `construct()`, pickling, private attributes, ORM mode' - label: '[Settings Management](https://pydantic-docs.helpmanual.io/usage/settings/)' - label: '[Plugins](https://pydantic-docs.helpmanual.io/) and integration with other tools - mypy, FastAPI, python-devtools, Hypothesis, VS Code, PyCharm, etc.' pydantic-1.10.14/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000023301455251250200206310ustar00rootroot00000000000000 ## Change Summary ## Related issue number ## Checklist * [ ] Unit tests for the changes exist * [ ] Tests pass on CI and coverage remains at 100% * [ ] Documentation reflects the changes where applicable * [ ] `changes/-.md` file added describing change (see [changes/README.md](https://github.com/pydantic/pydantic/blob/main/changes/README.md) for details. You can [skip this check](https://github.com/pydantic/hooky#change-file-checks) if the change does not need a change file.) * [ ] My PR is ready to review, **please add a comment including the phrase "please review" to assign reviewers** pydantic-1.10.14/.github/SECURITY.md000066400000000000000000000012551455251250200166260ustar00rootroot00000000000000# Security Policy ## Supported Versions We will endeavour to support: * the most recent minor release with bug fixes * the latest minor release from the last major version for 6 months after a new major version is released with critical bug fixes * all versions if a security vulnerability is found provided 1) upgrading to a later version is non-trivial 2) sufficient people are using that version to make support worthwhile ## Reporting a Vulnerability If you find what you think might be a security vulnerability with pydantic, please do not create an issue on github. Instead please email s@muelcolvin.com I'll reply to your email promptly and try to get a patch out ASAP. 
pydantic-1.10.14/.github/dependabot.yml000066400000000000000000000002651455251250200176650ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: pip directory: / schedule: interval: monthly - package-ecosystem: github-actions directory: / schedule: interval: monthly pydantic-1.10.14/.github/workflows/000077500000000000000000000000001455251250200170675ustar00rootroot00000000000000pydantic-1.10.14/.github/workflows/ci.yml000066400000000000000000000311331455251250200202060ustar00rootroot00000000000000name: CI on: push: branches: - main - 1.10.X-fixes tags: - '**' pull_request: {} jobs: lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: 3.9 - uses: actions/cache@v3 id: cache with: path: | ${{ env.pythonLocation }} .mypy_cache key: > lint-v2 ${{ runner.os }} ${{ env.pythonLocation }} ${{ hashFiles('tests/requirements-linting.txt') }} - name: install if: steps.cache.outputs.cache-hit != 'true' run: | make install-linting pip freeze - name: lint run: make lint - name: pyupgrade run: make pyupgrade - name: mypy run: make mypy - name: make history run: python3 ./changes/make_history.py - name: check dist run: make check-dist - name: install node for pyright uses: actions/setup-node@v3 with: node-version: '14' - run: npm install -g pyright - run: make pyright docs-build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: '3.10' - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} key: > docs-build-v2 ${{ runner.os }} ${{ env.pythonLocation }} ${{ hashFiles('setup.py') }} ${{ hashFiles('requirements.txt') }} ${{ hashFiles('docs/requirements.txt') }} - name: install if: steps.cache.outputs.cache-hit != 'true' run: make install-docs - name: build site run: make docs - name: Store docs site uses: actions/upload-artifact@v3 with: name: docs path: site test-memray: name: test memray runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: '3.10' - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} key: > test-memray ${{ runner.os }} ${{ env.pythonLocation }} ${{ hashFiles('setup.py') }} ${{ hashFiles('requirements.txt') }} ${{ hashFiles('tests/requirements-testing.txt') }} - name: install run: | make install-testing pip install pytest-memray==1.4.0 - name: compile run: | make build-trace python -c "import sys, pydantic; print('compiled:', pydantic.compiled); sys.exit(0 if pydantic.compiled else 1)" - name: test run: pytest --ignore=tests/mypy/ --memray test-linux-compiled: name: test py${{ matrix.python-version }} on linux compiled runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] env: PYTHON: ${{ matrix.python-version }} OS: ubuntu steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} key: > test-linux-compiled-v2 ${{ runner.os }} ${{ env.pythonLocation }} ${{ hashFiles('setup.py') }} ${{ hashFiles('requirements.txt') }} ${{ hashFiles('tests/requirements-testing.txt') }} - name: install run: make install-testing - name: compile run: | make build-trace python -c "import sys, pydantic; print('compiled:', pydantic.compiled); sys.exit(0 if pydantic.compiled else 1)" ls -alh ls -alh pydantic/ - run: 
mkdir coverage - name: test run: make test env: COVERAGE_FILE: coverage/.coverage.linux-py${{ matrix.python-version }}-compiled CONTEXT: linux-py${{ matrix.python-version }}-compiled - name: store coverage files uses: actions/upload-artifact@v3 with: name: coverage path: coverage test-not-compiled: name: test py${{ matrix.python-version }} on ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu, macos, windows] python-version: ['3.7', '3.8', '3.9', '3.10'] include: - os: ubuntu env: PYTHON: ${{ matrix.python-version }} OS: ${{ matrix.os }} COMPILED: no DEPS: yes runs-on: ${{ matrix.os }}-latest steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - uses: actions/cache@v3 id: cache with: path: ${{ env.pythonLocation }} key: > test-not-compiled-v2 ${{ runner.os }} ${{ env.pythonLocation }} ${{ hashFiles('setup.py') }} ${{ hashFiles('requirements.txt') }} ${{ hashFiles('tests/requirements-testing.txt') }} - name: install run: make install-testing - run: pip freeze - run: mkdir coverage - name: test with deps run: make test env: COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}-with-deps CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}-with-deps - name: uninstall deps run: pip uninstall -y cython email-validator devtools python-dotenv - name: test without deps run: make test env: COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}-without-deps CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}-without-deps - name: store coverage files uses: actions/upload-artifact@v3 with: name: coverage path: coverage test-old-mypy: name: test mypy v${{ matrix.mypy-version }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: mypy-version: ['0.910', '0.921', '0.931', '0.942', '0.950', '0.960'] steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: '3.10' - name: install run: | make install-testing pip freeze - name: uninstall deps run: pip uninstall -y mypy tomli toml - name: install specific mypy version run: pip install mypy==${{ matrix.mypy-version }} - run: mkdir coverage - name: run tests run: pytest --cov=pydantic tests/mypy env: COVERAGE_FILE: coverage/.coverage.linux-py3.10-mypy${{ matrix.mypy-version }} CONTEXT: linux-py3.10-mypy${{ matrix.mypy-version }} - name: store coverage files uses: actions/upload-artifact@v3 with: name: coverage path: coverage coverage-combine: needs: [test-linux-compiled, test-not-compiled, test-old-mypy] runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: python-version: '3.8' - name: get coverage files uses: actions/download-artifact@v3 with: name: coverage path: coverage - run: pip install coverage - run: ls -la coverage - run: coverage combine coverage - run: coverage report - run: coverage html --show-contexts --title "pydantic coverage for ${{ github.sha }}" - name: Store coverage html uses: actions/upload-artifact@v3 with: name: coverage-html path: htmlcov test-fastapi: name: test fastAPI runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: '3.10' - name: install run: make install-testing - name: test run: make test-fastapi build: name: build py3.${{ matrix.python-version }} on ${{ matrix.platform || matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu , macos , windows] python-version: ['7', '8', '9', '10', 
'11'] include: - os: ubuntu platform: linux - os: windows ls: dir runs-on: ${{ matrix.os }}-latest steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: '3.8' - name: install run: pip install -U twine setuptools wheel cibuildwheel - name: build sdist if: matrix.os == 'ubuntu' && matrix.python-version == '9' run: python setup.py sdist bdist_wheel env: SKIP_CYTHON: 1 - name: build ${{ matrix.platform || matrix.os }} binaries run: cibuildwheel --output-dir dist env: PIP: 'pip' CIBW_BUILD: 'cp3${{ matrix.python-version }}-*' CIBW_SKIP: '*-win32' CIBW_PLATFORM: '${{ matrix.platform || matrix.os }}' CIBW_BEFORE_BUILD: 'pip install -U "cython<3"' CIBW_TEST_REQUIRES: 'pytest==6.2.5 pytest-mock==3.6.1' CIBW_TEST_COMMAND: 'pytest {project}/tests' CIBW_MANYLINUX_X86_64_IMAGE: 'manylinux2014' CIBW_MANYLINUX_I686_IMAGE: 'manylinux2014' CIBW_ARCHS_MACOS: 'x86_64 arm64' CIBW_TEST_SKIP: '*-macosx_arm64' # see https://cibuildwheel.readthedocs.io/en/stable/faq/#universal2 # TODO build windows 32bit binaries - name: list dist files run: | ${{ matrix.ls || 'ls -lh' }} dist/ twine check dist/* - name: Store dist artifacts uses: actions/upload-artifact@v3 with: name: pypi_files path: dist # https://github.com/marketplace/actions/alls-green#why check: # This job does nothing and is only used for the branch protection if: always() needs: - lint - docs-build - test-linux-compiled - test-not-compiled - test-old-mypy - test-fastapi - build runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} inspect-pypi-assets: needs: [build] runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: get dist artifacts uses: actions/download-artifact@v3 with: name: pypi_files path: dist - name: list dist files run: | ls -lh dist/ echo "`ls dist | wc -l` files" - name: extract and list sdist file run: | mkdir sdist-files tar -xvf dist/*.tar.gz -C sdist-files tree -a sdist-files - name: extract and list wheel file run: | ls dist/*cp310-manylinux*x86_64.whl | head -n 1 python -m zipfile --list `ls dist/*cp310-manylinux*x86_64.whl | head -n 1` publish_docs: needs: [check] if: "success() && startsWith(github.ref, 'refs/tags/')" runs-on: ubuntu-latest steps: - name: checkout docs-site uses: actions/checkout@v3 with: ref: docs-site - name: checkout current branch uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: '3.10' - name: install run: make install-docs - name: Set git credentials run: | git config --global user.name "${{ github.actor }}" git config --global user.email "${{ github.actor }}@users.noreply.github.com" - run: make docs - name: check version id: check-version uses: samuelcolvin/check-python-version@v3.2 with: version_file_path: 'pydantic/version.py' - run: mike deploy -b docs-site ${{ steps.check-version.outputs.VERSION_MAJOR_MINOR }} --update-aliases --push if: "!fromJSON(steps.check-version.outputs.IS_PRERELEASE)" release: needs: [check] if: "success() && startsWith(github.ref, 'refs/tags/')" runs-on: ubuntu-latest environment: release permissions: id-token: write steps: - uses: actions/checkout@v3 - name: get dist artifacts uses: actions/download-artifact@v3 with: name: pypi_files path: dist - name: check version uses: samuelcolvin/check-python-version@v3.2 with: version_file_path: pydantic/version.py - name: Upload package to PyPI uses: pypa/gh-action-pypi-publish@release/v1 
pydantic-1.10.14/.github/workflows/combine-dependabot.yml000066400000000000000000000124141455251250200233330ustar00rootroot00000000000000# from https://github.com/hrvey/combine-prs-workflow/blob/master/combine-prs.yml name: 'Combine Dependabot PRs' on: workflow_dispatch: inputs: branchPrefix: description: 'Branch prefix to find combinable PRs based on' required: true default: 'dependabot/' mustBeGreen: description: 'Only combine PRs that are green' required: true default: true combineBranchName: description: 'Name of the branch to combine PRs into' required: true default: 'combine-dependabot-bumps' ignoreLabel: description: 'Exclude PRs with this label' required: true default: 'nocombine' jobs: combine-prs: runs-on: ubuntu-latest steps: - uses: actions/github-script@v6 id: fetch-branch-names name: Fetch branch names with: github-token: ${{secrets.GITHUB_TOKEN}} script: | const pulls = await github.paginate('GET /repos/:owner/:repo/pulls', { owner: context.repo.owner, repo: context.repo.repo }); branches = []; prs = []; base_branch = null; for (const pull of pulls) { const branch = pull['head']['ref']; console.log('Pull for branch: ' + branch); if (branch.startsWith('${{ github.event.inputs.branchPrefix }}')) { console.log('Branch matched: ' + branch); statusOK = true; if(${{ github.event.inputs.mustBeGreen }}) { console.log('Checking green status: ' + branch); const statuses = await github.paginate('GET /repos/{owner}/{repo}/commits/{ref}/status', { owner: context.repo.owner, repo: context.repo.repo, ref: branch }); if(statuses.length > 0) { const latest_status = statuses[0]['state']; console.log('Validating status: ' + latest_status); if(latest_status != 'success') { console.log('Discarding ' + branch + ' with status ' + latest_status); statusOK = false; } } } console.log('Checking labels: ' + branch); const labels = pull['labels']; for(const label of labels) { const labelName = label['name']; console.log('Checking label: ' + labelName); if(labelName == '${{ github.event.inputs.ignoreLabel }}') { console.log('Discarding ' + branch + ' with label ' + labelName); statusOK = false; } } if (statusOK) { console.log('Adding branch to array: ' + branch); branches.push(branch); prs.push('#' + pull['number'] + ' ' + pull['title']); base_branch = pull['base']['ref']; } } } if (branches.length == 0) { core.setFailed('No PRs/branches matched criteria'); return; } core.setOutput('base-branch', base_branch); core.setOutput('prs-string', prs.join('\n')); combined = branches.join(' ') console.log('Combined: ' + combined); return combined - uses: actions/checkout@v3 with: fetch-depth: 0 # Creates a branch with other PR branches merged together - name: Created combined branch env: BASE_BRANCH: ${{ steps.fetch-branch-names.outputs.base-branch }} BRANCHES_TO_COMBINE: ${{ steps.fetch-branch-names.outputs.result }} COMBINE_BRANCH_NAME: ${{ github.event.inputs.combineBranchName }} run: | echo "$BRANCHES_TO_COMBINE" sourcebranches="${BRANCHES_TO_COMBINE%\"}" sourcebranches="${sourcebranches#\"}" basebranch="${BASE_BRANCH%\"}" basebranch="${basebranch#\"}" git config pull.rebase false git config user.name github-actions git config user.email github-actions@github.com git branch $COMBINE_BRANCH_NAME $basebranch git checkout $COMBINE_BRANCH_NAME git pull origin $sourcebranches --no-edit git push origin $COMBINE_BRANCH_NAME # Creates a PR with the new combined branch - uses: actions/github-script@v6 name: Create Combined Pull Request env: PRS_STRING: ${{ steps.fetch-branch-names.outputs.prs-string }} with: 
github-token: ${{secrets.GITHUB_TOKEN}} script: | const prString = process.env.PRS_STRING; const body = 'This PR was created by the Combine PRs action by combining the following PRs:\n' + prString; await github.pulls.create({ owner: context.repo.owner, repo: context.repo.repo, title: 'Combined Dependabot Bumps', head: '${{ github.event.inputs.combineBranchName }}', base: '${{ steps.fetch-branch-names.outputs.base-branch }}', body: body }); pydantic-1.10.14/.github/workflows/dependencies-check.yml000066400000000000000000000023311455251250200233120ustar00rootroot00000000000000name: Dependencies Check on: # can't schedule here since scheduled jobs always run on main workflow_dispatch: {} jobs: find_dependency_cases: runs-on: ubuntu-latest outputs: PYTHON_DEPENDENCY_CASES: ${{ steps.list-python-dependencies.outputs.PYTHON_DEPENDENCY_CASES }} steps: - uses: actions/checkout@v3 - uses: samuelcolvin/list-python-dependencies@main id: list-python-dependencies with: mode: first-last test: name: test py${{ matrix.python-version }} on ${{ matrix.PYTHON_DEPENDENCY_CASE }} needs: - find_dependency_cases strategy: fail-fast: true matrix: python-version: ['3.7', '3.11'] PYTHON_DEPENDENCY_CASE: ${{ fromJSON(needs.find_dependency_cases.outputs.PYTHON_DEPENDENCY_CASES) }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: set up python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - run: make install-testing - name: uninstall optional deps run: pip uninstall -y email-validator devtools python-dotenv - run: pip install ${{ matrix.PYTHON_DEPENDENCY_CASE }} - run: pip freeze - run: make test pydantic-1.10.14/.github/workflows/upload-previews.yml000066400000000000000000000024461455251250200227460ustar00rootroot00000000000000name: Upload Previews on: workflow_run: workflows: [CI] types: [completed] permissions: statuses: write jobs: upload-previews: if: ${{ github.event.workflow_run.conclusion == 'success' }} runs-on: ubuntu-latest steps: - uses: actions/setup-python@v4 with: python-version: '3.9' - run: pip install smokeshow - uses: dawidd6/action-download-artifact@v2 with: workflow: ci.yml commit: ${{ github.event.workflow_run.head_sha }} - run: smokeshow upload coverage-html env: SMOKESHOW_GITHUB_STATUS_DESCRIPTION: Coverage {coverage-percentage} SMOKESHOW_GITHUB_COVERAGE_THRESHOLD: 100 SMOKESHOW_GITHUB_CONTEXT: coverage SMOKESHOW_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SMOKESHOW_GITHUB_PR_HEAD_SHA: ${{ github.event.workflow_run.head_sha }} SMOKESHOW_AUTH_KEY: ${{ secrets.SMOKESHOW_AUTH_KEY }} - run: smokeshow upload docs env: SMOKESHOW_GITHUB_STATUS_DESCRIPTION: Docs Preview SMOKESHOW_GITHUB_CONTEXT: docs SMOKESHOW_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SMOKESHOW_GITHUB_PR_HEAD_SHA: ${{ github.event.workflow_run.head_sha }} SMOKESHOW_AUTH_KEY: ${{ secrets.SMOKESHOW_AUTH_KEY }} pydantic-1.10.14/.gitignore000066400000000000000000000007331455251250200154650ustar00rootroot00000000000000.idea/ env/ venv/ .venv/ env3*/ Pipfile *.lock *.py[cod] *.egg-info/ .python-version /build/ dist/ .cache/ .mypy_cache/ test.py .coverage .hypothesis /htmlcov/ /benchmarks/*.json /docs/.changelog.md /docs/.version.md /docs/.tmp_schema_mappings.html /docs/.tmp_examples/ /docs/.tmp-projections/ /docs/usage/.tmp-projections/ /site/ /site.zip .pytest_cache/ .vscode/ _build/ pydantic/*.c pydantic/*.so .auto-format /sandbox/ /.ghtopdep_cache/ /fastapi/ /codecov.sh /worktrees/ pydantic-1.10.14/.hooky.toml000066400000000000000000000002201455251250200155700ustar00rootroot00000000000000# 
configuring https://github.com/pydantic/hooky [tool.hooky] reviewers = ['samuelcolvin', 'PrettyWood', 'hramezani'] require_change_file = true pydantic-1.10.14/.pre-commit-config.yaml000066400000000000000000000007161455251250200177570ustar00rootroot00000000000000repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.0.1 hooks: - id: check-yaml args: ['--unsafe'] - id: end-of-file-fixer - repo: local hooks: - id: lint name: Lint entry: make lint types: [python] language: system - id: mypy name: Mypy entry: make mypy types: [python] language: system - id: pyupgrade name: Pyupgrade entry: make pyupgrade types: [python] language: system pydantic-1.10.14/HISTORY.md000066400000000000000000002404611455251250200151640ustar00rootroot00000000000000## v1.10.14 (2024-01-19) * Update install.md by @dmontagu in https://github.com/pydantic/pydantic/pull/7690 * Fixes ci to only deploy docs on release by @sydney-runkle in https://github.com/pydantic/pydantic/pull/7740 * Ubuntu fixes for V1 by @sydney-runkle in https://github.com/pydantic/pydantic/pull/8540 and https://github.com/pydantic/pydantic/pull/8587 * Fix cached_property handling in dataclasses when copied by @rdbisme in https://github.com/pydantic/pydantic/pull/8407 ## v1.10.13 (2023-09-27) * Fix: Add max length check to `pydantic.validate_email`, #7673 by @hramezani * Docs: Fix pip commands to install v1, #6930 by @chbndrhnns ## v1.10.12 (2023-07-24) * Fixes the `maxlen` property being dropped on `deque` validation. Happened only if the deque item has been typed. Changes the `_validate_sequence_like` func, #6581 by @maciekglowka ## v1.10.11 (2023-07-04) * Importing create_model in tools.py through relative path instead of absolute path - so that it doesn't import V2 code when copied over to V2 branch, #6361 by @SharathHuddar ## v2.0b3 (2023-06-16) Third beta pre-release of Pydantic V2 See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0b3) ## v2.0b2 (2023-06-03) Add `from_attributes` runtime flag to `TypeAdapter.validate_python` and `BaseModel.model_validate`. See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0b2) ## v2.0b1 (2023-06-01) First beta pre-release of Pydantic V2 See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0b1) ## v2.0a4 (2023-05-05) Fourth pre-release of Pydantic V2 See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0a4) ## v2.0a3 (2023-04-20) Third pre-release of Pydantic V2 See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0a3) ## v2.0a2 (2023-04-12) Second pre-release of Pydantic V2 See the full changelog [here](https://github.com/pydantic/pydantic/releases/tag/v2.0a2) ## v2.0a1 (2023-04-03) First pre-release of Pydantic V2! See [this post](https://docs.pydantic.dev/blog/pydantic-v2-alpha/) for more details. 
## v1.10.10 (2023-06-30) * Add Pydantic `Json` field support to settings management, #6250 by @hramezani * Fixed literal validator errors for unhashable values, #6188 by @markus1978 * Fixed bug with generics receiving forward refs, #6130 by @mark-todd * Update install method of FastAPI for internal tests in CI, #6117 by @Kludex ## v1.10.9 (2023-06-07) * Fix trailing zeros not ignored in Decimal validation, #5968 by @hramezani * Fix mypy plugin for v1.4.0, #5928 by @cdce8p * Add future and past date hypothesis strategies, #5850 by @bschoenmaeckers * Discourage usage of Cython 3 with Pydantic 1.x, #5845 by @lig ## v1.10.8 (2023-05-23) * Fix a bug in `Literal` usage with `typing-extension==4.6.0`, #5826 by @hramezani * This solves the (closed) issue #3849 where aliased fields that use discriminated union fail to validate when the data contains the non-aliased field name, #5736 by @benwah * Update email-validator dependency to >=2.0.0post2, #5627 by @adriangb * update `AnyClassMethod` for changes in [python/typeshed#9771](https://github.com/python/typeshed/issues/9771), #5505 by @ITProKyle ## v1.10.7 (2023-03-22) * Fix creating schema from model using `ConstrainedStr` with `regex` as dict key, #5223 by @matejetz * Address bug in mypy plugin caused by explicit_package_bases=True, #5191 by @dmontagu * Add implicit defaults in the mypy plugin for Field with no default argument, #5190 by @dmontagu * Fix schema generated for Enum values used as Literals in discriminated unions, #5188 by @javibookline * Fix mypy failures caused by the pydantic mypy plugin when users define `from_orm` in their own classes, #5187 by @dmontagu * Fix `InitVar` usage with pydantic dataclasses, mypy version `1.1.1` and the custom mypy plugin, #5162 by @cdce8p ## v1.10.6 (2023-03-08) * Implement logic to support creating validators from non standard callables by using defaults to identify them and unwrapping `functools.partial` and `functools.partialmethod` when checking the signature, #5126 by @JensHeinrich * Fix mypy plugin for v1.1.1, and fix `dataclass_transform` decorator for pydantic dataclasses, #5111 by @cdce8p * Raise `ValidationError`, not `ConfigError`, when a discriminator value is unhashable, #4773 by @kurtmckee ## v1.10.5 (2023-02-15) * Fix broken parametrized bases handling with `GenericModel`s with complex sets of models, #5052 by @MarkusSintonen * Invalidate mypy cache if plugin config changes, #5007 by @cdce8p * Fix `RecursionError` when deep-copying dataclass types wrapped by pydantic, #4949 by @mbillingr * Fix `X | Y` union syntax breaking `GenericModel`, #4146 by @thenx * Switch coverage badge to show coverage for this branch/release, #5060 by @samuelcolvin ## v1.10.4 (2022-12-30) * Change dependency to `typing-extensions>=4.2.0`, #4885 by @samuelcolvin ## v1.10.3 (2022-12-29) **NOTE: v1.10.3 was ["yanked"](https://pypi.org/help/#yanked) from PyPI due to #4885 which is fixed in v1.10.4** * fix parsing of custom root models, #4883 by @gou177 * fix: use dataclass proxy for frozen or empty dataclasses, #4878 by @PrettyWood * Fix `schema` and `schema_json` on models where a model instance is a one of default values, #4781 by @Bobronium * Add Jina AI to sponsors on docs index page, #4767 by @samuelcolvin * fix: support assignment on `DataclassProxy`, #4695 by @PrettyWood * Add `postgresql+psycopg` as allowed scheme for `PostgreDsn` to make it usable with SQLAlchemy 2, #4689 by @morian * Allow dict schemas to have both `patternProperties` and `additionalProperties`, #4641 by @jparise * Fixes error passing 
None for optional lists with `unique_items`, #4568 by @mfulgo * Fix `GenericModel` with `Callable` param raising a `TypeError`, #4551 by @mfulgo * Fix field regex with `StrictStr` type annotation, #4538 by @sisp * Correct `dataclass_transform` keyword argument name from `field_descriptors` to `field_specifiers`, #4500 by @samuelcolvin * fix: avoid multiple calls of `__post_init__` when dataclasses are inherited, #4487 by @PrettyWood * Reduce the size of binary wheels, #2276 by @samuelcolvin ## v1.10.2 (2022-09-05) * **Revert Change:** Revert percent encoding of URL parts which was originally added in #4224, #4470 by @samuelcolvin * Prevent long (length > `4_300`) strings/bytes as input to int fields, see [python/cpython#95778](https://github.com/python/cpython/issues/95778) and [CVE-2020-10735](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10735), #1477 by @samuelcolvin * fix: dataclass wrapper was not always called, #4477 by @PrettyWood * Use `tomllib` on Python 3.11 when parsing `mypy` configuration, #4476 by @hauntsaninja * Basic fix of `GenericModel` cache to detect order of arguments in `Union` models, #4474 by @sveinugu * Fix mypy plugin when using bare types like `list` and `dict` as `default_factory`, #4457 by @samuelcolvin ## v1.10.1 (2022-08-31) * Add `__hash__` method to `pydancic.color.Color` class, #4454 by @czaki ## v1.10.0 (2022-08-30) * Refactor the whole _pydantic_ `dataclass` decorator to really act like its standard lib equivalent. It hence keeps `__eq__`, `__hash__`, ... and makes comparison with its non-validated version possible. It also fixes usage of `frozen` dataclasses in fields and usage of `default_factory` in nested dataclasses. The support of `Config.extra` has been added. Finally, config customization directly via a `dict` is now possible, #2557 by @PrettyWood

**BREAKING CHANGES:** - The `compiled` boolean (whether _pydantic_ is compiled with cython) has been moved from `main.py` to `version.py` - Now that `Config.extra` is supported, `dataclass` ignores by default extra arguments (like `BaseModel`) * Fix PEP487 `__set_name__` protocol in `BaseModel` for PrivateAttrs, #4407 by @tlambert03 * Allow for custom parsing of environment variables via `parse_env_var` in `Config`, #4406 by @acmiyaguchi * Rename `master` to `main`, #4405 by @hramezani * Fix `StrictStr` does not raise `ValidationError` when `max_length` is present in `Field`, #4388 by @hramezani * Make `SecretStr` and `SecretBytes` hashable, #4387 by @chbndrhnns * Fix `StrictBytes` does not raise `ValidationError` when `max_length` is present in `Field`, #4380 by @JeanArhancet * Add support for bare `type`, #4375 by @hramezani * Support Python 3.11, including binaries for 3.11 in PyPI, #4374 by @samuelcolvin * Add support for `re.Pattern`, #4366 by @hramezani * Fix `__post_init_post_parse__` is incorrectly passed keyword arguments when no `__post_init__` is defined, #4361 by @hramezani * Fix implicitly importing `ForwardRef` and `Callable` from `pydantic.typing` instead of `typing` and also expose `MappingIntStrAny`, #4358 by @aminalaee * remove `Any` types from the `dataclass` decorator so it can be used with the `disallow_any_expr` mypy option, #4356 by @DetachHead * moved repo to `pydantic/pydantic`, #4348 by @yezz123 * fix "extra fields not permitted" error when dataclass with `Extra.forbid` is validated multiple times, #4343 by @detachhead * Add Python 3.9 and 3.10 examples to docs, #4339 by @Bobronium * Discriminated union models now use `oneOf` instead of `anyOf` when generating OpenAPI schema definitions, #4335 by @MaxwellPayne * Allow type checkers to infer inner type of `Json` type. `Json[list[str]]` will be now inferred as `list[str]`, `Json[Any]` should be used instead of plain `Json`. 
Runtime behaviour is not changed, #4332 by @Bobronium * Allow empty string aliases by using a `alias is not None` check, rather than `bool(alias)`, #4253 by @sergeytsaplin * Update `ForwardRef`s in `Field.outer_type_`, #4249 by @JacobHayes * The use of `__dataclass_transform__` has been replaced by `typing_extensions.dataclass_transform`, which is the preferred way to mark pydantic models as a dataclass under [PEP 681](https://peps.python.org/pep-0681/), #4241 by @multimeric * Use parent model's `Config` when validating nested `NamedTuple` fields, #4219 by @synek * Update `BaseModel.construct` to work with aliased Fields, #4192 by @kylebamos * Catch certain raised errors in `smart_deepcopy` and revert to `deepcopy` if so, #4184 by @coneybeare * Add `Config.anystr_upper` and `to_upper` kwarg to constr and conbytes, #4165 by @satheler * Fix JSON schema for `set` and `frozenset` when they include default values, #4155 by @aminalaee * Teach the mypy plugin that methods decorated by `@validator` are classmethods, #4102 by @DMRobertson * Improve mypy plugin's ability to detect required fields, #4086 by @richardxia * Support fields of type `Type[]` in schema, #4051 by @aminalaee * Add `default` value in JSON Schema when `const=True`, #4031 by @aminalaee * Adds reserved word check to signature generation logic, #4011 by @strue36 * Fix Json strategy failure for the complex nested field, #4005 by @sergiosim * Add JSON-compatible float constraint `allow_inf_nan`, #3994 by @tiangolo * Remove undefined behaviour when `env_prefix` had characters in common with `env_nested_delimiter`, #3975 by @arsenron * Support generics model with `create_model`, #3945 by @hot123s * allow submodels to overwrite extra field info, #3934 by @PrettyWood * Document and test structural pattern matching ([PEP 636](https://peps.python.org/pep-0636/)) on `BaseModel`, #3920 by @irgolic * Fix incorrect deserialization of python timedelta object to ISO 8601 for negative time deltas. Minus was serialized in incorrect place ("P-1DT23H59M59.888735S" instead of correct "-P1DT23H59M59.888735S"), #3899 by @07pepa * Fix validation of discriminated union fields with an alias when passing a model instance, #3846 by @chornsby * Add a CockroachDsn type to validate CockroachDB connection strings. 
The type supports the following schemes: `cockroachdb`, `cockroachdb+psycopg2` and `cockroachdb+asyncpg`, #3839 by @blubber * Fix MyPy plugin to not override pre-existing `__init__` method in models, #3824 by @patrick91 * Fix mypy version checking, #3783 by @KotlinIsland * support overwriting dunder attributes of `BaseModel` instances, #3777 by @PrettyWood * Added `ConstrainedDate` and `condate`, #3740 by @hottwaj * Support `kw_only` in dataclasses, #3670 by @detachhead * Add comparison method for `Color` class, #3646 by @aminalaee * Drop support for python3.6, associated cleanup, #3605 by @samuelcolvin * created new function `to_lower_camel()` for "non pascal case" camel case, #3463 by @schlerp * Add checks to `default` and `default_factory` arguments in Mypy plugin, #3430 by @klaa97 * fix mangling of `inspect.signature` for `BaseModel`, #3413 by @fix-inspect-signature * Adds the `SecretField` abstract class so that all the current and future secret fields like `SecretStr` and `SecretBytes` will derive from it, #3409 by @expobrain * Support multi hosts validation in `PostgresDsn`, #3337 by @rglsk * Fix parsing of very small numeric timedelta values, #3315 by @samuelcolvin * Update `SecretsSettingsSource` to respect `config.case_sensitive`, #3273 by @JeanArhancet * Add MongoDB network data source name (DSN) schema, #3229 by @snosratiershad * Add support for multiple dotenv files, #3222 by @rekyungmin * Raise an explicit `ConfigError` when multiple fields are incorrectly set for a single validator, #3215 by @SunsetOrange * Allow ellipsis on `Field`s inside `Annotated` for `TypedDicts` required, #3133 by @ezegomez * Catch overflow errors in `int_validator`, #3112 by @ojii * Adds a `__rich_repr__` method to `Representation` class which enables pretty printing with [Rich](https://github.com/willmcgugan/rich), #3099 by @willmcgugan * Add percent encoding in `AnyUrl` and descendent types, #3061 by @FaresAhmedb * `validate_arguments` decorator now supports `alias`, #3019 by @MAD-py * Avoid `__dict__` and `__weakref__` attributes in `AnyUrl` and IP address fields, #2890 by @nuno-andre * Add ability to use `Final` in a field type annotation, #2766 by @uriyyo * Update requirement to `typing_extensions>=4.1.0` to guarantee `dataclass_transform` is available, #4424 by @commonism * Add Explosion and AWS to main sponsors, #4413 by @samuelcolvin * Update documentation for `copy_on_model_validation` to reflect recent changes, #4369 by @samuelcolvin * Runtime warning if `__slots__` is passed to `create_model`, `__slots__` is then ignored, #4432 by @samuelcolvin * Add type hints to `BaseSettings.Config` to avoid mypy errors, also correct mypy version compatibility notice in docs, #4450 by @samuelcolvin ## v1.10.0b1 (2022-08-24) Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v1.10.0b1) for details. ## v1.10.0a2 (2022-08-24) Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v1.10.0a2) for details. ## v1.10.0a1 (2022-08-22) Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v1.10.0a1) for details. ## v1.9.2 (2022-08-11) **Revert Breaking Change**: _v1.9.1_ introduced a breaking change where model fields were deep copied by default, this release reverts the default behaviour to match _v1.9.0_ and before, while also allow deep-copy behaviour via `copy_on_model_validation = 'deep'`. See #4092 for more information. 
* Allow for shallow copies of model fields, `Config.copy_on_model_validation` is now a str which must be `'none'`, `'deep'`, or `'shallow'` corresponding to not copying, deep copy & shallow copy; default `'shallow'`, #4093 by @timkpaine ## v1.9.1 (2022-05-19) Thank you to pydantic's sponsors: @tiangolo, @stellargraph, @JonasKs, @grillazz, @Mazyod, @kevinalh, @chdsbd, @povilasb, @povilasb, @jina-ai, @mainframeindustries, @robusta-dev, @SendCloud, @rszamszur, @jodal, @hardbyte, @corleyma, @daddycocoaman, @Rehket, @jokull, @reillysiemens, @westonsteimel, @primer-io, @koxudaxi, @browniebroke, @stradivari96, @adriangb, @kamalgill, @jqueguiner, @dev-zero, @datarootsio, @RedCarpetUp for their kind support. * Limit the size of `generics._generic_types_cache` and `generics._assigned_parameters` to avoid unlimited increase in memory usage, #4083 by @samuelcolvin * Add Jupyverse and FPS as Jupyter projects using pydantic, #4082 by @davidbrochart * Speedup `__isinstancecheck__` on pydantic models when the type is not a model, may also avoid memory "leaks", #4081 by @samuelcolvin * Fix in-place modification of `FieldInfo` that caused problems with PEP 593 type aliases, #4067 by @adriangb * Add support for autocomplete in VS Code via `__dataclass_transform__` when using `pydantic.dataclasses.dataclass`, #4006 by @giuliano-oliveira * Remove benchmarks from codebase and docs, #3973 by @samuelcolvin * Typing checking with pyright in CI, improve docs on vscode/pylance/pyright, #3972 by @samuelcolvin * Fix nested Python dataclass schema regression, #3819 by @himbeles * Update documentation about lazy evaluation of sources for Settings, #3806 by @garyd203 * Prevent subclasses of bytes being converted to bytes, #3706 by @samuelcolvin * Fixed "error checking inheritance of" when using PEP585 and PEP604 type hints, #3681 by @aleksul * Allow self referencing `ClassVar`s in models, #3679 by @samuelcolvin * **Breaking Change, see #4106**: Fix issue with self-referencing dataclass, #3675 by @uriyyo * Include non-standard port numbers in rendered URLs, #3652 by @dolfinus * `Config.copy_on_model_validation` does a deep copy and not a shallow one, #3641 by @PrettyWood * fix: clarify that discriminated unions do not support singletons, #3636 by @tommilligan * Add `read_text(encoding='utf-8')` for `setup.py`, #3625 by @hswong3i * Fix JSON Schema generation for Discriminated Unions within lists, #3608 by @samuelcolvin ## v1.9.0 (2021-12-31) Thank you to pydantic's sponsors: @sthagen, @timdrijvers, @toinbis, @koxudaxi, @ginomempin, @primer-io, @and-semakin, @westonsteimel, @reillysiemens, @es3n1n, @jokull, @JonasKs, @Rehket, @corleyma, @daddycocoaman, @hardbyte, @datarootsio, @jodal, @aminalaee, @rafsaf, @jqueguiner, @chdsbd, @kevinalh, @Mazyod, @grillazz, @JonasKs, @simw, @leynier, @xfenix for their kind support. ### Highlights * add Python 3.10 support, #2885 by @PrettyWood * [Discriminated unions](https://docs.pydantic.dev/usage/types/#discriminated-unions-aka-tagged-unions), #619 by @PrettyWood * [`Config.smart_union` for better union logic](https://docs.pydantic.dev/usage/model_config/#smart-union), #2092 by @PrettyWood * Binaries for Macos M1 CPUs, #3498 by @samuelcolvin * Complex types can be set via [nested environment variables](https://docs.pydantic.dev/usage/settings/#parsing-environment-variable-values), e.g. 
`foo___bar`, #3159 by @Air-Mark * add a dark mode to _pydantic_ documentation, #2913 by @gbdlin * Add support for autocomplete in VS Code via `__dataclass_transform__`, #2721 by @tiangolo * Add "exclude" as a field parameter so that it can be configured using model config, #660 by @daviskirk ### v1.9.0 (2021-12-31) Changes * Apply `update_forward_refs` to `Config.json_encodes` prevent name clashes in types defined via strings, #3583 by @samuelcolvin * Extend pydantic's mypy plugin to support mypy versions `0.910`, `0.920`, `0.921` & `0.930`, #3573 & #3594 by @PrettyWood, @christianbundy, @samuelcolvin ### v1.9.0a2 (2021-12-24) Changes * support generic models with discriminated union, #3551 by @PrettyWood * keep old behaviour of `json()` by default, #3542 by @PrettyWood * Removed typing-only `__root__` attribute from `BaseModel`, #3540 by @layday * Build Python 3.10 wheels, #3539 by @mbachry * Fix display of `extra` fields with model `__repr__`, #3234 by @cocolman * models copied via `Config.copy_on_model_validation` always have all fields, #3201 by @PrettyWood * nested ORM from nested dictionaries, #3182 by @PrettyWood * fix link to discriminated union section by @PrettyWood ### v1.9.0a1 (2021-12-18) Changes * Add support for `Decimal`-specific validation configurations in `Field()`, additionally to using `condecimal()`, to allow better support from editors and tooling, #3507 by @tiangolo * Add `arm64` binaries suitable for MacOS with an M1 CPU to PyPI, #3498 by @samuelcolvin * Fix issue where `None` was considered invalid when using a `Union` type containing `Any` or `object`, #3444 by @tharradine * When generating field schema, pass optional `field` argument (of type `pydantic.fields.ModelField`) to `__modify_schema__()` if present, #3434 by @jasujm * Fix issue when pydantic fail to parse `typing.ClassVar` string type annotation, #3401 by @uriyyo * Mention Python >= 3.9.2 as an alternative to `typing_extensions.TypedDict`, #3374 by @BvB93 * Changed the validator method name in the [Custom Errors example](https://docs.pydantic.dev/usage/models/#custom-errors) to more accurately describe what the validator is doing; changed from `name_must_contain_space` to ` value_must_equal_bar`, #3327 by @michaelrios28 * Add `AmqpDsn` class, #3254 by @kludex * Always use `Enum` value as default in generated JSON schema, #3190 by @joaommartins * Add support for Mypy 0.920, #3175 by @christianbundy * `validate_arguments` now supports `extra` customization (used to always be `Extra.forbid`), #3161 by @PrettyWood * Complex types can be set by nested environment variables, #3159 by @Air-Mark * Fix mypy plugin to collect fields based on `pydantic.utils.is_valid_field` so that it ignores untyped private variables, #3146 by @hi-ogawa * fix `validate_arguments` issue with `Config.validate_all`, #3135 by @PrettyWood * avoid dict coercion when using dict subclasses as field type, #3122 by @PrettyWood * add support for `object` type, #3062 by @PrettyWood * Updates pydantic dataclasses to keep `_special` properties on parent classes, #3043 by @zulrang * Add a `TypedDict` class for error objects, #3038 by @matthewhughes934 * Fix support for using a subclass of an annotation as a default, #3018 by @JacobHayes * make `create_model_from_typeddict` mypy compliant, #3008 by @PrettyWood * Make multiple inheritance work when using `PrivateAttr`, #2989 by @hmvp * Parse environment variables as JSON, if they have a `Union` type with a complex subfield, #2936 by @cbartz * Prevent `StrictStr` permitting `Enum` values where the 
enum inherits from `str`, #2929 by @samuelcolvin * Make `SecretsSettingsSource` parse values being assigned to fields of complex types when sourced from a secrets file, just as when sourced from environment variables, #2917 by @davidmreed * add a dark mode to _pydantic_ documentation, #2913 by @gbdlin * Make `pydantic-mypy` plugin compatible with `pyproject.toml` configuration, consistent with `mypy` changes. See the [doc](https://docs.pydantic.dev/mypy_plugin/#configuring-the-plugin) for more information, #2908 by @jrwalk * add Python 3.10 support, #2885 by @PrettyWood * Correctly parse generic models with `Json[T]`, #2860 by @geekingfrog * Update contrib docs re: Python version to use for building docs, #2856 by @paxcodes * Clarify documentation about _pydantic_'s support for custom validation and strict type checking, despite _pydantic_ being primarily a parsing library, #2855 by @paxcodes * Fix schema generation for `Deque` fields, #2810 by @sergejkozin * fix an edge case when mixing constraints and `Literal`, #2794 by @PrettyWood * Fix postponed annotation resolution for `NamedTuple` and `TypedDict` when they're used directly as the type of fields within Pydantic models, #2760 by @jameysharp * Fix bug when `mypy` plugin fails on `construct` method call for `BaseSettings` derived classes, #2753 by @uriyyo * Add function overloading for a `pydantic.create_model` function, #2748 by @uriyyo * Fix mypy plugin issue with self field declaration, #2743 by @uriyyo * The colon at the end of the line "The fields which were supplied when user was initialised:" suggests that the code following it is related. Changed it to a period, #2733 by @krisaoe * Renamed variable `schema` to `schema_` to avoid shadowing of global variable name, #2724 by @shahriyarr * Add support for autocomplete in VS Code via `__dataclass_transform__`, #2721 by @tiangolo * add missing type annotations in `BaseConfig` and handle `max_length = 0`, #2719 by @PrettyWood * Change `orm_mode` checking to allow recursive ORM mode parsing with dicts, #2718 by @nuno-andre * Add episode 313 of the *Talk Python To Me* podcast, where Michael Kennedy and Samuel Colvin discuss *pydantic*, to the docs, #2712 by @RatulMaharaj * fix JSON schema generation when a field is of type `NamedTuple` and has a default value, #2707 by @PrettyWood * `Enum` fields now properly support extra kwargs in schema generation, #2697 by @sammchardy * **Breaking Change, see #3780**: Make serialization of referenced pydantic models possible, #2650 by @PrettyWood * Add `uniqueItems` option to `ConstrainedList`, #2618 by @nuno-andre * Try to evaluate forward refs automatically at model creation, #2588 by @uriyyo * Switch docs preview and coverage display to use [smokeshow](https://smokeshow.helpmanual.io/), #2580 by @samuelcolvin * Add `__version__` attribute to pydantic module, #2572 by @paxcodes * Add `postgresql+asyncpg`, `postgresql+pg8000`, `postgresql+psycopg2`, `postgresql+psycopg2cffi`, `postgresql+py-postgresql` and `postgresql+pygresql` schemes for `PostgresDsn`, #2567 by @postgres-asyncpg * Enable the Hypothesis plugin to generate a constrained decimal when the `decimal_places` argument is specified, #2524 by @cwe5590 * Allow `collections.abc.Callable` to be used as type in Python 3.9, #2519 by @daviskirk * Documentation update how to custom compile pydantic when using pip install, small change in `setup.py` to allow for custom CFLAGS when compiling, #2517 by @peterroelants * remove side effect of `default_factory` to run it only once even if 
`Config.validate_all` is set, #2515 by @PrettyWood * Add lookahead to ip regexes for `AnyUrl` hosts. This allows urls with DNS labels looking like IPs to validate as they are perfectly valid host names, #2512 by @sbv-csis * Set `minItems` and `maxItems` in generated JSON schema for fixed-length tuples, #2497 by @PrettyWood * Add `strict` argument to `conbytes`, #2489 by @koxudaxi * Support user defined generic field types in generic models, #2465 by @daviskirk * Add an example and a short explanation of subclassing `GetterDict` to docs, #2463 by @nuno-andre * add `KafkaDsn` type, `HttpUrl` now has default port 80 for http and 443 for https, #2447 by @MihanixA * Add `PastDate` and `FutureDate` types, #2425 by @Kludex * Support generating schema for `Generic` fields with subtypes, #2375 by @maximberg * fix(encoder): serialize `NameEmail` to str, #2341 by @alecgerona * add `Config.smart_union` to prevent coercion in `Union` if possible, see [the doc](https://docs.pydantic.dev/usage/model_config/#smart-union) for more information, #2092 by @PrettyWood * Add ability to use `typing.Counter` as a model field type, #2060 by @uriyyo * Add parameterised subclasses to `__bases__` when constructing new parameterised classes, so that `A <: B => A[int] <: B[int]`, #2007 by @diabolo-dan * Create `FileUrl` type that allows URLs that conform to [RFC 8089](https://tools.ietf.org/html/rfc8089#section-2). Add `host_required` parameter, which is `True` by default (`AnyUrl` and subclasses), `False` in `RedisDsn`, `FileUrl`, #1983 by @vgerak * add `confrozenset()`, analogous to `conset()` and `conlist()`, #1897 by @PrettyWood * stop calling parent class `root_validator` if overridden, #1895 by @PrettyWood * Add `repr` (defaults to `True`) parameter to `Field`, to hide it from the default representation of the `BaseModel`, #1831 by @fnep * Accept empty query/fragment URL parts, #1807 by @xavier ## v1.8.2 (2021-05-11) !!! warning A security vulnerability, level "moderate" is fixed in v1.8.2. Please upgrade **ASAP**. 
See security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) * **Security fix:** Fix `date` and `datetime` parsing so passing either `'infinity'` or `float('inf')` (or their negative values) does not cause an infinite loop, see security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) * fix schema generation with Enum by generating a valid name, #2575 by @PrettyWood * fix JSON schema generation with a `Literal` of an enum member, #2536 by @PrettyWood * Fix bug with configurations declarations that are passed as keyword arguments during class creation, #2532 by @uriyyo * Allow passing `json_encoders` in class kwargs, #2521 by @layday * support arbitrary types with custom `__eq__`, #2483 by @PrettyWood * support `Annotated` in `validate_arguments` and in generic models with Python 3.9, #2483 by @PrettyWood ## v1.8.1 (2021-03-03) Bug fixes for regressions and new features from `v1.8` * allow elements of `Config.field` to update elements of a `Field`, #2461 by @samuelcolvin * fix validation with a `BaseModel` field and a custom root type, #2449 by @PrettyWood * expose `Pattern` encoder to `fastapi`, #2444 by @PrettyWood * enable the Hypothesis plugin to generate a constrained float when the `multiple_of` argument is specified, #2442 by @tobi-lipede-oodle * Avoid `RecursionError` when using some types like `Enum` or `Literal` with generic models, #2436 by @PrettyWood * do not overwrite declared `__hash__` in subclasses of a model, #2422 by @PrettyWood * fix `mypy` complaints on `Path` and `UUID` related custom types, #2418 by @PrettyWood * Support properly variable length tuples of compound types, #2416 by @PrettyWood ## v1.8 (2021-02-26) Thank you to pydantic's sponsors: @jorgecarleitao, @BCarley, @chdsbd, @tiangolo, @matin, @linusg, @kevinalh, @koxudaxi, @timdrijvers, @mkeen, @meadsteve, @ginomempin, @primer-io, @and-semakin, @tomthorogood, @AjitZK, @westonsteimel, @Mazyod, @christippett, @CarlosDomingues, @Kludex, @r-m-n for their kind support. ### Highlights * [Hypothesis plugin](https://docs.pydantic.dev/hypothesis_plugin/) for testing, #2097 by @Zac-HD * support for [`NamedTuple` and `TypedDict`](https://docs.pydantic.dev/usage/types/#annotated-types), #2216 by @PrettyWood * Support [`Annotated` hints on model fields](https://docs.pydantic.dev/usage/schema/#typingannotated-fields), #2147 by @JacobHayes * [`frozen` parameter on `Config`](https://docs.pydantic.dev/usage/model_config/) to allow models to be hashed, #1880 by @rhuille ### Changes * **Breaking Change**, remove old deprecation aliases from v1, #2415 by @samuelcolvin: * remove notes on migrating to v1 in docs * remove `Schema` which was replaced by `Field` * remove `Config.case_insensitive` which was replaced by `Config.case_sensitive` (default `False`) * remove `Config.allow_population_by_alias` which was replaced by `Config.allow_population_by_field_name` * remove `model.fields` which was replaced by `model.__fields__` * remove `model.to_string()` which was replaced by `str(model)` * remove `model.__values__` which was replaced by `model.__dict__` * **Breaking Change:** always validate only first sublevel items with `each_item`. 
There were indeed some edge cases with some compound types where the validated items were the last sublevel ones, #1933 by @PrettyWood * Update docs extensions to fix local syntax highlighting, #2400 by @daviskirk * fix: allow `utils.lenient_issubclass` to handle `typing.GenericAlias` objects like `list[str]` in Python >= 3.9, #2399 by @daviskirk * Improve field declaration for _pydantic_ `dataclass` by allowing the usage of _pydantic_ `Field` or `'metadata'` kwarg of `dataclasses.field`, #2384 by @PrettyWood * Making `typing-extensions` a required dependency, #2368 by @samuelcolvin * Make `resolve_annotations` more lenient, allowing for missing modules, #2363 by @samuelcolvin * Allow configuring models through class kwargs, #2356 by @Bobronium * Prevent `Mapping` subclasses from always being coerced to `dict`, #2325 by @ofek * fix: allow `None` for type `Optional[conset / conlist]`, #2320 by @PrettyWood * Support empty tuple type, #2318 by @PrettyWood * fix: `python_requires` metadata to require >=3.6.1, #2306 by @hukkinj1 * Properly encode `Decimal` with, or without any decimal places, #2293 by @hultner * fix: update `__fields_set__` in `BaseModel.copy(update=…)`, #2290 by @PrettyWood * fix: keep order of fields with `BaseModel.construct()`, #2281 by @PrettyWood * Support generating schema for Generic fields, #2262 by @maximberg * Fix `validate_decorator` so `**kwargs` doesn't exclude values when the keyword has the same name as the `*args` or `**kwargs` names, #2251 by @cybojenix * Prevent overriding positional arguments with keyword arguments in `validate_arguments`, as per behaviour with native functions, #2249 by @cybojenix * add documentation for `con*` type functions, #2242 by @tayoogunbiyi * Support custom root type (aka `__root__`) when using `parse_obj()` with nested models, #2238 by @PrettyWood * Support custom root type (aka `__root__`) with `from_orm()`, #2237 by @PrettyWood * ensure cythonized functions are left untouched when creating models, based on #1944 by @kollmats, #2228 by @samuelcolvin * Resolve forward refs for stdlib dataclasses converted into _pydantic_ ones, #2220 by @PrettyWood * Add support for `NamedTuple` and `TypedDict` types. Those two types are now handled and validated when used inside `BaseModel` or _pydantic_ `dataclass`. Two utils are also added `create_model_from_namedtuple` and `create_model_from_typeddict`, #2216 by @PrettyWood * Do not ignore annotated fields when type is `Union[Type[...], ...]`, #2213 by @PrettyWood * Raise a user-friendly `TypeError` when a `root_validator` does not return a `dict` (e.g. `None`), #2209 by @masalim2 * Add a `FrozenSet[str]` type annotation to the `allowed_schemes` argument on the `strict_url` field type, #2198 by @Midnighter * add `allow_mutation` constraint to `Field`, #2195 by @sblack-usu * Allow `Field` with a `default_factory` to be used as an argument to a function decorated with `validate_arguments`, #2176 by @thomascobb * Allow non-existent secrets directory by only issuing a warning, #2175 by @davidolrik * fix URL regex to parse fragment without query string, #2168 by @andrewmwhite * fix: ensure to always return one of the values in `Literal` field type, #2166 by @PrettyWood * Support `typing.Annotated` hints on model fields. 
A `Field` may now be set in the type hint with `Annotated[..., Field(...)]`; all other annotations are ignored but still visible with `get_type_hints(..., include_extras=True)`, #2147 by @JacobHayes * Added `StrictBytes` type as well as `strict=False` option to `ConstrainedBytes`, #2136 by @rlizzo * added `Config.anystr_lower` and `to_lower` kwarg to `constr` and `conbytes`, #2134 by @tayoogunbiyi * Support plain `typing.Tuple` type, #2132 by @PrettyWood * Add a bound method `validate` to functions decorated with `validate_arguments` to validate parameters without actually calling the function, #2127 by @PrettyWood * Add the ability to customize settings sources (add / disable / change priority order), #2107 by @kozlek * Fix mypy complaints about most custom _pydantic_ types, #2098 by @PrettyWood * Add a [Hypothesis](https://hypothesis.readthedocs.io/) plugin for easier [property-based testing](https://increment.com/testing/in-praise-of-property-based-testing/) with Pydantic's custom types - [usage details here](https://docs.pydantic.dev/hypothesis_plugin/), #2097 by @Zac-HD * add validator for `None`, `NoneType` or `Literal[None]`, #2095 by @PrettyWood * Properly handle fields of type `Callable` with a default value, #2094 by @PrettyWood * Updated `create_model` return type annotation to return type which inherits from `__base__` argument, #2071 by @uriyyo * Add merged `json_encoders` inheritance, #2064 by @art049 * allow overwriting `ClassVar`s in sub-models without having to re-annotate them, #2061 by @layday * add default encoder for `Pattern` type, #2045 by @PrettyWood * Add `NonNegativeInt`, `NonPositiveInt`, `NonNegativeFloat`, `NonPositiveFloat`, #1975 by @mdavis-xyz * Use % for percentage in string format of colors, #1960 by @EdwardBetts * Fixed issue causing `KeyError` to be raised when building schema from multiple `BaseModel` with the same names declared in separate classes, #1912 by @JSextonn * Add `rediss` (Redis over SSL) protocol to `RedisDsn`. Allow URLs without `user` part (e.g., `rediss://:pass@localhost`), #1911 by @TrDex * Add a new `frozen` boolean parameter to `Config` (default: `False`). Setting `frozen=True` does everything that `allow_mutation=False` does, and also generates a `__hash__()` method for the model. This makes instances of the model potentially hashable if all the attributes are hashable, #1880 by @rhuille * fix schema generation with multiple Enums having the same name, #1857 by @PrettyWood * Added support for 13/19-digit VISA credit cards in `PaymentCardNumber` type, #1416 by @AlexanderSov * fix: prevent `RecursionError` while using recursive `GenericModel`s, #1370 by @xppt * use `enum` for `typing.Literal` in JSON schema, #1350 by @PrettyWood * Fix: some recursive models did not require `update_forward_refs` and silently behaved incorrectly, #1201 by @PrettyWood * Fix bug where generic models with fields where the typevar is nested in another type `a: List[T]` are considered to be concrete. This allows these models to be subclassed and composed as expected, #947 by @daviskirk * Add `Config.copy_on_model_validation` flag.
When set to `False`, _pydantic_ will keep models used as fields untouched on validation instead of reconstructing (copying) them, #265 by @PrettyWood ## v1.7.4 (2021-05-11) * **Security fix:** Fix `date` and `datetime` parsing so passing either `'infinity'` or `float('inf')` (or their negative values) does not cause an infinite loop, See security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) ## v1.7.3 (2020-11-30) Thank you to pydantic's sponsors: @timdrijvers, @BCarley, @chdsbd, @tiangolo, @matin, @linusg, @kevinalh, @jorgecarleitao, @koxudaxi, @primer-api, @mkeen, @meadsteve for their kind support. * fix: set right default value for required (optional) fields, #2142 by @PrettyWood * fix: support `underscore_attrs_are_private` with generic models, #2138 by @PrettyWood * fix: update all modified field values in `root_validator` when `validate_assignment` is on, #2116 by @PrettyWood * Allow pickling of `pydantic.dataclasses.dataclass` dynamically created from a built-in `dataclasses.dataclass`, #2111 by @aimestereo * Fix a regression where Enum fields would not propagate keyword arguments to the schema, #2109 by @bm424 * Ignore `__doc__` as private attribute when `Config.underscore_attrs_are_private` is set, #2090 by @PrettyWood ## v1.7.2 (2020-11-01) * fix slow `GenericModel` concrete model creation, allow `GenericModel` concrete name reusing in module, #2078 by @Bobronium * keep the order of the fields when `validate_assignment` is set, #2073 by @PrettyWood * forward all the params of the stdlib `dataclass` when converted into _pydantic_ `dataclass`, #2065 by @PrettyWood ## v1.7.1 (2020-10-28) Thank you to pydantic's sponsors: @timdrijvers, @BCarley, @chdsbd, @tiangolo, @matin, @linusg, @kevinalh, @jorgecarleitao, @koxudaxi, @primer-api, @mkeen for their kind support. * fix annotation of `validate_arguments` when passing configuration as argument, #2055 by @layday * Fix mypy assignment error when using `PrivateAttr`, #2048 by @aphedges * fix `underscore_attrs_are_private` causing `TypeError` when overriding `__init__`, #2047 by @samuelcolvin * Fixed regression introduced in v1.7 involving exception handling in field validators when `validate_assignment=True`, #2044 by @johnsabath * fix: _pydantic_ `dataclass` can inherit from stdlib `dataclass` and `Config.arbitrary_types_allowed` is supported, #2042 by @PrettyWood ## v1.7 (2020-10-26) Thank you to pydantic's sponsors: @timdrijvers, @BCarley, @chdsbd, @tiangolo, @matin, @linusg, @kevinalh, @jorgecarleitao, @koxudaxi, @primer-api for their kind support. ### Highlights * Python 3.9 support, thanks @PrettyWood * [Private model attributes](https://docs.pydantic.dev/usage/models/#private-model-attributes), thanks @Bobronium * ["secrets files" support in `BaseSettings`](https://docs.pydantic.dev/usage/settings/#secret-support), thanks @mdgilene * [convert stdlib dataclasses to pydantic dataclasses and use stdlib dataclasses in models](https://docs.pydantic.dev/usage/dataclasses/#stdlib-dataclasses-and-pydantic-dataclasses), thanks @PrettyWood ### Changes * **Breaking Change:** remove `__field_defaults__`, add `default_factory` support with `BaseModel.construct`. 
Use `.get_default()` method on fields in `__fields__` attribute instead, #1732 by @PrettyWood * Rearrange CI to run linting as a separate job, split install recipes for different tasks, #2020 by @samuelcolvin * Allows subclasses of generic models to make some, or all, of the superclass's type parameters concrete, while also defining new type parameters in the subclass, #2005 by @choogeboom * Call validator with the correct `values` parameter type in `BaseModel.__setattr__`, when `validate_assignment = True` in model config, #1999 by @me-ransh * Force `fields.Undefined` to be a singleton object, fixing inherited generic model schemas, #1981 by @daviskirk * Include tests in source distributions, #1976 by @sbraz * Add ability to use `min_length/max_length` constraints with secret types, #1974 by @uriyyo * Also check `root_validators` when `validate_assignment` is on, #1971 by @PrettyWood * Fix const validators not running when custom validators are present, #1957 by @hmvp * add `deque` to field types, #1935 by @wozniakty * add basic support for Python 3.9, #1832 by @PrettyWood * Fix typo in the anchor of exporting_models.md#modelcopy and incorrect description, #1821 by @KimMachineGun * Added ability for `BaseSettings` to read "secret files", #1820 by @mdgilene * add `parse_raw_as` utility function, #1812 by @PrettyWood * Support home directory relative paths for `dotenv` files (e.g. `~/.env`), #1803 by @PrettyWood * Clarify documentation for `parse_file` to show that the argument should be a file *path* not a file-like object, #1794 by @mdavis-xyz * Fix false positive from mypy plugin when a class nested within a `BaseModel` is named `Model`, #1770 by @selimb * add basic support of Pattern type in schema generation, #1767 by @PrettyWood * Support custom title, description and default in schema of enums, #1748 by @PrettyWood * Properly represent `Literal` Enums when `use_enum_values` is True, #1747 by @noelevans * Allows timezone information to be added to strings to be formatted as time objects. Permitted formats are `Z` for UTC or an offset for absolute positive or negative time shifts. Or the timezone data can be omitted, #1744 by @noelevans * Add stub `__init__` with Python 3.6 signature for `ForwardRef`, #1738 by @sirtelemak * Fix behaviour with forward refs and optional fields in nested models, #1736 by @PrettyWood * add `Enum` and `IntEnum` as valid types for fields, #1735 by @PrettyWood * Change default value of `__module__` argument of `create_model` from `None` to `'pydantic.main'`. Set reference of created concrete model to its module to allow pickling (not applied to models created in functions), #1686 by @Bobronium * Add private attributes support, #1679 by @Bobronium * add `config` to `@validate_arguments`, #1663 by @samuelcolvin * Allow descendant Settings models to override env variable names for the fields defined in parent Settings models with `env` in their `Config`. Previously only `env_prefix` configuration option was applicable, #1561 by @ojomio * Support `ref_template` when creating schema `$ref`s, #1479 by @kilo59 * Add a `__call__` stub to `PyObject` so that mypy will know that it is callable, #1352 by @brianmaissy * `pydantic.dataclasses.dataclass` decorator now supports built-in `dataclasses.dataclass`. It is hence possible to convert an existing `dataclass` easily to add *pydantic* validation.
Moreover, nested dataclasses are also supported, #744 by @PrettyWood ## v1.6.2 (2021-05-11) * **Security fix:** Fix `date` and `datetime` parsing so passing either `'infinity'` or `float('inf')` (or their negative values) does not cause an infinite loop, See security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) ## v1.6.1 (2020-07-15) * fix validation and parsing of nested models with `default_factory`, #1710 by @PrettyWood ## v1.6 (2020-07-11) Thank you to pydantic's sponsors: @matin, @tiangolo, @chdsbd, @jorgecarleitao, and 1 anonymous sponsor for their kind support. * Modify validators for `conlist` and `conset` to not have `always=True`, #1682 by @samuelcolvin * add port check to `AnyUrl` (can't exceed 65535); ports are 16-bit unsigned integers: `0 <= port <= 2**16-1`, src: [rfc793 header format](https://tools.ietf.org/html/rfc793#section-3.1), #1654 by @flapili * Document default `regex` anchoring semantics, #1648 by @yurikhan * Use `chain.from_iterable` in class_validators.py. This is a faster and more idiomatic way of using `itertools.chain`. Instead of computing all the items in the iterable and storing them in memory, they are computed one-by-one and never stored as a huge list. This can save on both runtime and memory space, #1642 by @cool-RR * Add `conset()`, analogous to `conlist()`, #1623 by @patrickkwang * make *pydantic* errors (un)picklable, #1616 by @PrettyWood * Allow custom encoding for `dotenv` files, #1615 by @PrettyWood * Ensure `SchemaExtraCallable` is always defined to get type hints on BaseConfig, #1614 by @PrettyWood * Update datetime parser to support negative timestamps, #1600 by @mlbiche * Update mypy, remove `AnyType` alias for `Type[Any]`, #1598 by @samuelcolvin * Adjust handling of root validators so that errors are aggregated from _all_ failing root validators, instead of reporting on only the first root validator to fail, #1586 by @beezee * Make `__modify_schema__` on Enums apply to the enum schema rather than fields that use the enum, #1581 by @therefromhere * Fix behavior of `__all__` key when used in conjunction with index keys in advanced include/exclude of fields that are sequences, #1579 by @xspirus * Subclass validators do not run when referencing a `List` field defined in a parent class when `each_item=True`. Added an example to the docs illustrating this, #1566 by @samueldeklund * change `schema.field_class_to_schema` to support `frozenset` in schema, #1557 by @wangpeibao * Call `__modify_schema__` only for the field schema, #1552 by @PrettyWood * Move the assignment of `field.validate_always` in `fields.py` so the `always` parameter of validators works on inheritance, #1545 by @dcHHH * Added support for UUID instantiation through 16-byte strings such as `b'\x12\x34\x56\x78' * 4`.
This was done to support `BINARY(16)` columns in sqlalchemy, #1541 by @shawnwall * Add a test assertion that `default_factory` can return a singleton, #1523 by @therefromhere * Add `NameEmail.__eq__` so duplicate `NameEmail` instances are evaluated as equal, #1514 by @stephen-bunn * Add datamodel-code-generator link in pydantic document site, #1500 by @koxudaxi * Added a "Discussion of Pydantic" section to the documentation, with a link to "Pydantic Introduction" video by Alexander Hultnér, #1499 by @hultner * Avoid some side effects of `default_factory` by calling it only once if possible and by not setting a default value in the schema, #1491 by @PrettyWood * Added docs about dumping dataclasses to JSON, #1487 by @mikegrima * Make `BaseModel.__signature__` class-only, so getting `__signature__` from model instance will raise `AttributeError`, #1466 by @Bobronium * include `'format': 'password'` in the schema for secret types, #1424 by @atheuz * Modify schema constraints on `ConstrainedFloat` so that `exclusiveMinimum` and minimum are not included in the schema if they are equal to `-math.inf` and `exclusiveMaximum` and `maximum` are not included if they are equal to `math.inf`, #1417 by @vdwees * Squash internal `__root__` dicts in `.dict()` (and, by extension, in `.json()`), #1414 by @patrickkwang * Move `const` validator to post-validators so it validates the parsed value, #1410 by @selimb * Fix model validation to handle nested literals, e.g. `Literal['foo', Literal['bar']]`, #1364 by @DBCerigo * Remove `user_required = True` from `RedisDsn`, neither user nor password are required, #1275 by @samuelcolvin * Remove extra `allOf` from schema for fields with `Union` and custom `Field`, #1209 by @mostaphaRoudsari * Updates OpenAPI schema generation to output all enums as separate models. 
Instead of inlining the enum values in the model schema, models now use a `$ref` property to point to the enum definition, #1173 by @calvinwyoung ## v1.5.1 (2020-04-23) * Signature generation with `extra: allow` never uses a field name, #1418 by @prettywood * Avoid mutating `Field` default value, #1412 by @prettywood ## v1.5 (2020-04-18) * Make includes/excludes arguments for `.dict()`, `._iter()`, ..., immutable, #1404 by @AlexECX * Always use a field's real name with includes/excludes in `model._iter()`, regardless of `by_alias`, #1397 by @AlexECX * Update constr regex example to include start and end lines, #1396 by @lmcnearney * Confirm that shallow `model.copy()` does make a shallow copy of attributes, #1383 by @samuelcolvin * Renaming `model_name` argument of `main.create_model()` to `__model_name` to allow using `model_name` as a field name, #1367 by @kittipatv * Replace raising of exception to silent passing for non-Var attributes in mypy plugin, #1345 by @b0g3r * Remove `typing_extensions` dependency for Python 3.8, #1342 by @prettywood * Make `SecretStr` and `SecretBytes` initialization idempotent, #1330 by @atheuz * document making secret types dumpable using the json method, #1328 by @atheuz * Move all testing and build to github actions, add windows and macos binaries, thank you @StephenBrown2 for much help, #1326 by @samuelcolvin * fix card number length check in `PaymentCardNumber`, `PaymentCardBrand` now inherits from `str`, #1317 by @samuelcolvin * Have `BaseModel` inherit from `Representation` to make mypy happy when overriding `__str__`, #1310 by @FuegoFro * Allow `None` as input to all optional list fields, #1307 by @prettywood * Add `datetime` field to `default_factory` example, #1301 by @StephenBrown2 * Allow subclasses of known types to be encoded with superclass encoder, #1291 by @StephenBrown2 * Exclude exported fields from all elements of a list/tuple of submodels/dicts with `'__all__'`, #1286 by @masalim2 * Add pydantic.color.Color objects as available input for Color fields, #1258 by @leosussan * In examples, type nullable fields as `Optional`, so that these are valid mypy annotations, #1248 by @kokes * Make `pattern_validator()` accept pre-compiled `Pattern` objects. 
Fix `str_validator()` return type to `str`, #1237 by @adamgreg * Document how to manage Generics and inheritance, #1229 by @esadruhn * `update_forward_refs()` method of BaseModel now copies `__dict__` of class module instead of modifying it, #1228 by @paul-ilyin * Support instance methods and class methods with `@validate_arguments`, #1222 by @samuelcolvin * Add `default_factory` argument to `Field` to create a dynamic default value by passing a zero-argument callable, #1210 by @prettywood * add support for `NewType` of `List`, `Optional`, etc., #1207 by @Kazy * fix mypy signature for `root_validator`, #1192 by @samuelcolvin * Fixed parsing of nested 'custom root type' models, #1190 by @Shados * Add `validate_arguments` function decorator which checks the arguments to a function match type annotations, #1179 by @samuelcolvin * Add `__signature__` to models, #1034 by @Bobronium * Refactor `._iter()` method, 10x speed boost for `dict(model)`, #1017 by @Bobronium ## v1.4 (2020-01-24) * **Breaking Change:** alias precedence logic changed so aliases on a field always take priority over an alias from `alias_generator` to avoid buggy/unexpected behaviour, see [here](https://docs.pydantic.dev/usage/model_config/#alias-precedence) for details, #1178 by @samuelcolvin * Add support for unicode and punycode in TLDs, #1182 by @jamescurtin * Fix `cls` argument in validators during assignment, #1172 by @samuelcolvin * completing Luhn algorithm for `PaymentCardNumber`, #1166 by @cuencandres * add support for generics that implement `__get_validators__` like a custom data type, #1159 by @tiangolo * add support for infinite generators with `Iterable`, #1152 by @tiangolo * fix `url_regex` to accept schemes with `+`, `-` and `.` after the first character, #1142 by @samuelcolvin * move `version_info()` to `version.py`, suggest its use in issues, #1138 by @samuelcolvin * Improve pydantic import time by roughly 50% by deferring some module loading and regex compilation, #1127 by @samuelcolvin * Fix `EmailStr` and `NameEmail` to accept instances of themselves in cython, #1126 by @koxudaxi * Pass model class to the `Config.schema_extra` callable, #1125 by @therefromhere * Fix regex for username and password in URLs, #1115 by @samuelcolvin * Add support for nested generic models, #1104 by @dmontagu * add `__all__` to `__init__.py` to prevent "implicit reexport" errors from mypy, #1072 by @samuelcolvin * Add support for using "dotenv" files with `BaseSettings`, #1011 by @acnebs ## v1.3 (2019-12-21) * Change `schema` and `schema_model` to handle dataclasses by using their `__pydantic_model__` feature, #792 by @aviramha * Added option for `root_validator` to be skipped if values validation fails using keyword `skip_on_failure=True`, #1049 by @aviramha * Allow `Config.schema_extra` to be a callable so that the generated schema can be post-processed, #1054 by @selimb * Update mypy to version 0.750, #1057 by @dmontagu * Trick Cython into allowing str subclassing, #1061 by @skewty * Prevent type attributes being added to schema unless the attribute `__schema_attributes__` is `True`, #1064 by @samuelcolvin * Change `BaseModel.parse_file` to use `Config.json_loads`, #1067 by @kierandarcy * Fix for optional `Json` fields, #1073 by @volker48 * Change the default number of threads used when compiling with cython to one, allow override via the `CYTHON_NTHREADS` environment variable, #1074 by @samuelcolvin * Run FastAPI tests during Pydantic's CI tests, #1075 by @tiangolo * More mypy strictness constraints, and associated tweaks
to type annotations, #1077 by @samuelcolvin * Add `__eq__` to SecretStr and SecretBytes to allow "value equals", #1079 by @sbv-trueenergy * Fix schema generation for nested None case, #1088 by @lutostag * Consistent checks for sequence like objects, #1090 by @samuelcolvin * Fix `Config` inheritance on `BaseSettings` when used with `env_prefix`, #1091 by @samuelcolvin * Fix for `__modify_schema__` when it conflicted with `field_class_to_schema*`, #1102 by @samuelcolvin * docs: Fix explanation of case sensitive environment variable names when populating `BaseSettings` subclass attributes, #1105 by @tribals * Rename django-rest-framework benchmark in documentation, #1119 by @frankie567 ## v1.2 (2019-11-28) * **Possible Breaking Change:** Add support for required `Optional` with `name: Optional[AnyType] = Field(...)` and refactor `ModelField` creation to preserve `required` parameter value, #1031 by @tiangolo; see [here](https://docs.pydantic.dev/usage/models/#required-optional-fields) for details * Add benchmarks for `cattrs`, #513 by @sebastianmika * Add `exclude_none` option to `dict()` and friends, #587 by @niknetniko * Add benchmarks for `valideer`, #670 by @gsakkis * Add `parse_obj_as` and `parse_file_as` functions for ad-hoc parsing of data into arbitrary pydantic-compatible types, #934 by @dmontagu * Add `allow_reuse` argument to validators, thus allowing validator reuse, #940 by @dmontagu * Add support for mapping types for custom root models, #958 by @dmontagu * Mypy plugin support for dataclasses, #966 by @koxudaxi * Add support for dataclasses default factory, #968 by @ahirner * Add a `ByteSize` type for converting byte string (`1GB`) to plain bytes, #977 by @dgasmith * Fix mypy complaint about `@root_validator(pre=True)`, #984 by @samuelcolvin * Add manylinux binaries for Python 3.8 to pypi, also support manylinux2010, #994 by @samuelcolvin * Adds ByteSize conversion to another unit, #995 by @dgasmith * Fix `__str__` and `__repr__` inheritance for models, #1022 by @samuelcolvin * add testimonials section to docs, #1025 by @sullivancolin * Add support for `typing.Literal` for Python 3.8, #1026 by @dmontagu ## v1.1.1 (2019-11-20) * Fix bug where use of complex fields on sub-models could cause fields to be incorrectly configured, #1015 by @samuelcolvin ## v1.1 (2019-11-07) * Add a mypy plugin for type checking `BaseModel.__init__` and more, #722 by @dmontagu * Change return type typehint for `GenericModel.__class_getitem__` to prevent PyCharm warnings, #936 by @dmontagu * Fix usage of `Any` to allow `None`, also support `TypeVar` thus allowing use of un-parameterised collection types e.g. 
`Dict` and `List`, #962 by @samuelcolvin * Set `FieldInfo` on subfields to fix schema generation for complex nested types, #965 by @samuelcolvin ## v1.0 (2019-10-23) * **Breaking Change:** deprecate the `Model.fields` property, use `Model.__fields__` instead, #883 by @samuelcolvin * **Breaking Change:** Change the precedence of aliases so child model aliases override parent aliases, including using `alias_generator`, #904 by @samuelcolvin * **Breaking change:** Rename `skip_defaults` to `exclude_unset`, and add ability to exclude actual defaults, #915 by @dmontagu * Add `**kwargs` to `pydantic.main.ModelMetaclass.__new__` so `__init_subclass__` can take custom parameters on extended `BaseModel` classes, #867 by @retnikt * Fix field of a type that has a default value, #880 by @koxudaxi * Use `FutureWarning` instead of `DeprecationWarning` when `alias` instead of `env` is used for settings models, #881 by @samuelcolvin * Fix issue with `BaseSettings` inheritance and `alias` getting set to `None`, #882 by @samuelcolvin * Modify `__repr__` and `__str__` methods to be consistent across all public classes, add `__pretty__` to support python-devtools, #884 by @samuelcolvin * deprecation warning for `case_insensitive` on `BaseSettings` config, #885 by @samuelcolvin * For `BaseSettings` merge environment variables and in-code values recursively, as long as they create a valid object when merged together, to allow splitting init arguments, #888 by @idmitrievsky * change secret types example, #890 by @ashears * Change the signature of `Model.construct()` to be more user-friendly, document `construct()` usage, #898 by @samuelcolvin * Add example for the `construct()` method, #907 by @ashears * Improve use of `Field` constraints on complex types, raise an error if constraints are not enforceable, also support tuples with an ellipsis `Tuple[X, ...]`, `Sequence` and `FrozenSet` in schema, #909 by @samuelcolvin * update docs for bool missing valid value, #911 by @trim21 * Better `str`/`repr` logic for `ModelField`, #912 by @samuelcolvin * Fix `ConstrainedList`, update schema generation to reflect `min_items` and `max_items` `Field()` arguments, #917 by @samuelcolvin * Allow abstract sets (e.g. dict keys) in the `include` and `exclude` arguments of `dict()`, #921 by @samuelcolvin * Fix JSON serialization errors on `ValidationError.json()` by using `pydantic_encoder`, #922 by @samuelcolvin * Clarify usage of `remove_untouched`, improve error message for types with no validators, #926 by @retnikt ## v1.0b2 (2019-10-07) * Mark `StrictBool` typecheck as `bool` to allow for default values without mypy errors, #690 by @dmontagu * Transfer the documentation build from sphinx to mkdocs, re-write much of the documentation, #856 by @samuelcolvin * Add support for custom naming schemes for `GenericModel` subclasses, #859 by @dmontagu * Add `if TYPE_CHECKING:` to the excluded lines for test coverage, #874 by @dmontagu * Rename `allow_population_by_alias` to `allow_population_by_field_name`, remove unnecessary warning about it, #875 by @samuelcolvin ## v1.0b1 (2019-10-01) * **Breaking Change:** rename `Schema` to `Field`, make it a function to placate mypy, #577 by @samuelcolvin * **Breaking Change:** modify parsing behavior for `bool`, #617 by @dmontagu * **Breaking Change:** `get_validators` is no longer recognised, use `__get_validators__`.
`Config.ignore_extra` and `Config.allow_extra` are no longer recognised, use `Config.extra`, #720 by @samuelcolvin * **Breaking Change:** modify default config settings for `BaseSettings`; `case_insensitive` renamed to `case_sensitive`, default changed to `case_sensitive = False`, `env_prefix` default changed to `''` - e.g. no prefix, #721 by @dmontagu * **Breaking change:** Implement `root_validator` and rename root errors from `__obj__` to `__root__`, #729 by @samuelcolvin * **Breaking Change:** alter the behaviour of `dict(model)` so that sub-models are no longer converted to dictionaries, #733 by @samuelcolvin * **Breaking change:** Added `initvars` support to `post_init_post_parse`, #748 by @Raphael-C-Almeida * **Breaking Change:** Make `BaseModel.json()` only serialize the `__root__` key for models with custom root, #752 by @dmontagu * **Breaking Change:** complete rewrite of `URL` parsing logic, #755 by @samuelcolvin * **Breaking Change:** preserve superclass annotations for field-determination when not provided in subclass, #757 by @dmontagu * **Breaking Change:** `BaseSettings` now uses the special `env` settings to define which environment variables to read, not aliases, #847 by @samuelcolvin * add support for `assert` statements inside validators, #653 by @abdusco * Update documentation to specify the use of `pydantic.dataclasses.dataclass` and subclassing `pydantic.BaseModel`, #710 by @maddosaurus * Allow custom JSON decoding and encoding via `json_loads` and `json_dumps` `Config` properties, #714 by @samuelcolvin * make all annotated fields occur in the order declared, #715 by @dmontagu * use pytest to test `mypy` integration, #735 by @dmontagu * add `__repr__` method to `ErrorWrapper`, #738 by @samuelcolvin * Added support for `FrozenSet` members in dataclasses, and a better error when attempting to use types from the `typing` module that are not supported by Pydantic, #745 by @djpetti * add documentation for Pycharm Plugin, #750 by @koxudaxi * fix broken examples in the docs, #753 by @dmontagu * moving typing-related objects into `pydantic.typing`, #761 by @samuelcolvin * Minor performance improvements to `ErrorWrapper`, `ValidationError` and datetime parsing, #763 by @samuelcolvin * Improvements to `datetime`/`date`/`time`/`timedelta` types: more descriptive errors, change errors to `value_error` not `type_error`, support bytes, #766 by @samuelcolvin * fix error messages for `Literal` types with multiple allowed values, #770 by @dmontagu * Improved auto-generated `title` field in JSON schema by converting underscore to space, #772 by @skewty * support `mypy --no-implicit-reexport` for dataclasses, also respect `--no-implicit-reexport` in pydantic itself, #783 by @samuelcolvin * add the `PaymentCardNumber` type, #790 by @matin * Fix const validations for lists, #794 by @hmvp * Set `additionalProperties` to false in schema for models with extra fields disallowed, #796 by @Code0x58 * `EmailStr` validation method now returns local part case-sensitive per RFC 5321, #798 by @henriklindgren * Added ability to validate strictness to `ConstrainedFloat`, `ConstrainedInt` and `ConstrainedStr` and added `StrictFloat` and `StrictInt` classes, #799 by @DerRidda * Improve handling of `None` and `Optional`, replace `whole` with `each_item` (inverse meaning, default `False`) on validators, #803 by @samuelcolvin * add support for `Type[T]` type hints, #807 by @timonbimon * Performance improvements from removing `change_exceptions`, change how pydantic errors are constructed, #819 by
@samuelcolvin * Fix the error message arising when a `BaseModel`-type model field causes a `ValidationError` during parsing, #820 by @dmontagu * allow `getter_dict` on `Config`, modify `GetterDict` to be more like a `Mapping` object and thus easier to work with, #821 by @samuelcolvin * Only check `TypeVar` param on base `GenericModel` class, #842 by @zpencerq * rename `Model._schema_cache` -> `Model.__schema_cache__`, `Model._json_encoder` -> `Model.__json_encoder__`, `Model._custom_root_type` -> `Model.__custom_root_type__`, #851 by @samuelcolvin ## v0.32.2 (2019-08-17) (Docs are available [here](https://5d584fcca7c9b70007d1c997--pydantic-docs.netlify.com)) * fix `__post_init__` usage with dataclass inheritance, fix #739 by @samuelcolvin * fix required fields validation on GenericModels classes, #742 by @amitbl * fix defining custom `Schema` on `GenericModel` fields, #754 by @amitbl ## v0.32.1 (2019-08-08) * do not validate extra fields when `validate_assignment` is on, #724 by @YaraslauZhylko ## v0.32 (2019-08-06) * add model name to `ValidationError` error message, #676 by @dmontagu * **breaking change**: remove `__getattr__` and rename `__values__` to `__dict__` on `BaseModel`, deprecation warning on use `__values__` attr, attributes access speed increased up to 14 times, #712 by @Bobronium * support `ForwardRef` (without self-referencing annotations) in Python 3.6, #706 by @koxudaxi * implement `schema_extra` in `Config` sub-class, #663 by @tiangolo ## v0.31.1 (2019-07-31) * fix json generation for `EnumError`, #697 by @dmontagu * update numerous dependencies ## v0.31 (2019-07-24) * better support for floating point `multiple_of` values, #652 by @justindujardin * fix schema generation for `NewType` and `Literal`, #649 by @dmontagu * fix `alias_generator` and field config conflict, #645 by @gmetzker and #658 by @Bobronium * more detailed message for `EnumError`, #673 by @dmontagu * add advanced exclude support for `dict`, `json` and `copy`, #648 by @Bobronium * fix bug in `GenericModel` for models with concrete parameterized fields, #672 by @dmontagu * add documentation for `Literal` type, #651 by @dmontagu * add `Config.keep_untouched` for custom descriptors support, #679 by @Bobronium * use `inspect.cleandoc` internally to get model description, #657 by @tiangolo * add `Color` to schema generation, by @euri10 ## v0.30.1 (2019-07-15) * fix so nested classes which inherit and change `__init__` are correctly processed while still allowing `self` as a parameter, #644 by @lnaden and @dgasmith ## v0.30 (2019-07-07) * enforce single quotes in code, #612 by @samuelcolvin * fix infinite recursion with dataclass inheritance and `__post_init__`, #606 by @Hanaasagi * fix default values for `GenericModel`, #610 by @dmontagu * clarify that self-referencing models require Python 3.7+, #616 by @vlcinsky * fix truncate for types, #611 by @dmontagu * add `alias_generator` support, #622 by @Bobronium * fix unparameterized generic type schema generation, #625 by @dmontagu * fix schema generation with multiple/circular references to the same model, #621 by @tiangolo and @wongpat * support custom root types, #628 by @koxudaxi * support `self` as a field name in `parse_obj`, #632 by @samuelcolvin ## v0.29 (2019-06-19) * support dataclasses.InitVar, #592 by @pfrederiks * Updated documentation to elucidate the usage of `Union` when defining multiple types under an attribute's annotation and showcase how the type-order can affect marshalling of provided
values, #594 by @somada141 * add `conlist` type, #583 by @hmvp * add support for generics, #595 by @dmontagu ## v0.28 (2019-06-06) * fix support for JSON Schema generation when using models with circular references in Python 3.7, #572 by @tiangolo * support `__post_init_post_parse__` on dataclasses, #567 by @sevaho * allow dumping dataclasses to JSON, #575 by @samuelcolvin and @DanielOberg * ORM mode, #562 by @samuelcolvin * fix `pydantic.compiled` on ipython, #573 by @dmontagu and @samuelcolvin * add `StrictBool` type, #579 by @cazgp ## v0.27 (2019-05-30) * **breaking change** `_pydantic_post_init` to execute dataclass' original `__post_init__` before validation, #560 by @HeavenVolkoff * fix handling of generic types without specified parameters, #550 by @dmontagu * **breaking change** (maybe): this is the first release compiled with **cython**, see the docs and please submit an issue if you run into problems ## v0.27.0a1 (2019-05-26) * fix JSON Schema for `list`, `tuple`, and `set`, #540 by @tiangolo * compiling with cython, `manylinux` binaries, some other performance improvements, #548 by @samuelcolvin ## v0.26 (2019-05-22) * fix to schema generation for `IPvAnyAddress`, `IPvAnyInterface`, `IPvAnyNetwork` #498 by @pilosus * fix variable length tuples support, #495 by @pilosus * fix return type hint for `create_model`, #526 by @dmontagu * **Breaking Change:** fix `.dict(skip_keys=True)` skipping values set via alias (this involves changing `validate_model()` to always return `Tuple[Dict[str, Any], Set[str], Optional[ValidationError]]`), #517 by @sommd * fix to schema generation for `IPv4Address`, `IPv6Address`, `IPv4Interface`, `IPv6Interface`, `IPv4Network`, `IPv6Network` #532 by @euri10 * add `Color` type, #504 by @pilosus and @samuelcolvin ## v0.25 (2019-05-05) * Improve documentation on self-referencing models and annotations, #487 by @theenglishway * fix `.dict()` with extra keys, #490 by @JaewonKim * support `const` keyword in `Schema`, #434 by @Sean1708 ## v0.24 (2019-04-23) * fix handling `ForwardRef` in sub-types, like `Union`, #464 by @tiangolo * fix secret serialization, #465 by @atheuz * Support custom validators for dataclasses, #454 by @primal100 * fix `parse_obj` to cope with dict-like objects, #472 by @samuelcolvin * fix to schema generation in nested dataclass-based models, #474 by @NoAnyLove * fix `json` for `Path`, `FilePath`, and `DirectoryPath` objects, #473 by @mikegoodspeed ## v0.23 (2019-04-04) * improve documentation for contributing section, #441 by @pilosus * improve README.rst to include essential information about the package, #446 by @pilosus * `IntEnum` support, #444 by @potykion * fix PyObject callable value, #409 by @pilosus * fix `black` deprecation warnings after update, #451 by @pilosus * fix `ForwardRef` collection bug, #450 by @tigerwings * Support specialized `ClassVars`, #455 by @tyrylu * fix JSON serialization for `ipaddress` types, #333 by @pilosus * add `SecretStr` and `SecretBytes` types, #452 by @atheuz ## v0.22 (2019-03-29) * add `IPv{4,6,Any}Network` and `IPv{4,6,Any}Interface` types from `ipaddress` stdlib, #333 by @pilosus * add docs for `datetime` types, #386 by @pilosus * fix to schema generation in dataclass-based models, #408 by @pilosus * fix path in nested models, #437 by @kataev * add `Sequence` support, #304 by @pilosus ## v0.21.0 (2019-03-15) * fix typo in `NoneIsNotAllowedError` message, #414 by @YaraslauZhylko * add `IPvAnyAddress`, `IPv4Address` and `IPv6Address` types, #333 by @pilosus ## v0.20.1 (2019-02-26) * fix type
hints of `parse_obj` and similar methods, #405 by @erosennin * fix submodel validation, #403 by @samuelcolvin * correct type hints for `ValidationError.json`, #406 by @layday ## v0.20.0 (2019-02-18) * fix tests for Python 3.8, #396 by @samuelcolvin * Adds fields to the `dir` method for autocompletion in interactive sessions, #398 by @dgasmith * support `ForwardRef` (and therefore `from __future__ import annotations`) with dataclasses, #397 by @samuelcolvin ## v0.20.0a1 (2019-02-13) * **breaking change** (maybe): more sophisticated argument parsing for validators, any subset of `values`, `config` and `field` is now permitted, eg. `(cls, value, field)`, however the variadic keyword argument ("`**kwargs`") **must** be called `kwargs`, #388 by @samuelcolvin * **breaking change**: Adds `skip_defaults` argument to `BaseModel.dict()` to allow skipping of fields that were not explicitly set, signature of `Model.construct()` changed, #389 by @dgasmith * add `py.typed` marker file for PEP-561 support, #391 by @je-l * Fix `extra` behaviour for multiple inheritance/mix-ins, #394 by @YaraslauZhylko ## v0.19.0 (2019-02-04) * Support `Callable` type hint, fix #279 by @proofit404 * Fix schema for fields with `validator` decorator, fix #375 by @tiangolo * Add `multiple_of` constraint to `ConstrainedDecimal`, `ConstrainedFloat`, `ConstrainedInt` and their related types `condecimal`, `confloat`, and `conint` #371, thanks @StephenBrown2 * Deprecated `ignore_extra` and `allow_extra` Config fields in favor of `extra`, #352 by @liiight * Add type annotations to all functions, test fully with mypy, #373 by @samuelcolvin * fix for 'missing' error with `validate_all` or `validate_always`, #381 by @samuelcolvin * Change the second/millisecond watershed for date/datetime parsing to `2e10`, #385 by @samuelcolvin ## v0.18.2 (2019-01-22) * Fix to schema generation with `Optional` fields, fix #361 by @samuelcolvin ## v0.18.1 (2019-01-17) * add `ConstrainedBytes` and `conbytes` types, #315 @Gr1N * adding `MANIFEST.in` to include license in package `.tar.gz`, #358 by @samuelcolvin ## v0.18.0 (2019-01-13) * **breaking change**: don't call validators on keys of dictionaries, #254 by @samuelcolvin * Fix validators with `always=True` when the default is `None` or the type is optional, also prevent `whole` validators being called for sub-fields, fix #132 by @samuelcolvin * improve documentation for settings priority and allow it to be easily changed, #343 by @samuelcolvin * fix `ignore_extra=False` and `allow_population_by_alias=True`, fix #257 by @samuelcolvin * **breaking change**: Set `BaseConfig` attributes `min_anystr_length` and `max_anystr_length` to `None` by default, fix #349 in #350 by @tiangolo * add support for postponed annotations, #348 by @samuelcolvin ## v0.17.0 (2018-12-27) * fix schema for `timedelta` as number, #325 by @tiangolo * prevent validators being called repeatedly after inheritance, #327 by @samuelcolvin * prevent duplicate validator check in ipython, fix #312 by @samuelcolvin * add "Using Pydantic" section to docs, #323 by @tiangolo & #326 by @samuelcolvin * fix schema generation for fields annotated as `: dict`, `: list`, `: tuple` and `: set`, #330 & #335 by @nkonin * add support for constrained strings as dict keys in schema, #332 by @tiangolo * support for passing Config class in dataclasses decorator, #276 by @jarekkar (**breaking change**: this supersedes the `validate_assignment` argument with `config`) * support for nested dataclasses, #334 by @samuelcolvin * better errors when getting an
`ImportError` with `PyObject`, #309 by @samuelcolvin * rename `get_validators` to `__get_validators__`, deprecation warning on use of old name, #338 by @samuelcolvin * support `ClassVar` by excluding such attributes from fields, #184 by @samuelcolvin ## v0.16.1 (2018-12-10) * fix `create_model` to correctly use the passed `__config__`, #320 by @hugoduncan ## v0.16.0 (2018-12-03) * **breaking change**: refactor schema generation to be compatible with JSON Schema and OpenAPI specs, #308 by @tiangolo * add `schema` to `schema` module to generate top-level schemas from base models, #308 by @tiangolo * add additional fields to `Schema` class to declare validation for `str` and numeric values, #311 by @tiangolo * rename `_schema` to `schema` on fields, #318 by @samuelcolvin * add `case_insensitive` option to `BaseSettings` `Config`, #277 by @jasonkuhrt ## v0.15.0 (2018-11-18) * move codebase to use black, #287 by @samuelcolvin * fix alias use in settings, #286 by @jasonkuhrt and @samuelcolvin * fix datetime parsing in `parse_date`, #298 by @samuelcolvin * allow dataclass inheritance, fix #293 by @samuelcolvin * fix `PyObject = None`, fix #305 by @samuelcolvin * allow `Pattern` type, fix #303 by @samuelcolvin ## v0.14.0 (2018-10-02) * dataclasses decorator, #269 by @Gaunt and @samuelcolvin ## v0.13.1 (2018-09-21) * fix issue where int_validator doesn't cast a `bool` to an `int` #264 by @nphyatt * add deep copy support for `BaseModel.copy()` #249, @gangefors ## v0.13.0 (2018-08-25) * raise an exception if a field's name shadows an existing `BaseModel` attribute #242 * add `UrlStr` and `urlstr` types #236 * timedelta json encoding ISO8601 and total seconds, custom json encoders #247, by @cfkanesan and @samuelcolvin * allow `timedelta` objects as values for properties of type `timedelta` (matches `datetime` etc. behavior) #247 ## v0.12.1 (2018-07-31) * fix schema generation for fields defined using `typing.Any` #237 ## v0.12.0 (2018-07-31) * add `by_alias` argument in `.dict()` and `.json()` model methods #205 * add Json type support #214 * support tuples #227 * major improvements and changes to schema #213 ## v0.11.2 (2018-07-05) * add `NewType` support #115 * fix `list`, `set` & `tuple` validation #225 * separate out `validate_model` method, allow errors to be returned along with valid values #221 ## v0.11.1 (2018-07-02) * support Python 3.7 #216, thanks @layday * Allow arbitrary types in model #209, thanks @oldPadavan ## v0.11.0 (2018-06-28) * make `list`, `tuple` and `set` types stricter #86 * **breaking change**: remove msgpack parsing #201 * add `FilePath` and `DirectoryPath` types #10 * model schema generation #190 * JSON serialisation of models and schemas #133 ## v0.10.0 (2018-06-11) * add `Config.allow_population_by_alias` #160, thanks @bendemaree * **breaking change**: new errors format #179, thanks @Gr1N * **breaking change**: removed `Config.min_number_size` and `Config.max_number_size` #183, thanks @Gr1N * **breaking change**: correct behaviour of `lt` and `gt` arguments to `conint` etc. 
#188; for the old behaviour use `le` and `ge`, #194, thanks @jaheba * added error context and ability to redefine error message templates using `Config.error_msg_templates` #183, thanks @Gr1N * fix typo in validator exception #150 * copy defaults to model values, so different models don't share objects #154 ## v0.9.1 (2018-05-10) * allow custom `get_field_config` on config classes #159 * add `UUID1`, `UUID3`, `UUID4` and `UUID5` types #167, thanks @Gr1N * modify some inconsistent docstrings and annotations #173, thanks @YannLuo * fix type annotations for exotic types #171, thanks @Gr1N * re-use type validators in exotic types #171 * scheduled monthly requirements updates #168 * add `Decimal`, `ConstrainedDecimal` and `condecimal` types #170, thanks @Gr1N ## v0.9.0 (2018-04-28) * tweak email-validator import error message #145 * fix parse error of `parse_date()` and `parse_datetime()` when input is 0 #144, thanks @YannLuo * add `Config.anystr_strip_whitespace` and `strip_whitespace` kwarg to `constr`, by default the value is `False` #163, thanks @Gr1N * add `ConstrainedFloat`, `confloat`, `PositiveFloat` and `NegativeFloat` types #166, thanks @Gr1N ## v0.8.0 (2018-03-25) * fix type annotation for `inherit_config` #139 * **breaking change**: check for invalid field names in validators #140 * validate attributes of parent models #141 * **breaking change**: email validation now uses [email-validator](https://github.com/JoshData/python-email-validator) #142 ## v0.7.1 (2018-02-07) * fix bug with `create_model` modifying the base class ## v0.7.0 (2018-02-06) * added compatibility with abstract base classes (ABCs) #123 * add `create_model` method #113 #125 * **breaking change**: rename `.config` to `.__config__` on a model * **breaking change**: remove deprecated `.values()` on a model, use `.dict()` instead * remove use of `OrderedDict` and use simple dict #126 * add `Config.use_enum_values` #127 * add wildcard validators of the form `@validator('*')` #128 ## v0.6.4 (2018-02-01) * allow Python date and time objects #122 ## v0.6.3 (2017-11-26) * fix direct install without `README.rst` present ## v0.6.2 (2017-11-13) * errors for invalid validator use * safer check for complex models in `Settings` ## v0.6.1 (2017-11-08) * prevent duplicate validators, #101 * add `always` kwarg to validators, #102 ## v0.6.0 (2017-11-07) * assignment validation #94, thanks petroswork!
* JSON in environment variables for complex types, #96 * add `validator` decorators for complex validation, #97 * deprecate `values(...)` and replace with `.dict(...)`, #99 ## v0.5.0 (2017-10-23) * add `UUID` validation #89 * remove `index` and `track` from error object (json) if they're null #90 * improve the error text when a list is provided rather than a dict #90 * add benchmarks table to docs #91 ## v0.4.0 (2017-07-08) * show length in string validation error * fix aliases in config during inheritance #55 * simplify error display * use unicode ellipsis in `truncate` * add `parse_obj`, `parse_raw` and `parse_file` helper functions #58 * switch annotation-only fields to come first in fields list not last ## v0.3.0 (2017-06-21) * immutable models via `config.allow_mutation = False`, associated cleanup and performance improvement #44 * immutable helper methods `construct()` and `copy()` #53 * allow pickling of models #53 * `setattr` is removed as `__setattr__` is now intelligent #44 * `raise_exception` removed, Models now always raise exceptions #44 * instance method validators removed * django-restful-framework benchmarks added #47 * fix inheritance bug #49 * make str type stricter so list, dict etc. are not coerced to strings #52 * add `StrictStr` which only accepts strings as input #52 ## v0.2.1 (2017-06-07) * pypi and travis together messed up the deploy of `v0.2`; this should fix it ## v0.2.0 (2017-06-07) * **breaking change**: `values()` on a model is now a method not a property, takes `include` and `exclude` arguments * allow annotation-only fields to support mypy * add pretty `to_string(pretty=True)` method for models ## v0.1.0 (2017-06-03) * add docs * add history pydantic-1.10.14/LICENSE000066400000000000000000000021471455251250200145030ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2017, 2018, 2019, 2020, 2021 Samuel Colvin and other contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
pydantic-1.10.14/MANIFEST.in000066400000000000000000000001651455251250200152320ustar00rootroot00000000000000include LICENSE include README.md include HISTORY.md graft tests global-exclude __pycache__ global-exclude *.py[cod] pydantic-1.10.14/Makefile000066400000000000000000000052021455251250200151310ustar00rootroot00000000000000.DEFAULT_GOAL := all sources = pydantic tests docs/build isort = isort $(sources) black = black -S -l 120 --target-version py38 $(sources) .PHONY: install-linting install-linting: pip install -r tests/requirements-linting.txt pre-commit install .PHONY: install-pydantic install-pydantic: python -m pip install -U wheel pip pip install -r requirements.txt SKIP_CYTHON=1 pip install -e . .PHONY: install-testing install-testing: install-pydantic pip install -r tests/requirements-testing.txt .PHONY: install-docs install-docs: install-pydantic pip install -U -r docs/requirements.txt .PHONY: install install: install-testing install-linting install-docs @echo 'installed development requirements' .PHONY: build-trace build-trace: python setup.py build_ext --force --inplace --define CYTHON_TRACE .PHONY: build build: python setup.py build_ext --inplace .PHONY: format format: pyupgrade --py37-plus --exit-zero-even-if-changed `find $(sources) -name "*.py" -type f` $(isort) $(black) .PHONY: lint lint: flake8 $(sources) $(isort) --check-only --df $(black) --check --diff .PHONY: check-dist check-dist: python setup.py check -ms SKIP_CYTHON=1 python setup.py sdist twine check dist/* .PHONY: mypy mypy: mypy pydantic docs/build .PHONY: pyupgrade pyupgrade: pyupgrade --py37-plus `find pydantic tests -name "*.py" -type f` .PHONY: pyright pyright: cd tests/pyright && pyright .PHONY: test test: pytest --cov=pydantic .PHONY: testcov testcov: test @echo "building coverage html" @coverage html .PHONY: testcov-compile testcov-compile: build-trace test @echo "building coverage html" @coverage html .PHONY: test-examples test-examples: @echo "running examples" @find docs/examples -type f -name '*.py' | xargs -I'{}' sh -c 'python {} >/dev/null 2>&1 || (echo "{} failed")' .PHONY: test-fastapi test-fastapi: git clone https://github.com/tiangolo/fastapi.git --single-branch ./tests/test_fastapi.sh .PHONY: all all: lint mypy testcov .PHONY: clean clean: rm -rf `find . -name __pycache__` rm -f `find . -type f -name '*.py[co]'` rm -f `find . -type f -name '*~'` rm -f `find . 
-type f -name '.*~'` rm -rf .cache rm -rf .pytest_cache rm -rf .mypy_cache rm -rf htmlcov rm -rf *.egg-info rm -f .coverage rm -f .coverage.* rm -rf build rm -rf dist rm -f pydantic/*.c pydantic/*.so python setup.py clean rm -rf site rm -rf docs/_build rm -rf docs/.changelog.md docs/.version.md docs/.tmp_schema_mappings.html rm -rf fastapi/test.db rm -rf coverage.xml .PHONY: docs docs: flake8 --max-line-length=80 docs/examples/ python docs/build/main.py mkdocs build .PHONY: docs-serve docs-serve: python docs/build/main.py mkdocs serve
pydantic-1.10.14/README.md000066400000000000000000000047021455251250200147540ustar00rootroot00000000000000
# pydantic

[![CI](https://github.com/pydantic/pydantic/workflows/CI/badge.svg?event=push)](https://github.com/pydantic/pydantic/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) [![Coverage](https://coverage-badge.samuelcolvin.workers.dev/pydantic/pydantic.svg?branch=1.10.X-fixes)](https://coverage-badge.samuelcolvin.workers.dev/redirect/pydantic/pydantic?branch=1.10.X-fixes) [![pypi](https://img.shields.io/pypi/v/pydantic.svg)](https://pypi.python.org/pypi/pydantic) [![CondaForge](https://img.shields.io/conda/v/conda-forge/pydantic.svg)](https://anaconda.org/conda-forge/pydantic) [![downloads](https://pepy.tech/badge/pydantic/month)](https://pepy.tech/project/pydantic) [![versions](https://img.shields.io/pypi/pyversions/pydantic.svg)](https://github.com/pydantic/pydantic) [![license](https://img.shields.io/github/license/pydantic/pydantic.svg)](https://github.com/pydantic/pydantic/blob/main/LICENSE)

Data validation and settings management using Python type hints.

Fast and extensible, *pydantic* plays nicely with your linters/IDE/brain. Define how data should be in pure, canonical Python 3.7+; validate it with *pydantic*.

## Pydantic Company :rocket:

We've started a company based on the principles that I believe have led to Pydantic's success. Learn more from the [Company Announcement](https://pydantic.dev/announcement/).

## Help

See [documentation](https://docs.pydantic.dev/) for more details.

## Installation

Install using `pip install -U pydantic` or `conda install pydantic -c conda-forge`. For more installation options to make *pydantic* even faster, see the [Install](https://docs.pydantic.dev/install/) section in the documentation.

## A Simple Example

```py
from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel

class User(BaseModel):
    id: int
    name = 'John Doe'
    signup_ts: Optional[datetime] = None
    friends: List[int] = []

external_data = {'id': '123', 'signup_ts': '2017-06-01 12:22', 'friends': [1, '2', b'3']}
user = User(**external_data)
print(user)
#> User id=123 name='John Doe' signup_ts=datetime.datetime(2017, 6, 1, 12, 22) friends=[1, 2, 3]
print(user.id)
#> 123
```

## Contributing

For guidance on setting up a development environment and how to make a contribution to *pydantic*, see [Contributing to Pydantic](https://docs.pydantic.dev/contributing/).

## Reporting a Security Vulnerability

See our [security policy](https://github.com/pydantic/pydantic/security/policy).
pydantic-1.10.14/build-docs.sh000077500000000000000000000004371455251250200160620ustar00rootroot00000000000000
#!/usr/bin/env bash

# This script is used to build the documentation on CloudFlare Pages, this is just used for build previews
# A different script with the same name exists on the `docs-site` branch (where pre-built docs live).
set -e set -x python3 -V make install-docs make docs pydantic-1.10.14/changes/000077500000000000000000000000001455251250200151025ustar00rootroot00000000000000pydantic-1.10.14/changes/README.md000066400000000000000000000005771455251250200163700ustar00rootroot00000000000000# Pending Changes This directory contains files describing changes to pydantic since the last release. If you're creating a pull request, please add a new file to this directory called `<issue or pr number>-<github username>.md`. It should be formatted as a single paragraph of markdown. The contents of this file will be used to update `HISTORY.md` before the next release. pydantic-1.10.14/changes/make_history.py000077500000000000000000000031331455251250200201550ustar00rootroot00000000000000#!/usr/bin/env python3 import re import sys from datetime import date from importlib.machinery import SourceFileLoader from pathlib import Path THIS_DIR = Path(__file__).parent name_regex = re.compile(r'(\d+)-(.*?)\.md') bullet_list = [] for p in THIS_DIR.glob('*.md'): if p.name == 'README.md': continue m = name_regex.fullmatch(p.name) if not m: raise RuntimeError(f'{p.name!r}: invalid change file name') gh_id, creator = m.groups() content = p.read_text().replace('\r\n', '\n').strip('\n. ') if '\n\n' in content: raise RuntimeError(f'{p.name!r}: content includes multiple paragraphs') content = content.replace('\n', '\n ') priority = 0 if '**breaking change' in content.lower(): priority = 2 elif content.startswith('**'): priority = 1 bullet_list.append((priority, int(gh_id), f'* {content}, #{gh_id} by @{creator}')) if not bullet_list: print('no changes found') sys.exit(0) version = SourceFileLoader('version', 'pydantic/version.py').load_module() chunk_title = f'v{version.VERSION} ({date.today():%Y-%m-%d})' new_chunk = '## {}\n\n{}\n\n'.format(chunk_title, '\n'.join(c for *_, c in sorted(bullet_list, reverse=True))) print(f'{chunk_title}...{len(bullet_list)} items') history_path = THIS_DIR / '..' / 'HISTORY.md' history = new_chunk + history_path.read_text() history_path.write_text(history) for p in THIS_DIR.glob('*.md'): if p.name != 'README.md': p.unlink() print( 'changes deleted and HISTORY.md successfully updated, to reset use:\n\n' ' git checkout -- changes/*-*.md HISTORY.md\n' ) pydantic-1.10.14/docs/000077500000000000000000000000001455251250200144225ustar00rootroot00000000000000pydantic-1.10.14/docs/.benchmarks_table.md000066400000000000000000000011551455251250200203100ustar00rootroot00000000000000[//]: <> (Generated with benchmarks/run.py, DO NOT EDIT THIS FILE DIRECTLY, instead run `SAVE=1 python ./run.py`.) Package | Version | Relative Performance | Mean validation time --- | --- | --- | --- pydantic | `1.7.3` | | 93.7μs attrs + cattrs | `20.3.0` | 1.5x slower | 143.6μs valideer | `0.4.2` | 1.9x slower | 175.9μs marshmallow | `3.10.0` | 2.4x slower | 227.6μs voluptuous | `0.12.1` | 2.7x slower | 257.5μs trafaret | `2.1.0` | 3.2x slower | 296.7μs schematics | `2.1.0` | 10.2x slower | 955.5μs django-rest-framework | `3.12.2` | 12.3x slower | 1148.4μs cerberus | `1.3.2` | 25.9x slower | 2427.6μs pydantic-1.10.14/docs/blog/000077500000000000000000000000001455251250200153455ustar00rootroot00000000000000pydantic-1.10.14/docs/blog/pydantic-v2-alpha.md000066400000000000000000000267331455251250200211250ustar00rootroot00000000000000# Pydantic V2 Pre Release --- We're excited to announce the first alpha release of Pydantic V2! This first Pydantic V2 alpha is no April Fool's joke — for a start we missed our April 1st target date :cry:.
After a year's work, we invite you to explore the improvements we've made and give us your feedback. We look forward to hearing your thoughts and working together to improve the library. For many of you, Pydantic is already a key part of your Python toolkit and needs no introduction — we hope you'll find the improvements and additions in Pydantic V2 useful. If you're new to Pydantic: Pydantic is an open-source Python library that provides powerful data parsing and validation — including type coercion and useful error messages when typing issues arise — and settings management capabilities. See [the docs](/) for examples of Pydantic at work. ## Getting started with the Pydantic V2 alpha Your feedback will be a critical part of ensuring that we have made the right tradeoffs with the API changes in V2. To get started with the Pydantic V2 alpha, install it from PyPI. We recommend using a virtual environment to isolate your testing environment: ```bash pip install --pre -U "pydantic>=2.0a1" ``` Note that there are still some rough edges and incomplete features, and while trying out the Pydantic V2 alpha releases you may experience errors. We encourage you to try out the alpha releases in a test environment and not in production. Some features are still in development, and we will continue to make changes to the API. If you do encounter any issues, please [create an issue in GitHub](https://github.com/pydantic/pydantic/issues) using the `bug V2` label. This will help us to actively monitor and track errors, and to continue to improve the library's performance. This will be the first of several upcoming alpha releases. As you evaluate our changes and enhancements, we encourage you to share your feedback with us. Please let us know: * If you don't like the changes, so we can make sure Pydantic remains a library you enjoy using. * If this breaks your usage of Pydantic so we can fix it, or at least describe a migration path. Thank you for your support, and we look forward to your feedback. --- ## Headlines Here are some of the most interesting new features in the current Pydantic V2 alpha release. For background on plans behind these features, see the earlier [Pydantic V2 Plan](/blog/pydantic-v2/) blog post. The biggest change to Pydantic V2 is [`pydantic-core`](https://github.com/pydantic/pydantic-core) — all validation logic has been rewritten in Rust and moved to a separate package, `pydantic-core`. This has a number of big advantages: * **Performance** - Pydantic V2 is 5-50x faster than Pydantic V1. * **Safety & maintainability** - We've made changes to the architecture that we think will help us maintain Pydantic V2 with far fewer bugs in the long term. With the use of `pydantic-core`, the majority of the logic in the Pydantic library is dedicated to generating "pydantic core schema" — the schema used to define the behaviour of the new, high-performance `pydantic-core` validators and serializers. ### Ready for experimentation * **BaseModel** - the core of validation in Pydantic V1 remains, albeit with new method names. * **Dataclasses** - Pydantic dataclasses are improved and ready to test. * **Serialization** - dumping/serialization/marshalling is significantly more flexible, and ready to test. * **Strict mode** - one of the biggest additions in Pydantic V2 is strict mode, which is ready to test. * **JSON Schema** - generation of JSON Schema is much improved and ready to test. * **Generic Models** - are much improved and ready to test.
* **Recursive Models** - and validation of recursive data structures is much improved and ready to test. * **Custom Types** - custom types have a new interface and are ready to test. * **Custom Field Modifiers** - used via `Annotated[]` are working and in use in Pydantic itself. * **Validation without a BaseModel** - the new `AnalyzedType` class allows validation without the need for a `BaseModel` class, and it's ready to test. * **TypedDict** - we now have full support for `TypedDict` via `AnalyzedType`, it's ready to test. ### Still under construction * **Documentation** - we're working hard on full documentation for V2, but it's not ready yet. * **Conversion Table** - a big addition to the documentation will be a conversion table showing how types are coerced, this is a WIP. * **BaseSettings** - `BaseSettings` will move to a separate `pydantic-settings` package, it's not yet ready to test. **Notice:** since `pydantic-settings` is not yet ready to release, there's no support for `BaseSettings` in the first alpha release. * **validate_arguments** - the `validate_arguments` decorator remains and is working, but hasn't been updated yet. * **Hypothesis Plugin** - the Hypothesis plugin is yet to be updated. * **computed fields** - we know a lot of people are waiting for this, we will include it in Pydantic V2. * **Error messages** - could use some love, and links to docs in error messages are still to be added. * **Migration Guide** - we have some pointers below, but this needs completing. ## Migration Guide **Please note:** this is just the beginning of a migration guide. We'll work hard up to the final release to prepare a full migration guide, but for now the following pointers should be some help while experimenting with V2. ### Changes to BaseModel * Various method names have been changed; `BaseModel` methods all start with `model_` now. Where possible, we have retained the old method names to help ease migration, but calling them will result in `DeprecationWarning`s. * Some of the built-in data loading functionality has been slated for removal. In particular, `parse_raw` and `parse_file` are now deprecated. You should load the data and then pass it to `model_validate`. * The `from_orm` method has been removed; you can now just use `model_validate` (equivalent to `parse_obj` from Pydantic V1) to achieve something similar, as long as you've set `from_attributes=True` in the model config. * The `__eq__` method has changed for models; models are no longer considered equal to the dicts. * Custom `__init__` overrides won't be called. This should be replaced with a `@root_validator`. * Due to inconsistency with the rest of the library, we have removed the special behavior of models using the `__root__` field, and have disallowed the use of an attribute with this name to prevent confusion. However, you can achieve equivalent behavior with a "standard" field name through the use of `@root_validator`, `@model_serializer`, and `__pydantic_modify_json_schema__`. You can see an example of this [here](https://github.com/pydantic/pydantic/blob/2b9459f20d094a46fa3093b43c34444240f03646/tests/test_parse.py#L95-L113). ### Changes to Pydantic Dataclasses * The `__post_init__` in Pydantic dataclasses will now be called after validation, rather than before. * We no longer support `extra='allow'` for Pydantic dataclasses, where extra attributes passed to the initializer would be stored as extra fields on the dataclass. 
`extra='ignore'` is still supported for the purposes of allowing extra fields while parsing data; they just aren't stored. * `__post_init_post_parse__` has been removed. * Nested dataclasses no longer accept tuples as input, only dict. ### Changes to Config * To specify config on a model, it is now deprecated to create a class called `Config` in the namespace of the parent `BaseModel` subclass. Instead, you just need to set a class attribute called `model_config` to be a dict with the key/value pairs you want to be used as the config. The following config settings have been removed: * `allow_mutation`. * `error_msg_templates`. * `fields` — this was the source of various bugs, so has been removed. You should be able to use `Annotated` on fields to modify them as desired. * `getter_dict` — `orm_mode` has been removed, and this implementation detail is no longer necessary. * `schema_extra` — you should now use the `json_schema_extra` keyword argument to `pydantic.Field`. * `smart_union`. * `underscore_attrs_are_private` — the Pydantic V2 behavior is now the same as if this was always set to `True` in Pydantic V1. The following config settings have been renamed: * `allow_population_by_field_name` → `populate_by_name` * `anystr_lower` → `str_to_lower` * `anystr_strip_whitespace` → `str_strip_whitespace` * `anystr_upper` → `str_to_upper` * `keep_untouched` → `ignored_types` * `max_anystr_length` → `str_max_length` * `min_anystr_length` → `str_min_length` * `orm_mode` → `from_attributes` * `validate_all` → `validate_default` ### Changes to Validators * Raising a `TypeError` inside a validator no longer produces a `ValidationError`, but just raises the `TypeError` directly. This was necessary to prevent certain common bugs (such as calling functions with invalid signatures) from being unintentionally converted into `ValidationError` and displayed to users. If you really want `TypeError` to be converted to a `ValidationError` you should use a `try: except:` block that will catch it and do the conversion. * `each_item` validators are deprecated and should be replaced with a type annotation using `Annotated` to apply a validator or with a validator that operates on all items at the top level. * Changes to `@validator`-decorated function signatures. * The `stricturl` type has been removed. * Root validators can no longer be run with `skip_on_failure=False`. ### Changes to Validation of specific types * Integers outside the valid range of 64 bit integers will cause `ValidationError`s during parsing. To work around this, use an `IsInstance` validator (more details to come). * Subclasses of built-ins won't validate into their subclass types; you'll need to use an `IsInstance` validator to validate these types. ### Changes to Generic models * While it does not raise an error at runtime yet, subclass checks for parametrized generics should no longer be used. These will result in `TypeError`s and we can't promise they will work forever. However, it will be okay to do subclass checks against _non-parametrized_ generic models ### Other changes * `GetterDict` has been removed, as it was just an implementation detail for `orm_mode`, which has been removed. ### AnalyzedType Pydantic V1 didn't have good support for validation or serializing non-`BaseModel`. To work with them you had to create a "root" model or use the utility functions in `pydantic.tools` (`parse_obj_as` and `schema_of`). 
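For contrast, here's roughly what that looks like in Pydantic V1 using the `parse_obj_as` helper (a quick illustrative sketch):

```python
from typing import List

from pydantic import parse_obj_as

# V1: one-off "root" validation via the pydantic.tools helper
assert parse_obj_as(List[int], ['1', '2', '3']) == [1, 2, 3]
```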
In Pydantic V2 this is _a lot_ easier: the `AnalyzedType` class lets you build an object that behaves almost like a `BaseModel` class which you can use for a lot of the use cases of root models and as a complete replacement for `parse_obj_as` and `schema_of`. ```python from typing import List from pydantic import AnalyzedType validator = AnalyzedType(List[int]) assert validator.validate_python(['1', '2', '3']) == [1, 2, 3] print(validator.json_schema()) # {'type': 'array', 'items': {'type': 'integer'}} ``` Note that this API is provisional and may change before the final release of Pydantic V2. pydantic-1.10.14/docs/blog/pydantic-v2.md000066400000000000000000001457751455251250200200440ustar00rootroot00000000000000# Pydantic V2 Plan --- Updated late 10 Jul 2022, see [pydantic#4226](https://github.com/pydantic/pydantic/pull/4226). Update 30 Dec 2022: **The new release deadline for Pydantic V2 is the end of Q1 2023**, see [pydantic#4887](https://github.com/pydantic/pydantic/issues/4887) for more details, further updates will be posted on that issue. --- I've spoken to quite a few people about pydantic V2, and mention it in passing even more. I owe people a proper explanation of the plan for V2: * What we will add * What we will remove * What we will change * How I'm intending to go about completing it and getting it released * Some idea of timeframe :fearful: Here goes... --- Enormous thanks to [Eric Jolibois](https://github.com/PrettyWood), [Laurence Watson](https://github.com/Rabscuttler), [Sebastián Ramírez](https://github.com/tiangolo), [Adrian Garcia Badaracco](https://github.com/adriangb), [Tom Hamilton Stubber](https://github.com/tomhamiltonstubber), [Zac Hatfield-Dodds](https://github.com/Zac-HD), [Tom](https://github.com/czotomo) & [Hasan Ramezani](https://github.com/hramezani) for reviewing this blog post, putting up with (and correcting) my horrible typos and making great suggestions that have made this post and Pydantic V2 materially better. --- ## Plan & Timeframe I'm currently taking a kind of sabbatical after leaving my last job to get pydantic V2 released. Why? I ask myself that question quite often. I'm very proud of how much pydantic is used, but I'm less proud of its internals. Since it's something people seem to care about and use quite a lot (26m downloads a month, used by 72k public repos, 10k stars), I want it to be as good as possible. While I'm on the subject of why, how and my odd sabbatical: if you work for a large company that uses pydantic a lot, you might encourage the company to **sponsor me a meaningful amount**, like [Salesforce did](https://twitter.com/samuel_colvin/status/1501288247670063104) (if your organisation is not open to donations, I can also offer consulting services). This is not charity, recruitment or marketing - the argument should be about how much the company will save if pydantic is 10x faster, more stable and more powerful - it would be worth paying me 10% of that to make it happen. Before pydantic V2 can be released, we need to release pydantic V1.10 - there are lots of changes in the main branch of pydantic contributed by the community, and it's only fair to provide a release including those changes. Many of them will remain unchanged for V2; the rest will act as a requirement to make sure pydantic V2 includes the capabilities they implemented. The basic road map for me is as follows: 1. Implement a few more features in pydantic-core, and release a first version, see [below](#motivation-pydantic-core) 2.
Work on getting pydantic V1.10 out - basically merge all open PRs that are finished 3. Release pydantic V1.10 4. Delete all stale PRs which didn't make it into V1.10, apologise profusely to their authors who put their valuable time into pydantic only to have their PRs closed :pray: (and explain when and how they can rebase and recreate the PR) 5. Rename `master` to `main`, seems like a good time to do this 6. Change the main branch of pydantic to target V2 7. Start tearing pydantic code apart and see how many existing tests can be made to pass 8. Rinse, repeat 9. Release pydantic V2 :tada: Plan is to have all this done by the end of October, definitely by the end of the year. ### Breaking Changes & Compatibility :pray: While we'll do our best to avoid breaking changes, some things will break. As per the [greatest pun in modern TV history](https://youtu.be/ezAlySFluEk): > You can't make a Tomelette without breaking some Greggs. Where possible, if breaking changes are unavoidable, we'll try to provide warnings or errors to make sure those changes are obvious to developers. ## Motivation & `pydantic-core` Since pydantic's initial release, with the help of wonderful contributors [Eric Jolibois](https://github.com/PrettyWood), [Sebastián Ramírez](https://github.com/tiangolo), [David Montague](https://github.com/dmontagu) and many others, the package and its usage have grown enormously. The core logic however has remained mostly unchanged since the initial experiment. It's old, it smells, it needs to be rebuilt. The release of version 2 is an opportunity to rebuild pydantic and correct many things that don't make sense - **to make pydantic amazing :rocket:**. The core validation logic of pydantic V2 will be performed by a separate package [pydantic-core](https://github.com/pydantic/pydantic-core) which I've been building over the last few months. *pydantic-core* is written in Rust using the excellent [pyo3](https://pyo3.rs) library which provides rust bindings for python. The motivation for building pydantic-core in Rust is as follows: 1. **Performance**, see [below](#performance) 2. **Recursion and code separation** - with no stack and little-to-no overhead for extra function calls, Rust allows pydantic-core to be implemented as a tree of small validators which call each other, making code easier to understand and extend without harming performance 3. **Safety and complexity** - pydantic-core is a fairly complex piece of code which has to draw distinctions between many different errors; Rust is great in situations like this, it should minimise bugs (:fingers_crossed:) and allow the codebase to be extended for a long time to come !!! note The python interface to pydantic shouldn't change as a result of using pydantic-core; instead pydantic will use type annotations to build a schema for pydantic-core to use. pydantic-core is usable now, albeit with an unintuitive API - if you're interested, please give it a try. pydantic-core provides validators for common data types, [see a list here](https://github.com/pydantic/pydantic-core/blob/main/pydantic_core/schema_types.py#L314). Other, less commonly used data types will be supported via validator functions implemented in pydantic, in Python. See [pydantic-core#153](https://github.com/pydantic/pydantic-core/issues/153) for a summary of what needs to be completed before its first release. ## Headlines Here are some of the biggest changes expected in V2.
### Performance :thumbsup: As a result of the move to Rust for the validation logic (and significant improvements in how validation objects are structured) pydantic V2 will be significantly faster than pydantic V1. Looking at the pydantic-core [benchmarks](https://github.com/pydantic/pydantic-core/tree/main/tests/benchmarks) today, pydantic V2 is between 4x and 50x faster than pydantic V1.9.1. In general, pydantic V2 is about 17x faster than V1 when validating a model containing a range of common fields. ### Strict Mode :thumbsup: People have long complained about pydantic for coercing data instead of throwing an error. E.g. input to an `int` field could be `123` or the string `"123"` which would be converted to `123`. While this is very useful in many scenarios (think: URL parameters, environment variables, user input), there are some situations where it's not desirable. pydantic-core comes with "strict mode" built in. With this, only the exact data type is allowed, e.g. passing `"123"` to an `int` field would result in a validation error. This will allow pydantic V2 to offer a `strict` switch which can be set on either a model or a field. ### Formalised Conversion Table :thumbsup: As well as complaints about coercion, another legitimate complaint was inconsistency around data conversion. In pydantic V2, the following principle will govern when data should be converted in "lax mode" (`strict=False`): > If the input data has a SINGLE and INTUITIVE representation, in the field's type, AND no data is lost > during the conversion, then the data will be converted; otherwise a validation error is raised. > There is one exception to this rule: string fields - > virtually all data has an intuitive representation as a string (e.g. `repr()` and `str()`), therefore > a custom rule is required: only `str`, `bytes` and `bytearray` are valid as inputs to string fields. Some examples of what that means in practice: | Field Type | Input | Single & Intuitive R. | All Data Preserved | Result | |------------|-------------------------|-----------------------|--------------------|---------| | `int` | `"123"` | :material-check: | :material-check: | Convert | | `int` | `123.0` | :material-check: | :material-check: | Convert | | `int` | `123.1` | :material-check: | :material-close: | Error | | `date` | `"2020-01-01"` | :material-check: | :material-check: | Convert | | `date` | `"2020-01-01T00:00:00"` | :material-check: | :material-check: | Convert | | `date` | `"2020-01-01T12:00:00"` | :material-check: | :material-close: | Error | | `int` | `b"1"` | :material-close: | :material-check: | Error | (For the last case converting `bytes` to an `int` could reasonably mean `int(bytes_data.decode())` or `int.from_bytes(b'1', 'big/little')`, hence an error) In addition to the general rule, we'll provide a conversion table which defines exactly what data will be allowed to which field types. See [the table below](#conversion-table) for a start on this. ### Built in JSON support :thumbsup: pydantic-core can parse JSON directly into a model or output type; this both improves performance and avoids issues with strictness - e.g. if you have a strict model with a `datetime` field, the input must be a `datetime` object, but clearly that makes no sense when parsing JSON which has no `datetime` type. Same with `bytes` and many other types. Pydantic V2 will therefore allow some conversion when validating JSON directly, even in strict mode (e.g.
`ISO8601 string -> datetime`, `str -> bytes`) even though this would not be allowed when validating a python object. In future direct validation of JSON will also allow: * parsing in a separate thread while starting validation in the main thread * line numbers from JSON to be included in the validation errors (These features will not be included in V2, but instead will hopefully be added later.) !!! note Pydantic has always had special support for JSON, and that is not going to change. While in theory other formats could be specifically supported, the overheads and development time are significant and I don't think there's another format that's used widely enough to be worth specific logic. Other formats can be parsed to python then validated; similarly when serialising, data can be exported to a python object, then serialised, see [below](#improvements-to-dumpingserializationexport). ### Validation without a Model :thumbsup: In pydantic V1 the core of all validation was a pydantic model; this led to a significant performance penalty and extra complexity when the output data type was not a model. pydantic-core operates on a tree of validators with no "model" type required at the base of that tree. It can therefore validate a single `string` or `datetime` value, a `TypedDict` or a `Model` equally easily. This feature will provide significant additional performance improvements in scenarios like: * Adding validation to `dataclasses` * Validating URL arguments, query strings, headers, etc. in FastAPI * Adding validation to `TypedDict` * Function argument validation * Adding validation to your custom classes, decorators... In effect - anywhere where you don't care about a traditional model class instance. We'll need to add standalone methods for generating JSON Schema and dumping these objects to JSON, etc. ### Required vs. Nullable Cleanup :thumbsup: Pydantic previously had a somewhat confused idea about "required" vs. "nullable". This mostly resulted from my misgivings about marking a field as `Optional[int]` but requiring a value to be provided but allowing it to be `None` - I didn't like using the word "optional" in relation to a field which was not optional. In pydantic V2, pydantic will move to match dataclasses, thus: ```py title="Required vs. Nullable" from pydantic import BaseModel class Foo(BaseModel): f1: str # required, cannot be None f2: str | None # required, can be None - same as Optional[str] / Union[str, None] f3: str | None = None # not required, can be None f4: str = 'Foobar' # not required, but cannot be None ``` ### Validator Function Improvements :thumbsup: :thumbsup: :thumbsup: This is one of the changes in pydantic V2 that I'm most excited about; I've been talking about something like this for a long time, see [pydantic#1984](https://github.com/pydantic/pydantic/issues/1984), but couldn't find a way to do this until now.
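For context, V1's `@validator` effectively only lets a function run before the field's own validation (`pre=True`) or after it (the default), as in this V1 sketch:

```py
from pydantic import BaseModel, validator

class MyModel(BaseModel):
    x: int

    @validator('x', pre=True)
    def strip_whitespace(cls, v):
        # V1 "before" hook: runs before the built-in int validation
        return v.strip() if isinstance(v, str) else v

assert MyModel(x=' 123 ').x == 123
```

The plan for V2 is considerably more flexible.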
Fields which use a function for validation can be any of the following types: * **function before mode** - where the function is called before the inner validator is called * **function after mode** - where the function is called after the inner validator is called * **plain mode** - where there's no inner validator * **wrap mode** - where the function takes a reference to a function which calls the inner validator, and can therefore modify the input before inner validation, modify the output after inner validation, conditionally not call the inner validator or catch errors from the inner validator and return a default value, or change the error An example how a wrap validator might look: ```py title="Wrap mode validator function" from datetime import datetime from pydantic import BaseModel, ValidationError, validator class MyModel(BaseModel): timestamp: datetime @validator('timestamp', mode='wrap') def validate_timestamp(cls, v, handler): if v == 'now': # we don't want to bother with further validation, # just return the new value return datetime.now() try: return handler(v) except ValidationError: # validation failed, in this case we want to # return a default value return datetime(2000, 1, 1) ``` As well as being powerful, this provides a great "escape hatch" when pydantic validation doesn't do what you need. ### More powerful alias(es) :thumbsup: pydantic-core can support alias "paths" as well as simple string aliases to flatten data as it's validated. Best demonstrated with an example: ```py title="Alias paths" from pydantic import BaseModel, Field class Foo(BaseModel): bar: str = Field(aliases=[['baz', 2, 'qux']]) data = { 'baz': [ {'qux': 'a'}, {'qux': 'b'}, {'qux': 'c'}, {'qux': 'd'}, ] } foo = Foo(**data) assert foo.bar == 'c' ``` `aliases` is a list of lists because multiple paths can be provided, if so they're tried in turn until a value is found. Tagged unions will use the same logic as `aliases` meaning nested attributes can be used to select a schema to validate against. ### Improvements to Dumping/Serialization/Export :thumbsup: :confused: (I haven't worked on this yet, so these ideas are only provisional) There has long been a debate about how to handle converting data when extracting it from a model. One of the features people have long requested is the ability to convert data to JSON compliant types while converting a model to a dict. My plan is to move data export into pydantic-core, with that, one implementation can support all export modes without compromising (and hopefully significantly improving) performance. I see four different export/serialisation scenarios: 1. Extracting the field values of a model with no conversion, effectively `model.__dict__` but with the current filtering logic provided by `.dict()` 2. Extracting the field values of a model recursively (effectively what `.dict()` does now) - sub-models are converted to dicts, but other fields remain unchanged. 3. Extracting data and converting at the same time (e.g. to JSON compliant types) 4. Serialising data straight to JSON I think all 4 modes can be supported in a single implementation, with a kind of "3.5" mode where a python function is used to convert the data as the user wishes. The current `include` and `exclude` logic is extremely complicated, but hopefully it won't be too hard to translate it to Rust. We should also add support for `validate_alias` and `dump_alias` as well as the standard `alias` to allow for customising field keys. 
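For orientation, here's roughly how those four scenarios map onto the existing V1 API - scenario 3 has no direct V1 equivalent, the usual workaround being a `json.loads(model.json())` round-trip (this sketch is illustrative only, not the proposed V2 interface):

```py
import json

from pydantic import BaseModel

class User(BaseModel):
    id: int
    name = 'Jane Doe'

user = User(id=123)

shallow = dict(user)                  # 1: field values with no conversion
nested = user.dict()                  # 2: recursive extraction, sub-models become dicts
json_ready = json.loads(user.json())  # 3: JSON-compliant types (V1 workaround)
json_str = user.json()                # 4: serialise straight to JSON
```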
### Validation Context :thumbsup: Pydantic V2 will add a new optional `context` argument to `model_validate` and `model_validate_json` which will allow you to pass information not available when creating a model to validators. See [pydantic#1549](https://github.com/pydantic/pydantic/issues/1549) for motivation. Here's an example of how `context` might be used: ```py title="Context during Validation" from pydantic import BaseModel, EmailStr, validator class User(BaseModel): email: EmailStr home_country: str @validator('home_country') def check_home_country(cls, v, context): if v not in context['countries']: raise ValueError('invalid country choice') return v async def add_user(post_data: bytes): countries = set(await db_connection.fetch_all('select code from country')) user = User.model_validate_json(post_data, context={'countries': countries}) ... ``` !!! note We (actually mostly Sebastián :wink:) will have to make some changes to FastAPI to fully leverage `context` as we'd need some kind of dependency injection to build context before validation so models can still be passed as arguments to views. I'm sure he'll be game. !!! warning Although this will make it slightly easier to run synchronous IO (HTTP requests, DB queries, etc.) from within validators, I strongly advise you keep IO separate from validation - do it before and use context, or do it afterwards; avoid making queries inside validation where possible. ### Model Namespace Cleanup :thumbsup: For years I've wanted to clean up the model namespace, see [pydantic#1001](https://github.com/pydantic/pydantic/issues/1001). This would avoid confusing gotchas when field names clash with methods on a model; it would also make it safer to add more methods to a model without risking new clashes. After much deliberation (and even giving a lightning talk at the python language summit about alternatives, see [this discussion](https://discuss.python.org/t/better-fields-access-and-allowing-a-new-character-at-the-start-of-identifiers/14529)), I've decided to go with the simplest and clearest approach, at the expense of a bit more typing: All methods on models will start with `model_`, fields' names will not be allowed to start with `"model"` (aliases can be used if required). This will mean `BaseModel` will have roughly the following signature. ```{.py .annotate title="New BaseModel methods"} class BaseModel: model_fields: List[FieldInfo] """previously `__fields__`, although the format will change a lot""" @classmethod def model_validate(cls, data: Any, *, context=None) -> Self: # (1) """ previously `parse_obj()`, validate data """ @classmethod def model_validate_json( cls, data: str | bytes | bytearray, *, context=None ) -> Self: """ previously `parse_raw(..., content_type='application/json')` validate data from JSON """ @classmethod def model_is_instance(cls, data: Any, *, context=None) -> bool: # (2) """ new, check if data is value for the model """ @classmethod def model_is_instance_json( cls, data: str | bytes | bytearray, *, context=None ) -> bool: """ Same as `model_is_instance`, but from JSON """ def model_dump( self, include: ... = None, exclude: ... = None, by_alias: bool = False, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, mode: Literal['unchanged', 'dicts', 'json-compliant'] = 'unchanged', converter: Callable[[Any], Any] | None = None ) -> Any: """ previously `dict()`, as before with new `mode` argument """ def model_dump_json(self, ...)
-> str: """ previously `json()`, arguments as above effectively equivalent to `json.dumps(self.model_dump(..., mode='json'))`, but more performant """ def model_json_schema(self, ...) -> dict[str, Any]: """ previously `schema()`, arguments roughly as before JSON schema as a dict """ def model_update_forward_refs(self) -> None: """ previously `update_forward_refs()`, update forward references """ @classmethod def model_construct( self, _fields_set: set[str] | None = None, **values: Any ) -> Self: """ previously `construct()`, arguments roughly as before construct a model with no validation """ @classmethod def model_customize_schema(cls, schema: dict[str, Any]) -> dict[str, Any]: """ new, way to customize validation, e.g. if you wanted to alter how the model validates certain types, or add validation for a specific type without custom types or decorated validators """ class ModelConfig: """ previously `Config`, configuration class for models """ ``` 1. see [Validation Context](#validation-context) for more information on `context` 2. see [`is_instance` checks](#is_instance-like-checks) The following methods will be removed: * `.parse_file()` - was a mistake, should never have been in pydantic * `.parse_raw()` - partially replaced by `.model_validate_json()`, the other functionality was a mistake * `.from_orm()` - the functionality has been moved to config, see [other improvements](#other-improvements) below * `.schema_json()` - mostly since it causes confusion between pydantic validation schema and JSON schema, and can be replaced with just `json.dumps(m.model_json_schema())` * `.copy()` - instead we'll implement `__copy__` and let people use the `copy` module (this removes some functionality from `copy()`, but there are bugs and ambiguities with that functionality anyway) ### Strict API & API documentation :thumbsup: When preparing for pydantic V2, we'll make a strict distinction between the public API and private functions & classes. Private objects will be clearly identified as private via a `_internal` sub package to discourage use. The public API will have API documentation. I've recently been working with the wonderful [mkdocstrings](https://github.com/mkdocstrings/mkdocstrings) package for both [dirty-equals](https://dirty-equals.helpmanual.io/) and [watchfiles](https://watchfiles.helpmanual.io/) documentation. I intend to use `mkdocstrings` to generate complete API documentation for V2. This wouldn't replace the current example-based somewhat informal documentation style but instead will augment it. ### Error descriptions :thumbsup: The way line errors (the individual errors within a `ValidationError`) are built has become much more sophisticated in pydantic-core. There's a well-defined [set of error codes and messages](https://github.com/pydantic/pydantic-core/blob/main/src/errors/kinds.rs). More will be added when other types are validated via pure python validators in pydantic. I would like to add a dedicated section to the documentation with extra information for each type of error. This would be another key in a line error: `documentation`, which would link to the appropriate section in the docs.
Thus, errors might look like: ```py title="Line Errors Example" [ { 'kind': 'greater_than_equal', 'loc': ['age'], 'message': 'Value must be greater than or equal to 18', 'input_value': 11, 'context': {'ge': 18}, 'documentation': 'https://pydantic.dev/errors/#greater_than_equal', }, { 'kind': 'bool_parsing', 'loc': ['is_developer'], 'message': 'Value must be a valid boolean, unable to interpret input', 'input_value': 'foobar', 'documentation': 'https://pydantic.dev/errors/#bool_parsing', }, ] ``` I own the `pydantic.dev` domain and will use it for at least these errors so that even if the docs URL changes, the error will still link to the correct documentation. If developers don't want to show these errors to users, they can always process the errors list and filter out items from each error they don't need or want. ### No pure python implementation :frowning: Since pydantic-core is written in Rust, and I have absolutely no intention of rewriting it in python, pydantic V2 will only work where a binary package can be installed. pydantic-core will provide binaries in PyPI for (at least): * **Linux**: `x86_64`, `aarch64`, `i686`, `armv7l`, `musl-x86_64` & `musl-aarch64` * **MacOS**: `x86_64` & `arm64` (except python 3.7) * **Windows**: `amd64` & `win32` * **Web Assembly**: `wasm32` (pydantic-core is [already](https://github.com/pydantic/pydantic-core/runs/7214195252?check_suite_focus=true) compiled for wasm32 using emscripten and unit tests pass, except where cpython itself has [problems](https://github.com/pyodide/pyodide/issues/2841)) Binaries for pypy are a work in progress and will be added if possible, see [pydantic-core#154](https://github.com/pydantic/pydantic-core/issues/154). Other binaries can be added provided they can be (cross-)compiled on github actions. If no binary is available from PyPI, pydantic-core can be compiled from source if Rust stable is available. The only place where I know this will cause problems is Raspberry Pi, which is a [mess](https://github.com/piwheels/packages/issues/254) when it comes to packages written in Rust for Python. Effectively, until that's fixed you'll likely have to install pydantic with `pip install -i https://pypi.org/simple/ pydantic`. ### Pydantic becomes a pure python package :thumbsup: Pydantic V1.X is a pure python code base but is compiled with cython to provide some performance improvements. Since the "hot" code is moved to pydantic-core, pydantic itself can go back to being a pure python package. This should significantly reduce the size of the pydantic package and make unit tests of pydantic much faster. In addition: * some constraints on pydantic code can be removed once it no-longer has to be compilable with cython * debugging will be easier as you'll be able to drop straight into the pydantic codebase as you can with other, pure python packages Some pieces of edge logic could get a little slower as they're no longer compiled. ### `is_instance` like checks :thumbsup: Strict mode also means it makes sense to provide an `is_instance` method on models which effectively run validation then throws away the result while avoiding the (admittedly small) overhead of creating and raising an error or returning the validation result. To be clear, this isn't a real `isinstance` call, rather it is equivalent to ```py title="is_instance" class BaseModel: ... 
@classmethod def model_is_instance(cls, data: Any) -> bool: try: cls(**data) except ValidationError: return False else: return True ``` ### I'm dropping the word "parse" and just using "validate" :neutral_face: Partly due to the issues with the lack of strict mode, I've gone back and forth between using the terms "parse" and "validate" for what pydantic does. While pydantic is not simply a validation library (and I'm sure some would argue validation is not strictly what it does), most people use the word **"validation"**. It's time to stop fighting that, and use consistent names. The word "parse" will no longer be used except when talking about JSON parsing, see [model methods](#model-namespace-cleanup) above. ### Changes to custom field types :neutral_face: Since the core structure of validators has changed from "a list of validators to call one after another" to "a tree of validators which call each other", the [`__get_validators__`](https://docs.pydantic.dev/usage/types/#classes-with-__get_validators__) way of defining custom field types no longer makes sense. Instead, we'll look for the attribute `__pydantic_validation_schema__` which must be a pydantic-core compliant schema for validating data to this field type (the `function` item can be a string, if so a function of that name will be taken from the class, see `'validate'` below). Here's an example of how a custom field type could be defined: ```py title="New custom field types" from pydantic import ValidationSchema class Foobar: def __init__(self, value: str): self.value = value __pydantic_validation_schema__: ValidationSchema = { 'type': 'function', 'mode': 'after', 'function': 'validate', 'schema': {'type': 'str'} } @classmethod def validate(cls, value): if 'foobar' in value: return Foobar(value) else: raise ValueError('expected foobar') ``` What's going on here: `__pydantic_validation_schema__` defines a schema which effectively says: > Validate input data as a string, then call the `validate` function with that string, use the returned value > as the final result of validation. `ValidationSchema` is just an alias to [`pydantic_core.Schema`](https://github.com/pydantic/pydantic-core/blob/main/pydantic_core/_types.py#L291) which is a type defining the schema for validation schemas. !!! note pydantic-core schema has full type definitions although since the type is recursive, mypy can't provide static type analysis, pyright however can. We can probably provide one or more helper functions to make `__pydantic_validation_schema__` easier to generate. ## Other Improvements :thumbsup: Some other things which will also change, IMHO for the better: 1. Recursive models with cyclic references - although recursive models were supported by pydantic V1, data with cyclic references caused recursion errors, in pydantic-core cyclic references are correctly detected and a validation error is raised 2. The reason I've been so keen to get pydantic-core to compile and run with wasm is that I want all examples in the docs of pydantic V2 to be editable and runnable in the browser 3. Full support for `TypedDict`, including `total=False` - e.g. omitted keys, providing validation schema to a `TypedDict` field/item will use `Annotated`, e.g. `Annotated[str, Field(strict=True)]` 4. `from_orm` has become `from_attributes` and is now defined at schema generation time (either via model config or field config) 5. 
`input_value` has been added to each line error in a `ValidationError`, making errors easier to understand, and allowing more comprehensive details of errors to be provided to end users, [pydantic#784](https://github.com/pydantic/pydantic/issues/784) 6. `on_error` logic in a schema which allows either a default value to be used in the event of an error, or that value to be omitted (in the case of a `total=False` `TypedDict`), [pydantic-core#151](https://github.com/pydantic/pydantic-core/issues/151) 7. `datetime`, `date`, `time` & `timedelta` validation is improved, see the [speedate] Rust library I built specifically for this purpose for more details 8. Powerful "priority" system for optionally merging or overriding config in sub-models for nested schemas 9. Pydantic will support [annotated-types](https://github.com/annotated-types/annotated-types), so you can do stuff like `Annotated[set[int], Len(0, 10)]` or `Name = Annotated[str, Len(1, 1024)]` 10. A single decorator for general usage - we should add a `validate` decorator which can be used: * on functions (replacing `validate_arguments`) * on dataclasses, `pydantic.dataclasses.dataclass` will become an alias of this * on `TypedDict`s * On any supported type, e.g. `Union[...]`, `Dict[str, Thing]` * On Custom field types - e.g. anything with a `__pydantic_schema__` attribute 11. Easier validation error creation - I've often found myself wanting to raise `ValidationError`s outside models, particularly in FastAPI ([here](https://github.com/samuelcolvin/foxglove/blob/a4aaacf372178f345e5ff1d569ee8fd9d10746a4/foxglove/exceptions.py#L137-L149) is one method I've used); we should provide utilities to generate these errors 12. Improve the performance of `__eq__` on models 13. Computed fields - these have been an idea in pydantic for a long time, and we should get them right 14. Model validation that avoids instances of subclasses leaking data (particularly important for FastAPI), see [pydantic-core#155](https://github.com/pydantic/pydantic-core/issues/155) 15. We'll now follow [semver](https://semver.org/) properly and avoid breaking changes between minor versions; as a result, major versions will become more common 16. Improve generics to use `M(BaseModel, Generic[T])` instead of `M(GenericModel, Generic[T])` - e.g. `GenericModel` can be removed; this results from no longer needing to compile pydantic code with cython ## Removed Features & Limitations :frowning: The emoji here is just for variation; I'm not frowning about any of this. These changes are either good IMHO (will make pydantic cleaner, easier to learn and easier to maintain) or irrelevant to 99.9+% of users. 1. `__root__` custom root models are no longer necessary since validation on any supported data type is allowed without a model 2. `.parse_file()` and `.parse_raw()`, partially replaced with `.model_validate_json()`, see [model methods](#model-namespace-cleanup) 3. `.schema_json()` & `.copy()`, see [model methods](#model-namespace-cleanup) 4. `TypeError`s are no longer considered as validation errors, but rather as internal errors; this is to better catch errors in argument names in function validators. 5. Subclasses of builtin types like `str`, `bytes` and `int` are coerced to their parent builtin type; this is a limitation of how pydantic-core converts these types to Rust types during validation - if you have a specific need to keep the type, you can use wrap validators or custom type validation as described above 6.
integers are represented in rust code as `i64`, meaning if you want to use ints where `abs(v) > 2^63 − 1` (9,223,372,036,854,775,807), you'll need to use a [wrap validator](#validator-function-improvements) and your own logic 7. [Settings Management](https://docs.pydantic.dev/usage/settings/) ??? - I definitely don't want to remove the functionality, but it's something of a historical curiosity that it lives within pydantic, perhaps it should move to a separate package, perhaps installable alongside pydantic with `pip install pydantic[settings]`? 8. The following `Config` properties will be removed: * `fields` - it's very old (it pre-dates `Field`), can be removed * `allow_mutation` will be removed, instead `frozen` will be used * `error_msg_templates`, it's not properly documented anyway, error messages can be customized with external logic if required * `getter_dict` - pydantic-core has hardcoded `from_attributes` logic * `json_loads` - again this is hard coded in pydantic-core * `json_dumps` - possibly * `json_encoders` - see the export "mode" discussion [above](#improvements-to-dumpingserializationexport) * `underscore_attrs_are_private` we should just choose a sensible default * `smart_union` - all unions are now "smart" 9. `dict(model)` functionality should be removed, there's a much clearer distinction now that in 2017 when I implemented this between a model and a dict ## Features Remaining :neutral_face: The following features will remain (mostly) unchanged: * JSONSchema, internally this will need to change a lot, but hopefully the external interface will remain unchanged * `dataclass` support, again internals might change, but not the external interface * `validate_arguments`, might be renamed, but otherwise remain * hypothesis plugin, might be able to improve this as part of the general cleanup ## Questions :question: I hope the explanation above is useful. I'm sure people will have questions and feedback; I'm aware I've skipped over some features with limited detail (this post is already fairly long :sleeping:). To allow feedback without being overwhelmed, I've created a "Pydantic V2" category for [discussions on github](https://github.com/pydantic/pydantic/discussions/categories/pydantic-v2) - please feel free to create a discussion if you have any questions or suggestions. We will endeavour to read and respond to everyone. --- ## Implementation Details :nerd: (This is yet to be built, so these are nascent ideas which might change) At the center of pydantic v2 will be a `PydanticValidator` class which looks roughly like this (note: this is just pseudo-code, it's not even valid python and is only supposed to be used to demonstrate the idea): ```py title="PydanticValidator" # type identifying data which has been validated, # as per pydantic-core, this can include "fields_set" data ValidData = ... # any type we can perform validation for AnyOutputType = ... class PydanticValidator: def __init__(self, output_type: AnyOutputType, config: Config): ... def validate(self, input_data: Any) -> ValidData: ... def validate_json(self, input_data: str | bytes | bytearray) -> ValidData: ... def is_instance(self, input_data: Any) -> bool: ... def is_instance_json(self, input_data: str | bytes | bytearray) -> bool: ... def json_schema(self) -> dict: ... def dump( self, data: ValidData, include: ... = None, exclude: ... 
= None, by_alias: bool = False, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, mode: Literal['unchanged', 'dicts', 'json-compliant'] = 'unchanged', converter: Callable[[Any], Any] | None = None ) -> Any: ... def dump_json(self, ...) -> str: ... ``` This could be used directly, but more commonly will be used by the following: * `BaseModel` * the `validate` decorator described above * `pydantic.dataclasses.dataclass` (which might be an alias of `validate`) * generics The aim will be to get pydantic V2 to a place were the vast majority of tests continue to pass unchanged. Thereby guaranteeing (as much as possible) that the external interface to pydantic and its behaviour are unchanged. ## Conversion Table :material-table: The table below provisionally defines what input value types are allowed to which field types. An updated and complete version of this table will be included in the docs for V2. !!!note Some type conversion shown here is a significant departure from existing behavior, we may have to provide a config flag for backwards compatibility for a few of them, however pydantic V2 cannot be entirely backward compatible, see [pydantic-core#152](https://github.com/pydantic/pydantic-core/issues/152). | Field Type | Input | Mode | Input Source | Conditions | |---------------|-------------|--------|--------------|-----------------------------------------------------------------------------| | `str` | `str` | both | python, JSON | - | | `str` | `bytes` | lax | python | assumes UTF-8, error on unicode decoding error | | `str` | `bytearray` | lax | python | assumes UTF-8, error on unicode decoding error | | `bytes` | `bytes` | both | python | - | | `bytes` | `str` | both | JSON | - | | `bytes` | `str` | lax | python | - | | `bytes` | `bytearray` | lax | python | - | | `int` | `int` | strict | python, JSON | max abs value 2^64 - `i64` is used internally, `bool` explicitly forbidden | | `int` | `int` | lax | python, JSON | `i64` | | `int` | `float` | lax | python, JSON | `i64`, must be exact int, e.g. `f % 1 == 0`, `nan`, `inf` raise errors | | `int` | `Decimal` | lax | python, JSON | `i64`, must be exact int, e.g. `f % 1 == 0` | | `int` | `bool` | lax | python, JSON | - | | `int` | `str` | lax | python, JSON | `i64`, must be numeric only, e.g. `[0-9]+` | | `float` | `float` | strict | python, JSON | `bool` explicitly forbidden | | `float` | `float` | lax | python, JSON | - | | `float` | `int` | lax | python, JSON | - | | `float` | `str` | lax | python, JSON | must match `[0-9]+(\.[0-9]+)?` | | `float` | `Decimal` | lax | python | - | | `float` | `bool` | lax | python, JSON | - | | `bool` | `bool` | both | python, JSON | - | | `bool` | `int` | lax | python, JSON | allowed: `0, 1` | | `bool` | `float` | lax | python, JSON | allowed: `0, 1` | | `bool` | `Decimal` | lax | python, JSON | allowed: `0, 1` | | `bool` | `str` | lax | python, JSON | allowed: `'f', 'n', 'no', 'off', 'false', 't', 'y', 'on', 'yes', 'true'` | | `None` | `None` | both | python, JSON | - | | `date` | `date` | both | python | - | | `date` | `datetime` | lax | python | must be exact date, eg. 
no H, M, S, f | | `date` | `str` | both | JSON | format `YYYY-MM-DD` | | `date` | `str` | lax | python | format `YYYY-MM-DD` | | `date` | `bytes` | lax | python | format `YYYY-MM-DD` (UTF-8) | | `date` | `int` | lax | python, JSON | interpreted as seconds or ms from epoch, see [speedate], must be exact date | | `date` | `float` | lax | python, JSON | interpreted as seconds or ms from epoch, see [speedate], must be exact date | | `datetime` | `datetime` | both | python | - | | `datetime` | `date` | lax | python | - | | `datetime` | `str` | both | JSON | format `YYYY-MM-DDTHH:MM:SS.f` etc. see [speedate] | | `datetime` | `str` | lax | python | format `YYYY-MM-DDTHH:MM:SS.f` etc. see [speedate] | | `datetime` | `bytes` | lax | python | format `YYYY-MM-DDTHH:MM:SS.f` etc. see [speedate], (UTF-8) | | `datetime` | `int` | lax | python, JSON | interpreted as seconds or ms from epoch, see [speedate] | | `datetime` | `float` | lax | python, JSON | interpreted as seconds or ms from epoch, see [speedate] | | `time` | `time` | both | python | - | | `time` | `str` | both | JSON | format `HH:MM:SS.FFFFFF` etc. see [speedate] | | `time` | `str` | lax | python | format `HH:MM:SS.FFFFFF` etc. see [speedate] | | `time` | `bytes` | lax | python | format `HH:MM:SS.FFFFFF` etc. see [speedate], (UTF-8) | | `time` | `int` | lax | python, JSON | interpreted as seconds, range 0 - 86399 | | `time` | `float` | lax | python, JSON | interpreted as seconds, range 0 - 86399.9* | | `time` | `Decimal` | lax | python, JSON | interpreted as seconds, range 0 - 86399.9* | | `timedelta` | `timedelta` | both | python | - | | `timedelta` | `str` | both | JSON | format ISO8601 etc. see [speedate] | | `timedelta` | `str` | lax | python | format ISO8601 etc. see [speedate] | | `timedelta` | `bytes` | lax | python | format ISO8601 etc. 
see [speedate], (UTF-8) | | `timedelta` | `int` | lax | python, JSON | interpreted as seconds | | `timedelta` | `float` | lax | python, JSON | interpreted as seconds | | `timedelta` | `Decimal` | lax | python, JSON | interpreted as seconds | | `dict` | `dict` | both | python | - | | `dict` | `Object` | both | JSON | - | | `dict` | `mapping` | lax | python | must implement the mapping interface and have an `items()` method | | `TypedDict` | `dict` | both | python | - | | `TypedDict` | `Object` | both | JSON | - | | `TypedDict` | `Any` | both | python | builtins not allowed, uses `getattr`, requires `from_attributes=True` | | `TypedDict` | `mapping` | lax | python | must implement the mapping interface and have an `items()` method | | `list` | `list` | both | python | - | | `list` | `Array` | both | JSON | - | | `list` | `tuple` | lax | python | - | | `list` | `set` | lax | python | - | | `list` | `frozenset` | lax | python | - | | `list` | `dict_keys` | lax | python | - | | `tuple` | `tuple` | both | python | - | | `tuple` | `Array` | both | JSON | - | | `tuple` | `list` | lax | python | - | | `tuple` | `set` | lax | python | - | | `tuple` | `frozenset` | lax | python | - | | `tuple` | `dict_keys` | lax | python | - | | `set` | `set` | both | python | - | | `set` | `Array` | both | JSON | - | | `set` | `list` | lax | python | - | | `set` | `tuple` | lax | python | - | | `set` | `frozenset` | lax | python | - | | `set` | `dict_keys` | lax | python | - | | `frozenset` | `frozenset` | both | python | - | | `frozenset` | `Array` | both | JSON | - | | `frozenset` | `list` | lax | python | - | | `frozenset` | `tuple` | lax | python | - | | `frozenset` | `set` | lax | python | - | | `frozenset` | `dict_keys` | lax | python | - | | `is_instance` | `Any` | both | python | `isinstance()` check returns `True` | | `is_instance` | - | both | JSON | never valid | | `callable` | `Any` | both | python | `callable()` check returns `True` | | `callable` | - | both | JSON | never valid | The `ModelClass` validator (used to create instances of a class) uses the `TypedDict` validator, then creates an instance with `__dict__` and `__fields_set__` set, so the same rules apply as for `TypedDict`.
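To make the table concrete, here's how the `int` rows might read when expressed as plain Python - an illustrative sketch of the lax rules only (ignoring the `i64` range check and the `Decimal` row), not pydantic-core's actual implementation:

```py
import math
import re

def lax_int(v):
    # illustrative only: mirrors the `int` rows of the table above
    if isinstance(v, bool):
        return int(v)  # bool -> int is allowed in lax mode (0 or 1)
    if isinstance(v, int):
        return v
    if isinstance(v, float):
        if not math.isfinite(v) or v % 1 != 0:
            # 123.1, nan and inf lose data or have no int representation
            raise ValueError('float must be an exact integer')
        return int(v)
    if isinstance(v, str):
        if not re.fullmatch(r'[0-9]+', v):
            raise ValueError('string must be numeric only')
        return int(v)
    raise TypeError('no lax conversion to int')
```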
[speedate]: https://docs.rs/speedate/latest/speedate/ pydantic-1.10.14/docs/build/000077500000000000000000000000001455251250200155215ustar00rootroot00000000000000pydantic-1.10.14/docs/build/exec_examples.py000077500000000000000000000340141455251250200207220ustar00rootroot00000000000000#!/usr/bin/env python3 from __future__ import annotations import importlib import json import os import re import shutil import subprocess import sys import textwrap import traceback from pathlib import Path from typing import Any, Callable from unittest.mock import patch from ansi2html import Ansi2HTMLConverter from devtools import PrettyFormat THIS_DIR = Path(__file__).parent DOCS_DIR = (THIS_DIR / '..').resolve() EXAMPLES_DIR = DOCS_DIR / 'examples' TMP_EXAMPLES_DIR = DOCS_DIR / '.tmp_examples' UPGRADED_TMP_EXAMPLES_DIR = TMP_EXAMPLES_DIR / 'upgraded' MAX_LINE_LENGTH = int( re.search(r'max_line_length = (\d+)', (EXAMPLES_DIR / '.editorconfig').read_text()).group(1) # type: ignore ) LONG_LINE = 50 LOWEST_VERSION = (3, 7) HIGHEST_VERSION = (3, 10) pformat = PrettyFormat(simple_cutoff=LONG_LINE) Error = Callable[..., None] Version = tuple[int, int] PYTHON_CODE_MD_TMPL = """ === "Python {version} and above" ```py {code} ``` """.strip() JSON_OUTPUT_MD_TMPL = """ Outputs: ```json {output} ``` """ def to_string(value: Any) -> str: # attempt to build a pretty equivalent of the print output if isinstance(value, (dict, list, tuple, set)): return pformat(value) elif isinstance(value, str) and any(re.fullmatch(r, value, flags=re.DOTALL) for r in ['{".+}', r'\[.+\]']): try: obj = json.loads(value) except ValueError: # not JSON, not a problem pass else: s = json.dumps(obj) if len(s) > LONG_LINE: json.dumps(obj, indent=2) else: return s return str(value) class MockPrint: def __init__(self, file: Path) -> None: self.file = file self.statements: list[tuple[int, str]] = [] def __call__(self, *args: Any, sep: str = ' ', **kwargs: Any) -> None: frame = sys._getframe(4) if sys.version_info >= (3, 8) else sys._getframe(3) if not self.file.samefile(frame.f_code.co_filename): # happens when index_error.py imports index_main.py return s = sep.join(map(to_string, args)) self.statements.append((frame.f_lineno, s)) class MockPath: def read_text(self, *args: Any, **kwargs: Any) -> str: return '{"foobar": "spam"}' def build_print_lines(s: str, max_len_reduction: int = 0) -> list[str]: print_lines = [] max_len = MAX_LINE_LENGTH - 3 - max_len_reduction for line in s.split('\n'): if len(line) > max_len: print_lines += textwrap.wrap(line, width=max_len) else: print_lines.append(line) return print_lines def build_print_statement(line_no: int, s: str, lines: list[str]) -> None: indent = '' for back in range(1, 100): m = re.search(r'^( *)print\(', lines[line_no - back]) if m: indent = m.group(1) break print_lines = build_print_lines(s, len(indent)) if len(print_lines) > 2: text = textwrap.indent('"""\n{}\n"""'.format('\n'.join(print_lines)), indent) else: text = '\n'.join(f'{indent}#> {line}' for line in print_lines) lines.insert(line_no, text) def all_md_contents() -> str: file_contents = [] for f in DOCS_DIR.glob('**/*.md'): file_contents.append(f.read_text()) return '\n\n\n'.join(file_contents) def gen_ansi_output() -> None: conv = Ansi2HTMLConverter() input_file = EXAMPLES_DIR / 'devtools_main.py' os.environ['PY_DEVTOOLS_HIGHLIGHT'] = 'true' p = subprocess.run((sys.executable, str(input_file)), stdout=subprocess.PIPE, check=True, encoding='utf8') html = conv.convert(p.stdout, full=False).strip('\r\n') full_html = f'
<div class="terminal">\n<pre class="terminal-content">\n{html}\n</pre>\n</div>
' path = TMP_EXAMPLES_DIR / f'{input_file.stem}.html' path.write_text(full_html) print(f'generated ansi output to {path}') dont_execute_re = re.compile(r'^# dont-execute\n', flags=re.M | re.I) dont_upgrade_re = re.compile(r'^# dont-upgrade\n', flags=re.M | re.I) requires_re = re.compile(r'^# requires: *(.+)\n', flags=re.M | re.I) required_py_re = re.compile(r'^# *requires *python *(\d+).(\d+)', flags=re.M) def should_execute(file_name: str, file_text: str) -> tuple[str, bool, Version]: m = required_py_re.search(file_text) if m: lowest_version = (int(m.groups()[0]), int(m.groups()[1])) if sys.version_info >= lowest_version: return required_py_re.sub('', file_text), True, lowest_version else: v = '.'.join(m.groups()) print(f'WARNING: {file_name} requires python {v}, not running') return ( required_py_re.sub(f'# requires python {v}, NOT EXECUTED!', file_text), False, lowest_version, ) elif dont_execute_re.search(file_text): return dont_execute_re.sub('', file_text), False, LOWEST_VERSION return file_text, True, LOWEST_VERSION def should_upgrade(file_text: str) -> tuple[str, bool]: if dont_upgrade_re.search(file_text): return dont_upgrade_re.sub('', file_text), False return file_text, True def get_requirements(file_text: str) -> tuple[str, str | None]: m = requires_re.search(file_text) if m: return requires_re.sub('', file_text), m.groups()[0] return file_text, None def exec_file(file: Path, file_text: str, error: Error) -> tuple[list[str], str | None]: no_print_intercept_re = re.compile(r'^# no-print-intercept\n', flags=re.M) print_intercept = not bool(no_print_intercept_re.search(file_text)) if not print_intercept: file_text = no_print_intercept_re.sub('', file_text) if file.stem in sys.modules: del sys.modules[file.stem] mp = MockPrint(file) mod = None with patch.object(Path, 'read_text', MockPath.read_text), patch('builtins.print') as patch_print: if print_intercept: patch_print.side_effect = mp try: mod = importlib.import_module(file.stem) except Exception: tb = traceback.format_exception(*sys.exc_info()) error(''.join(e for e in tb if '/pydantic/docs/examples/' in e or not e.startswith(' File '))) if mod and mod.__file__ != str(file): error(f'module path "{mod.__file__}" is not same as "{file}", name may shadow another module?') lines = file_text.split('\n') to_json_line = '# output-json' if to_json_line in lines: lines = [line for line in lines if line != to_json_line] if len(mp.statements) != 1: error('should have exactly one print statement') print_lines = build_print_lines(mp.statements[0][1]) return lines, '\n'.join(print_lines) + '\n' else: for line_no, print_string in reversed(mp.statements): build_print_statement(line_no, print_string, lines) return lines, None def filter_lines(lines: list[str], error: Any) -> tuple[list[str], bool]: ignored_above = False try: ignore_above = lines.index('# ignore-above') except ValueError: pass else: ignored_above = True lines = lines[ignore_above + 1 :] try: ignore_below = lines.index('# ignore-below') except ValueError: pass else: lines = lines[:ignore_below] lines = '\n'.join(lines).split('\n') if any(len(line) > MAX_LINE_LENGTH for line in lines): error(f'lines longer than {MAX_LINE_LENGTH} characters') return lines, ignored_above def upgrade_code(content: str, min_version: Version = HIGHEST_VERSION) -> str: import pyupgrade._main # type: ignore import autoflake # type: ignore upgraded = pyupgrade._main._fix_plugins( content, settings=pyupgrade._main.Settings( min_version=min_version, keep_percent_format=True, keep_mock=False, 
keep_runtime_typing=True, ), ) upgraded = autoflake.fix_code(upgraded, remove_all_unused_imports=True) return upgraded def ensure_used(file: Path, all_md: str, error: Error) -> None: """Ensures that example is used appropriately""" file_tmpl = '{{!.tmp_examples/{}!}}' md_name = file.stem + '.md' if file_tmpl.format(md_name) not in all_md: if file_tmpl.format(file.name) in all_md: error( f'incorrect usage, change filename to {md_name!r} in docs.' "make sure you don't specify ```py code blocks around examples," 'they are automatically generated now.' ) else: error( 'file not used anywhere. correct usage:', file_tmpl.format(md_name), ) def check_style(file_text: str, error: Error) -> None: if '\n\n\n\n' in file_text: error('too many new lines') if not file_text.endswith('\n'): error('no trailing new line') if re.search('^ *# *>', file_text, flags=re.M): error('contains comments with print output, please remove') def populate_upgraded_versions(file: Path, file_text: str, lowest_version: Version) -> list[tuple[Path, str, Version]]: versions = [] major, minor = lowest_version assert major == HIGHEST_VERSION[0], 'Wow, Python 4 is out? Congrats!' upgraded_file_text = file_text while minor < HIGHEST_VERSION[1]: minor += 1 new_file_text = upgrade_code(file_text, min_version=(major, minor)) if upgraded_file_text != new_file_text: upgraded_file_text = new_file_text new_file = UPGRADED_TMP_EXAMPLES_DIR / (file.stem + f'_{major}_{minor}' + file.suffix) new_file.write_text(upgraded_file_text) versions.append((new_file, upgraded_file_text, (major, minor))) return versions def exec_examples() -> int: # noqa: C901 (I really don't want to decompose it any further) errors = [] all_md = all_md_contents() new_files = {} os.environ.update( { 'my_auth_key': 'xxx', 'my_api_key': 'xxx', 'database_dsn': 'postgres://postgres@localhost:5432/env_db', 'v0': '0', 'sub_model': '{"v1": "json-1", "v2": "json-2"}', 'sub_model__v2': 'nested-2', 'sub_model__v3': '3', 'sub_model__deep__v4': 'v4', } ) sys.path.append(str(EXAMPLES_DIR)) if sys.version_info < HIGHEST_VERSION: print("WARNING: examples for 3.10+ requires python 3.10. They won't be executed") else: UPGRADED_TMP_EXAMPLES_DIR.mkdir(parents=True, exist_ok=True) sys.path.append(str(UPGRADED_TMP_EXAMPLES_DIR)) for file in sorted(EXAMPLES_DIR.iterdir()): markdown_name = file.stem + '.md' def error(*desc: str) -> None: errors.append((file, desc)) previous_frame = sys._getframe(1) filename = Path(previous_frame.f_globals['__file__']).relative_to(Path.cwd()) location = f'{filename}:{previous_frame.f_lineno}' sys.stderr.write(f'{location}: error in {file.relative_to(Path.cwd())}:\n{" ".join(desc)}\n') if not file.is_file(): # __pycache__, maybe others continue if file.suffix != '.py': # just copy new_files[file.name] = file.read_text() continue file_text = file.read_text('utf-8') ensure_used(file, all_md, error) check_style(file_text, error) file_text, execute, lowest_version = should_execute(file.name, file_text) file_text, upgrade = should_upgrade(file_text) file_text, requirements = get_requirements(file_text) if upgrade and upgrade_code(file_text, min_version=lowest_version) != file_text: error("pyupgrade would upgrade file. 
If it's not desired, add '# dont-upgrade' line at the top of the file") versions: list[tuple[Path, str, Version]] = [(file, file_text, lowest_version)] if upgrade: versions.extend(populate_upgraded_versions(file, file_text, lowest_version)) # flush importlib caches to ensure the code we just generated is discovered importlib.invalidate_caches() json_outputs: set[str | None] = set() should_run_as_is = not requirements final_content: list[str] = [] for file, file_text, lowest_version in versions: if execute and sys.version_info >= lowest_version: lines, json_output = exec_file(file, file_text, error) json_outputs.add(json_output) else: lines = file_text.split('\n') lines, ignored_lines_before_script = filter_lines(lines, error) should_run_as_is = should_run_as_is and not ignored_lines_before_script final_content.append( PYTHON_CODE_MD_TMPL.format( version='.'.join(map(str, lowest_version)), code=textwrap.indent('\n'.join(lines), ' '), ) ) if should_run_as_is: final_content.append('_(This script is complete, it should run "as is")_') elif requirements: final_content.append(f'_(This script requires {requirements})_') else: error( 'script may not run as is, but requirements were not specified.', 'specify `# requires: ` in the end of the script', ) if len(json_outputs) > 1: error('json output should not differ between versions') if json_outputs: json_output, *_ = json_outputs if json_output: final_content.append(JSON_OUTPUT_MD_TMPL.format(output=json_output)) new_files[markdown_name] = '\n'.join(final_content) if errors: print(f'\n{len(errors)} errors, not writing files\n') return 1 if TMP_EXAMPLES_DIR.exists(): shutil.rmtree(TMP_EXAMPLES_DIR) print(f'writing {len(new_files)} example files to {TMP_EXAMPLES_DIR}') TMP_EXAMPLES_DIR.mkdir() for file_name, content in new_files.items(): (TMP_EXAMPLES_DIR / file_name).write_text(content, 'utf-8') gen_ansi_output() return 0 if __name__ == '__main__': sys.exit(exec_examples()) pydantic-1.10.14/docs/build/main.py000077500000000000000000000020041455251250200170160ustar00rootroot00000000000000#!/usr/bin/env python3 import re import sys from importlib.machinery import SourceFileLoader from pathlib import Path THIS_DIR = Path(__file__).parent PROJECT_ROOT = THIS_DIR / '..' / '..' def main() -> int: history = (PROJECT_ROOT / 'HISTORY.md').read_text() history = re.sub(r'(\s)#(\d+)', r'\1[#\2](https://github.com/pydantic/pydantic/issues/\2)', history) history = re.sub(r'(\s)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I) history = re.sub('@@', '@', history) (PROJECT_ROOT / 'docs/.changelog.md').write_text(history) version = SourceFileLoader('version', str(PROJECT_ROOT / 'pydantic/version.py')).load_module() (PROJECT_ROOT / 'docs/.version.md').write_text(f'Documentation for version: **v{version.VERSION}**\n') sys.path.append(str(THIS_DIR.resolve())) from schema_mapping import build_schema_mappings from exec_examples import exec_examples build_schema_mappings() return exec_examples() if __name__ == '__main__': sys.exit(main()) pydantic-1.10.14/docs/build/schema_mapping.py000077500000000000000000000314751455251250200210630ustar00rootroot00000000000000#!/usr/bin/env python3 """ Build a table of Python / Pydantic to JSON Schema mappings. Done like this rather than as a raw rst table to make future edits easier. 
Please edit this file directly not .tmp_schema_mappings.html """ from __future__ import annotations import json import re from pathlib import Path from typing import Any table: list[tuple[str, str, str | dict[str, Any], str, str]] = [ ( 'None', 'null', '', 'JSON Schema Core', 'Same for `type(None)` or `Literal[None]`', ), ( 'bool', 'boolean', '', 'JSON Schema Core', '', ), ( 'str', 'string', '', 'JSON Schema Core', '', ), ( 'float', 'number', '', 'JSON Schema Core', '', ), ( 'int', 'integer', '', 'JSON Schema Validation', '', ), ( 'dict', 'object', '', 'JSON Schema Core', '', ), ( 'list', 'array', {'items': {}}, 'JSON Schema Core', '', ), ( 'tuple', 'array', {'items': {}}, 'JSON Schema Core', '', ), ( 'set', 'array', {'items': {}, 'uniqueItems': True}, 'JSON Schema Validation', '', ), ( 'frozenset', 'array', {'items': {}, 'uniqueItems': True}, 'JSON Schema Validation', '', ), ( 'List[str]', 'array', {'items': {'type': 'string'}}, 'JSON Schema Validation', 'And equivalently for any other sub type, e.g. `List[int]`.', ), ( 'Tuple[str, ...]', 'array', {'items': {'type': 'string'}}, 'JSON Schema Validation', 'And equivalently for any other sub type, e.g. `Tuple[int, ...]`.', ), ( 'Tuple[str, int]', 'array', {'items': [{'type': 'string'}, {'type': 'integer'}], 'minItems': 2, 'maxItems': 2}, 'JSON Schema Validation', ( 'And equivalently for any other set of subtypes. Note: If using schemas for OpenAPI, ' "you shouldn't use this declaration, as it would not be valid in OpenAPI (although it is " 'valid in JSON Schema).' ), ), ( 'Dict[str, int]', 'object', {'additionalProperties': {'type': 'integer'}}, 'JSON Schema Validation', ( 'And equivalently for any other subfields for dicts. Have in mind that although you can use other types as ' 'keys for dicts with Pydantic, only strings are valid keys for JSON, and so, only str is valid as ' 'JSON Schema key types.' 
), ), ( 'Union[str, int]', 'anyOf', {'anyOf': [{'type': 'string'}, {'type': 'integer'}]}, 'JSON Schema Validation', 'And equivalently for any other subfields for unions.', ), ( 'Enum', 'enum', '{"enum": [...]}', 'JSON Schema Validation', 'All the literal values in the enum are included in the definition.', ), ( 'SecretStr', 'string', {'writeOnly': True}, 'JSON Schema Validation', '', ), ( 'SecretBytes', 'string', {'writeOnly': True}, 'JSON Schema Validation', '', ), ( 'EmailStr', 'string', {'format': 'email'}, 'JSON Schema Validation', '', ), ( 'NameEmail', 'string', {'format': 'name-email'}, 'Pydantic standard "format" extension', '', ), ( 'AnyUrl', 'string', {'format': 'uri'}, 'JSON Schema Validation', '', ), ( 'Pattern', 'string', {'format': 'regex'}, 'JSON Schema Validation', '', ), ( 'bytes', 'string', {'format': 'binary'}, 'OpenAPI', '', ), ( 'Decimal', 'number', '', 'JSON Schema Core', '', ), ( 'UUID1', 'string', {'format': 'uuid1'}, 'Pydantic standard "format" extension', '', ), ( 'UUID3', 'string', {'format': 'uuid3'}, 'Pydantic standard "format" extension', '', ), ( 'UUID4', 'string', {'format': 'uuid4'}, 'Pydantic standard "format" extension', '', ), ( 'UUID5', 'string', {'format': 'uuid5'}, 'Pydantic standard "format" extension', '', ), ( 'UUID', 'string', {'format': 'uuid'}, 'Pydantic standard "format" extension', 'Suggested in OpenAPI.', ), ( 'FilePath', 'string', {'format': 'file-path'}, 'Pydantic standard "format" extension', '', ), ( 'DirectoryPath', 'string', {'format': 'directory-path'}, 'Pydantic standard "format" extension', '', ), ( 'Path', 'string', {'format': 'path'}, 'Pydantic standard "format" extension', '', ), ( 'datetime', 'string', {'format': 'date-time'}, 'JSON Schema Validation', '', ), ( 'date', 'string', {'format': 'date'}, 'JSON Schema Validation', '', ), ( 'time', 'string', {'format': 'time'}, 'JSON Schema Validation', '', ), ( 'timedelta', 'number', {'format': 'time-delta'}, 'Difference in seconds (a `float`), with Pydantic standard "format" extension', "Suggested in JSON Schema repository's issues by maintainer.", ), ( 'Json', 'string', {'format': 'json-string'}, 'Pydantic standard "format" extension', '', ), ( 'IPv4Address', 'string', {'format': 'ipv4'}, 'JSON Schema Validation', '', ), ( 'IPv6Address', 'string', {'format': 'ipv6'}, 'JSON Schema Validation', '', ), ( 'IPvAnyAddress', 'string', {'format': 'ipvanyaddress'}, 'Pydantic standard "format" extension', 'IPv4 or IPv6 address as used in `ipaddress` module', ), ( 'IPv4Interface', 'string', {'format': 'ipv4interface'}, 'Pydantic standard "format" extension', 'IPv4 interface as used in `ipaddress` module', ), ( 'IPv6Interface', 'string', {'format': 'ipv6interface'}, 'Pydantic standard "format" extension', 'IPv6 interface as used in `ipaddress` module', ), ( 'IPvAnyInterface', 'string', {'format': 'ipvanyinterface'}, 'Pydantic standard "format" extension', 'IPv4 or IPv6 interface as used in `ipaddress` module', ), ( 'IPv4Network', 'string', {'format': 'ipv4network'}, 'Pydantic standard "format" extension', 'IPv4 network as used in `ipaddress` module', ), ( 'IPv6Network', 'string', {'format': 'ipv6network'}, 'Pydantic standard "format" extension', 'IPv6 network as used in `ipaddress` module', ), ( 'IPvAnyNetwork', 'string', {'format': 'ipvanynetwork'}, 'Pydantic standard "format" extension', 'IPv4 or IPv6 network as used in `ipaddress` module', ), ( 'StrictBool', 'boolean', '', 'JSON Schema Core', '', ), ( 'StrictStr', 'string', '', 'JSON Schema Core', '', ), ( 'ConstrainedStr', 'string', '', 'JSON 
Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `constr` below.' ), ), ( "constr(regex='^text$', min_length=2, max_length=10)", 'string', {'pattern': '^text$', 'minLength': 2, 'maxLength': 10}, 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.', ), ( 'ConstrainedInt', 'integer', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `conint` below.' ), ), ( 'conint(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'integer', {'maximum': 5, 'exclusiveMaximum': 6, 'minimum': 2, 'exclusiveMinimum': 1, 'multipleOf': 2}, '', 'Any argument not passed to the function (not defined) will not be included in the schema.', ), ( 'PositiveInt', 'integer', {'exclusiveMinimum': 0}, 'JSON Schema Validation', '', ), ( 'NegativeInt', 'integer', {'exclusiveMaximum': 0}, 'JSON Schema Validation', '', ), ( 'NonNegativeInt', 'integer', {'minimum': 0}, 'JSON Schema Validation', '', ), ( 'NonPositiveInt', 'integer', {'maximum': 0}, 'JSON Schema Validation', '', ), ( 'ConstrainedFloat', 'number', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `confloat` below.' ), ), ( 'confloat(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'number', {'maximum': 5, 'exclusiveMaximum': 6, 'minimum': 2, 'exclusiveMinimum': 1, 'multipleOf': 2}, 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.', ), ( 'PositiveFloat', 'number', {'exclusiveMinimum': 0}, 'JSON Schema Validation', '', ), ( 'NegativeFloat', 'number', {'exclusiveMaximum': 0}, 'JSON Schema Validation', '', ), ( 'NonNegativeFloat', 'number', {'minimum': 0}, 'JSON Schema Validation', '', ), ( 'NonPositiveFloat', 'number', {'maximum': 0}, 'JSON Schema Validation', '', ), ( 'ConstrainedDecimal', 'number', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `condecimal` below.' ), ), ( 'condecimal(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'number', {'maximum': 5, 'exclusiveMaximum': 6, 'minimum': 2, 'exclusiveMinimum': 1, 'multipleOf': 2}, 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.', ), ( 'BaseModel', 'object', '', 'JSON Schema Core', 'All the properties defined will be defined with standard JSON Schema, including submodels.', ), ( 'Color', 'string', {'format': 'color'}, 'Pydantic standard "format" extension', '', ), ] headings = [ 'Python type', 'JSON Schema Type', 'Additional JSON Schema', 'Defined in', ] def md2html(s: str) -> str: return re.sub(r'`(.+?)`', r'\1', s) def build_schema_mappings() -> None: rows = [] for py_type, json_type, additional, defined_in, notes in table: if additional and not isinstance(additional, str): additional = json.dumps(additional) cols = [ f'{py_type}', f'{json_type}', f'{additional}' if additional else '', md2html(defined_in) ] rows.append('\n'.join(f' \n {c}\n ' for c in cols)) if notes: rows.append( f' \n' f' {md2html(notes)}\n' f' ' ) heading = '\n'.join(f' {h}' for h in headings) body = '\n\n\n'.join(rows) text = f"""\ {heading} {body}
""" (Path(__file__).parent / '..' / '.tmp_schema_mappings.html').write_text(text) if __name__ == '__main__': build_schema_mappings() pydantic-1.10.14/docs/changelog.md000066400000000000000000000000221455251250200166650ustar00rootroot00000000000000{!.changelog.md!} pydantic-1.10.14/docs/contributing.md000066400000000000000000000057761455251250200174720ustar00rootroot00000000000000We'd love you to contribute to *pydantic*! ## Issues Questions, feature requests and bug reports are all welcome as [discussions or issues](https://github.com/pydantic/pydantic/issues/new/choose). **However, to report a security vulnerability, please see our [security policy](https://github.com/pydantic/pydantic/security/policy).** To make it as simple as possible for us to help you, please include the output of the following call in your issue: ```bash python -c "import pydantic.utils; print(pydantic.utils.version_info())" ``` If you're using *pydantic* prior to **v1.3** (when `version_info()` was added), please manually include OS, Python version and pydantic version. Please try to always include the above unless you're unable to install *pydantic* or **know** it's not relevant to your question or feature request. ## Pull Requests It should be extremely simple to get started and create a Pull Request. *pydantic* is released regularly so you should see your improvements release in a matter of days or weeks. !!! note Unless your change is trivial (typo, docs tweak etc.), please create an issue to discuss the change before creating a pull request. If you're looking for something to get your teeth into, check out the ["help wanted"](https://github.com/pydantic/pydantic/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) label on github. To make contributing as easy and fast as possible, you'll want to run tests and linting locally. Luckily, *pydantic* has few dependencies, doesn't require compiling and tests don't need access to databases, etc. Because of this, setting up and running the tests should be very simple. You'll need to have a version between **Python 3.7 and 3.11**, **virtualenv**, **git**, **pdm** and **make** installed. ```bash # 1. clone your fork and cd into the repo directory git clone git@github.com:/pydantic.git cd pydantic # 2. Set up a virtualenv for running tests virtualenv -p `which python3.8` env source env/bin/activate # Building docs requires 3.8. If you don't need to build docs you can use # whichever version; 3.7 will work too. # 3. Install pydantic, dependencies, test dependencies and doc dependencies make install # 4. Checkout a new branch and make your changes git checkout -b my-new-feature-branch # make your changes... # 5. Fix formatting and imports make format # Pydantic uses black to enforce formatting and isort to fix imports # (https://github.com/ambv/black, https://github.com/timothycrosley/isort) # 6. Run tests and linting make # there are a few sub-commands in Makefile like `test`, `testcov` and `lint` # which you might want to use, but generally just `make` should be all you need # 7. Build documentation make docs # if you have changed the documentation make sure it builds successfully # you can also use `make docs-serve` to serve the documentation at localhost:8000 # ... commit, push, and create your pull request ``` **tl;dr**: use `make format` to fix formatting, `make` to run tests and linting & `make docs` to build the docs. 
pydantic-1.10.14/docs/datamodel_code_generator.md000066400000000000000000000033751455251250200217460ustar00rootroot00000000000000The [datamodel-code-generator](https://github.com/koxudaxi/datamodel-code-generator/) project is a library and command-line utility to generate pydantic models from just about any data source, including: * OpenAPI 3 (YAML/JSON) * JSON Schema * JSON/YAML Data (which will converted to JSON Schema) Whenever you find yourself with any data convertible JSON but without pydantic models, this tool will allow you to generate type-safe model hierarchies on demand. ## Installation ```bash pip install datamodel-code-generator ``` ## Example In this case, datamodel-code-generator creates pydantic models from a JSON Schema file. ```bash datamodel-codegen --input person.json --input-file-type jsonschema --output model.py ``` person.json: ```json { "$id": "person.json", "$schema": "http://json-schema.org/draft-07/schema#", "title": "Person", "type": "object", "properties": { "first_name": { "type": "string", "description": "The person's first name." }, "last_name": { "type": "string", "description": "The person's last name." }, "age": { "description": "Age in years.", "type": "integer", "minimum": 0 }, "pets": { "type": "array", "items": [ { "$ref": "#/definitions/Pet" } ] }, "comment": { "type": "null" } }, "required": [ "first_name", "last_name" ], "definitions": { "Pet": { "properties": { "name": { "type": "string" }, "age": { "type": "integer" } } } } } ``` model.py: {!.tmp_examples/generate_models_person_model.md!} More information can be found on the [official documentation](https://koxudaxi.github.io/datamodel-code-generator/) pydantic-1.10.14/docs/examples/000077500000000000000000000000001455251250200162405ustar00rootroot00000000000000pydantic-1.10.14/docs/examples/.editorconfig000066400000000000000000000001161455251250200207130ustar00rootroot00000000000000root = true [*.py] indent_style = space indent_size = 4 max_line_length = 80 pydantic-1.10.14/docs/examples/annotated_types_named_tuple.py000066400000000000000000000004161455251250200243710ustar00rootroot00000000000000from typing import NamedTuple from pydantic import BaseModel, ValidationError class Point(NamedTuple): x: int y: int class Model(BaseModel): p: Point print(Model(p=('1', '2'))) try: Model(p=('1.3', '2')) except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/annotated_types_typed_dict.py000066400000000000000000000016271455251250200242310ustar00rootroot00000000000000from typing_extensions import TypedDict from pydantic import BaseModel, Extra, ValidationError # `total=False` means keys are non-required class UserIdentity(TypedDict, total=False): name: str surname: str class User(TypedDict): identity: UserIdentity age: int class Model(BaseModel): u: User class Config: extra = Extra.forbid print(Model(u={'identity': {'name': 'Smith', 'surname': 'John'}, 'age': '37'})) print(Model(u={'identity': {'name': None, 'surname': 'John'}, 'age': '37'})) print(Model(u={'identity': {}, 'age': '37'})) try: Model(u={'identity': {'name': ['Smith'], 'surname': 'John'}, 'age': '24'}) except ValidationError as e: print(e) try: Model( u={ 'identity': {'name': 'Smith', 'surname': 'John'}, 'age': '37', 'email': 'john.smith@me.com', } ) except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/dataclasses_arbitrary_types_allowed.py000066400000000000000000000013371455251250200261170ustar00rootroot00000000000000import dataclasses import pydantic class ArbitraryType: def __init__(self, value): self.value = 
value def __repr__(self): return f'ArbitraryType(value={self.value!r})' @dataclasses.dataclass class DC: a: ArbitraryType b: str # valid as it is a builtin dataclass without validation my_dc = DC(a=ArbitraryType(value=3), b='qwe') try: class Model(pydantic.BaseModel): dc: DC other: str Model(dc=my_dc, other='other') except RuntimeError as e: # invalid as it is now a pydantic dataclass print(e) class Model(pydantic.BaseModel): dc: DC other: str class Config: arbitrary_types_allowed = True m = Model(dc=my_dc, other='other') print(repr(m)) pydantic-1.10.14/docs/examples/dataclasses_config.py000066400000000000000000000011211455251250200224210ustar00rootroot00000000000000from pydantic import ConfigDict from pydantic.dataclasses import dataclass # Option 1 - use directly a dict # Note: `mypy` will still raise typo error @dataclass(config=dict(validate_assignment=True)) class MyDataclass1: a: int # Option 2 - use `ConfigDict` # (same as before at runtime since it's a `TypedDict` but with intellisense) @dataclass(config=ConfigDict(validate_assignment=True)) class MyDataclass2: a: int # Option 3 - use a `Config` class like for a `BaseModel` class Config: validate_assignment = True @dataclass(config=Config) class MyDataclass3: a: int pydantic-1.10.14/docs/examples/dataclasses_default_schema.py000066400000000000000000000010521455251250200241230ustar00rootroot00000000000000import dataclasses from typing import List, Optional from pydantic import Field from pydantic.dataclasses import dataclass @dataclass class User: id: int name: str = 'John Doe' friends: List[int] = dataclasses.field(default_factory=lambda: [0]) age: Optional[int] = dataclasses.field( default=None, metadata=dict(title='The age of the user', description='do not lie!') ) height: Optional[int] = Field(None, title='The height in cm', ge=50, le=300) user = User(id='42') print(user.__pydantic_model__.schema()) pydantic-1.10.14/docs/examples/dataclasses_initvars.py000066400000000000000000000011261455251250200230200ustar00rootroot00000000000000from dataclasses import InitVar from pathlib import Path from typing import Optional from pydantic.dataclasses import dataclass @dataclass class PathData: path: Path base_path: InitVar[Optional[Path]] def __post_init__(self, base_path): print(f'Received path={self.path!r}, base_path={base_path!r}') def __post_init_post_parse__(self, base_path): if base_path is not None: self.path = base_path / self.path path_data = PathData('world', base_path='/hello') # Received path='world', base_path='/hello' assert path_data.path == Path('/hello/world') pydantic-1.10.14/docs/examples/dataclasses_json_dumps.py000066400000000000000000000005511455251250200233430ustar00rootroot00000000000000import dataclasses import json from typing import List from pydantic.dataclasses import dataclass from pydantic.json import pydantic_encoder @dataclass class User: id: int name: str = 'John Doe' friends: List[int] = dataclasses.field(default_factory=lambda: [0]) user = User(id='42') print(json.dumps(user, indent=4, default=pydantic_encoder)) pydantic-1.10.14/docs/examples/dataclasses_main.py000066400000000000000000000003511455251250200221040ustar00rootroot00000000000000from datetime import datetime from pydantic.dataclasses import dataclass @dataclass class User: id: int name: str = 'John Doe' signup_ts: datetime = None user = User(id='42', signup_ts='2032-06-21T12:00') print(user) pydantic-1.10.14/docs/examples/dataclasses_nested.py000066400000000000000000000003561455251250200224470ustar00rootroot00000000000000from pydantic import 
AnyUrl from pydantic.dataclasses import dataclass @dataclass class NavbarButton: href: AnyUrl @dataclass class Navbar: button: NavbarButton navbar = Navbar(button=('https://example.com',)) print(navbar) pydantic-1.10.14/docs/examples/dataclasses_post_init_post_parse.py000066400000000000000000000005251455251250200254320ustar00rootroot00000000000000from pydantic.dataclasses import dataclass @dataclass class Birth: year: int month: int day: int @dataclass class User: birth: Birth def __post_init__(self): print(self.birth) def __post_init_post_parse__(self): print(self.birth) user = User(**{'birth': {'year': 1995, 'month': 3, 'day': 2}}) pydantic-1.10.14/docs/examples/dataclasses_stdlib_inheritance.py000066400000000000000000000004661455251250200250210ustar00rootroot00000000000000import dataclasses import pydantic @dataclasses.dataclass class Z: z: int @dataclasses.dataclass class Y(Z): y: int = 0 @pydantic.dataclasses.dataclass class X(Y): x: int = 0 foo = X(x=b'1', y='2', z='3') print(foo) try: X(z='pika') except pydantic.ValidationError as e: print(e) pydantic-1.10.14/docs/examples/dataclasses_stdlib_run_validation.py000066400000000000000000000010651455251250200255420ustar00rootroot00000000000000import dataclasses from pydantic import ValidationError from pydantic.dataclasses import dataclass as pydantic_dataclass, set_validation @dataclasses.dataclass class User: id: int name: str # Enhance stdlib dataclass pydantic_dataclass(User) user1 = User(id='whatever', name='I want') # validate data of `user1` try: user1.__pydantic_validate_values__() except ValidationError as e: print(e) # Enforce validation try: with set_validation(User, True): User(id='whatever', name='I want') except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/dataclasses_stdlib_to_pydantic.py000066400000000000000000000016441455251250200250440ustar00rootroot00000000000000import dataclasses from datetime import datetime from typing import Optional import pydantic @dataclasses.dataclass class Meta: modified_date: Optional[datetime] seen_count: int @dataclasses.dataclass class File(Meta): filename: str # `ValidatedFile` will be a proxy around `File` ValidatedFile = pydantic.dataclasses.dataclass(File) # the original dataclass is the `__dataclass__` attribute assert ValidatedFile.__dataclass__ is File validated_file = ValidatedFile( filename=b'thefilename', modified_date='2020-01-01T00:00', seen_count='7', ) print(validated_file) try: ValidatedFile( filename=['not', 'a', 'string'], modified_date=None, seen_count=3, ) except pydantic.ValidationError as e: print(e) # `File` is not altered and still does no validation by default print(File( filename=['not', 'a', 'string'], modified_date=None, seen_count=3, )) pydantic-1.10.14/docs/examples/dataclasses_stdlib_with_basemodel.py000066400000000000000000000013461455251250200255140ustar00rootroot00000000000000import dataclasses from datetime import datetime from typing import Optional from pydantic import BaseModel, ValidationError @dataclasses.dataclass(frozen=True) class User: name: str @dataclasses.dataclass class File: filename: str last_modification_time: Optional[datetime] = None class Foo(BaseModel): file: File user: Optional[User] = None file = File( filename=['not', 'a', 'string'], last_modification_time='2020-01-01T00:00', ) # nothing is validated as expected print(file) try: Foo(file=file) except ValidationError as e: print(e) foo = Foo(file=File(filename='myfile'), user=User(name='pika')) try: foo.user.name = 'bulbi' except dataclasses.FrozenInstanceError as e: 
print(e) pydantic-1.10.14/docs/examples/devtools_main.py000066400000000000000000000011401455251250200214510ustar00rootroot00000000000000# no-print-intercept from datetime import datetime from typing import List from pydantic import BaseModel from devtools import debug class Address(BaseModel): street: str country: str lat: float lng: float class User(BaseModel): id: int name: str signup_ts: datetime friends: List[int] address: Address user = User( id='123', name='John Doe', signup_ts='2019-06-01 12:22', friends=[1234, 4567, 7890], address=dict(street='Testing', country='uk', lat=51.5, lng=0), ) debug(user) print('\nshould be much easier read than:\n') print('user:', user) pydantic-1.10.14/docs/examples/exporting_models_copy.py000066400000000000000000000010151455251250200232230ustar00rootroot00000000000000from pydantic import BaseModel class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): banana: float foo: str bar: BarModel m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) print(m.copy(include={'foo', 'bar'})) print(m.copy(exclude={'foo', 'bar'})) print(m.copy(update={'banana': 0})) print(id(m.bar), id(m.copy().bar)) # normal copy gives the same object reference for `bar` print(id(m.bar), id(m.copy(deep=True).bar)) # deep copy gives a new object reference for `bar` pydantic-1.10.14/docs/examples/exporting_models_dict.py000066400000000000000000000005271455251250200232030ustar00rootroot00000000000000from pydantic import BaseModel class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): banana: float foo: str bar: BarModel m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) # returns a dictionary: print(m.dict()) print(m.dict(include={'foo', 'bar'})) print(m.dict(exclude={'foo', 'bar'})) pydantic-1.10.14/docs/examples/exporting_models_exclude1.py000066400000000000000000000010561455251250200237700ustar00rootroot00000000000000from pydantic import BaseModel, SecretStr class User(BaseModel): id: int username: str password: SecretStr class Transaction(BaseModel): id: str user: User value: int t = Transaction( id='1234567890', user=User( id=42, username='JohnDoe', password='hashedpassword' ), value=9876543210, ) # using a set: print(t.dict(exclude={'user', 'value'})) # using a dict: print(t.dict(exclude={'user': {'username', 'password'}, 'value': True})) print(t.dict(include={'id': True, 'user': {'id'}})) pydantic-1.10.14/docs/examples/exporting_models_exclude2.py000066400000000000000000000030241455251250200237660ustar00rootroot00000000000000import datetime from typing import List from pydantic import BaseModel, SecretStr class Country(BaseModel): name: str phone_code: int class Address(BaseModel): post_code: int country: Country class CardDetails(BaseModel): number: SecretStr expires: datetime.date class Hobby(BaseModel): name: str info: str class User(BaseModel): first_name: str second_name: str address: Address card_details: CardDetails hobbies: List[Hobby] user = User( first_name='John', second_name='Doe', address=Address( post_code=123456, country=Country( name='USA', phone_code=1 ) ), card_details=CardDetails( number=4212934504460000, expires=datetime.date(2020, 5, 1) ), hobbies=[ Hobby(name='Programming', info='Writing code and stuff'), Hobby(name='Gaming', info='Hell Yeah!!!'), ], ) exclude_keys = { 'second_name': True, 'address': {'post_code': True, 'country': {'phone_code'}}, 'card_details': True, # You can exclude fields from specific members of a tuple/list by index: 'hobbies': {-1: {'info'}}, } include_keys = { 'first_name': 
True, 'address': {'country': {'name'}}, 'hobbies': {0: True, -1: {'name'}}, } # would be the same as user.dict(exclude=exclude_keys) in this case: print(user.dict(include=include_keys)) # To exclude a field from all members of a nested list or tuple, use "__all__": print(user.dict(exclude={'hobbies': {'__all__': {'info'}}})) pydantic-1.10.14/docs/examples/exporting_models_exclude3.py000066400000000000000000000007751455251250200240010ustar00rootroot00000000000000from pydantic import BaseModel, Field, SecretStr class User(BaseModel): id: int username: str password: SecretStr = Field(..., exclude=True) class Transaction(BaseModel): id: str user: User = Field(..., exclude={'username'}) value: int class Config: fields = {'value': {'exclude': True}} t = Transaction( id='1234567890', user=User( id=42, username='JohnDoe', password='hashedpassword' ), value=9876543210, ) print(t.dict()) pydantic-1.10.14/docs/examples/exporting_models_exclude4.py000066400000000000000000000007431455251250200237750ustar00rootroot00000000000000from pydantic import BaseModel, Field, SecretStr class User(BaseModel): id: int username: str # overridden by explicit exclude password: SecretStr = Field(exclude=True) class Transaction(BaseModel): id: str user: User value: int t = Transaction( id='1234567890', user=User( id=42, username='JohnDoe', password='hashedpassword' ), value=9876543210, ) print(t.dict(exclude={'value': True, 'user': {'username'}})) pydantic-1.10.14/docs/examples/exporting_models_exclude5.py000066400000000000000000000007721455251250200240000ustar00rootroot00000000000000from pydantic import BaseModel, Field, SecretStr class User(BaseModel): id: int = Field(..., include=True) username: str = Field(..., include=True) # overridden by explicit include password: SecretStr class Transaction(BaseModel): id: str user: User value: int t = Transaction( id='1234567890', user=User( id=42, username='JohnDoe', password='hashedpassword' ), value=9876543210, ) print(t.dict(include={'id': True, 'user': {'id'}})) pydantic-1.10.14/docs/examples/exporting_models_iterate.py000066400000000000000000000004461455251250200237150ustar00rootroot00000000000000from pydantic import BaseModel class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): banana: float foo: str bar: BarModel m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) print(dict(m)) for name, value in m: print(f'{name}: {value}') pydantic-1.10.14/docs/examples/exporting_models_json.py000066400000000000000000000004171455251250200232270ustar00rootroot00000000000000from datetime import datetime from pydantic import BaseModel class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): foo: datetime bar: BarModel m = FooBarModel(foo=datetime(2032, 6, 1, 12, 13, 14), bar={'whatever': 123}) print(m.json()) pydantic-1.10.14/docs/examples/exporting_models_json_encoders.py000066400000000000000000000006611455251250200251120ustar00rootroot00000000000000from datetime import datetime, timedelta from pydantic import BaseModel from pydantic.json import timedelta_isoformat class WithCustomEncoders(BaseModel): dt: datetime diff: timedelta class Config: json_encoders = { datetime: lambda v: v.timestamp(), timedelta: timedelta_isoformat, } m = WithCustomEncoders(dt=datetime(2032, 6, 1), diff=timedelta(hours=100)) print(m.json()) pydantic-1.10.14/docs/examples/exporting_models_json_encoders_merge.py000066400000000000000000000010431455251250200262640ustar00rootroot00000000000000from datetime import datetime, timedelta from pydantic import BaseModel from 
pydantic.json import timedelta_isoformat class BaseClassWithEncoders(BaseModel): dt: datetime diff: timedelta class Config: json_encoders = { datetime: lambda v: v.timestamp() } class ChildClassWithEncoders(BaseClassWithEncoders): class Config: json_encoders = { timedelta: timedelta_isoformat } m = ChildClassWithEncoders(dt=datetime(2032, 6, 1), diff=timedelta(hours=100)) print(m.json()) pydantic-1.10.14/docs/examples/exporting_models_json_forward_ref.py000066400000000000000000000014601455251250200256060ustar00rootroot00000000000000from typing import List, Optional from pydantic import BaseModel class Address(BaseModel): city: str country: str class User(BaseModel): name: str address: Address friends: Optional[List['User']] = None class Config: json_encoders = { Address: lambda a: f'{a.city} ({a.country})', 'User': lambda u: f'{u.name} in {u.address.city} ' f'({u.address.country[:2].upper()})', } User.update_forward_refs() wolfgang = User( name='Wolfgang', address=Address(city='Berlin', country='Deutschland'), friends=[ User(name='Pierre', address=Address(city='Paris', country='France')), User(name='John', address=Address(city='London', country='UK')), ], ) print(wolfgang.json(models_as_dict=False)) pydantic-1.10.14/docs/examples/exporting_models_json_subclass.py000066400000000000000000000011211455251250200251170ustar00rootroot00000000000000from datetime import date, timedelta from pydantic import BaseModel from pydantic.validators import int_validator class DayThisYear(date): """ Contrived example of a special type of date that takes an int and interprets it as a day in the current year """ @classmethod def __get_validators__(cls): yield int_validator yield cls.validate @classmethod def validate(cls, v: int): return date.today().replace(month=1, day=1) + timedelta(days=v) class FooModel(BaseModel): date: DayThisYear m = FooModel(date=300) print(m.json()) pydantic-1.10.14/docs/examples/exporting_models_orjson.py000066400000000000000000000010071455251250200235640ustar00rootroot00000000000000from datetime import datetime import orjson from pydantic import BaseModel def orjson_dumps(v, *, default): # orjson.dumps returns bytes, to match standard json.dumps we need to decode return orjson.dumps(v, default=default).decode() class User(BaseModel): id: int name = 'John Doe' signup_ts: datetime = None class Config: json_loads = orjson.loads json_dumps = orjson_dumps user = User.parse_raw('{"id":123,"signup_ts":1234567890,"name":"John Doe"}') print(user.json()) pydantic-1.10.14/docs/examples/exporting_models_pickle.py000066400000000000000000000003251455251250200235230ustar00rootroot00000000000000import pickle from pydantic import BaseModel class FooBarModel(BaseModel): a: str b: int m = FooBarModel(a='hello', b=123) print(m) data = pickle.dumps(m) print(data) m2 = pickle.loads(data) print(m2) pydantic-1.10.14/docs/examples/exporting_models_ujson.py000066400000000000000000000004641455251250200234160ustar00rootroot00000000000000from datetime import datetime import ujson from pydantic import BaseModel class User(BaseModel): id: int name = 'John Doe' signup_ts: datetime = None class Config: json_loads = ujson.loads user = User.parse_raw('{"id": 123,"signup_ts":1234567890,"name":"John Doe"}') print(user) pydantic-1.10.14/docs/examples/generate_models_person_model.py000066400000000000000000000011771455251250200245230ustar00rootroot00000000000000# dont-upgrade # generated by datamodel-codegen: # filename: person.json # timestamp: 2020-05-19T15:07:31+00:00 from __future__ import annotations from typing 
import Any, List, Optional from pydantic import BaseModel, Field, conint class Pet(BaseModel): name: Optional[str] = None age: Optional[int] = None class Person(BaseModel): first_name: str = Field(..., description="The person's first name.") last_name: str = Field(..., description="The person's last name.") age: Optional[conint(ge=0)] = Field(None, description='Age in years.') pets: Optional[List[Pet]] = None comment: Optional[Any] = None pydantic-1.10.14/docs/examples/hypothesis_property_based_test.py000066400000000000000000000014141455251250200251520ustar00rootroot00000000000000import typing from hypothesis import given, strategies as st from pydantic import BaseModel, EmailStr, PaymentCardNumber, PositiveFloat class Model(BaseModel): card: PaymentCardNumber price: PositiveFloat users: typing.List[EmailStr] @given(st.builds(Model)) def test_property(instance): # Hypothesis calls this test function many times with varied Models, # so you can write a test that should pass given *any* instance. assert 0 < instance.price assert all('@' in email for email in instance.users) @given(st.builds(Model, price=st.floats(100, 200))) def test_with_discount(instance): # This test shows how you can override specific fields, # and let Hypothesis fill in any you don't care about. assert 100 <= instance.price <= 200 pydantic-1.10.14/docs/examples/index_error.py000066400000000000000000000003701455251250200211320ustar00rootroot00000000000000# output-json from index_main import User # ignore-above from pydantic import ValidationError try: User(signup_ts='broken', friends=[1, 2, 'not number']) except ValidationError as e: print(e.json()) # requires: User from previous example pydantic-1.10.14/docs/examples/index_main.py000066400000000000000000000006661455251250200207350ustar00rootroot00000000000000from datetime import datetime from typing import List, Optional from pydantic import BaseModel class User(BaseModel): id: int name = 'John Doe' signup_ts: Optional[datetime] = None friends: List[int] = [] external_data = { 'id': '123', 'signup_ts': '2019-06-01 12:22', 'friends': [1, 2, '3'], } user = User(**external_data) print(user.id) print(repr(user.signup_ts)) print(user.friends) print(user.dict()) pydantic-1.10.14/docs/examples/model_config_alias_generator.py000066400000000000000000000005541455251250200244620ustar00rootroot00000000000000from pydantic import BaseModel def to_camel(string: str) -> str: return ''.join(word.capitalize() for word in string.split('_')) class Voice(BaseModel): name: str language_code: str class Config: alias_generator = to_camel voice = Voice(Name='Filiz', LanguageCode='tr-TR') print(voice.language_code) print(voice.dict(by_alias=True)) pydantic-1.10.14/docs/examples/model_config_alias_precedence.py000066400000000000000000000010211455251250200245570ustar00rootroot00000000000000from pydantic import BaseModel, Field class Voice(BaseModel): name: str = Field(None, alias='ActorName') language_code: str = None mood: str = None class Character(Voice): act: int = 1 class Config: fields = {'language_code': 'lang'} @classmethod def alias_generator(cls, string: str) -> str: # this is the same as `alias_generator = to_camel` above return ''.join(word.capitalize() for word in string.split('_')) print(Character.schema(by_alias=True)) pydantic-1.10.14/docs/examples/model_config_change_globally_custom.py000066400000000000000000000003461455251250200260260ustar00rootroot00000000000000from pydantic import BaseModel as PydanticBaseModel class BaseModel(PydanticBaseModel): class Config: 
arbitrary_types_allowed = True class MyClass: """A random class""" class Model(BaseModel): x: MyClass pydantic-1.10.14/docs/examples/model_config_class_kwargs.py000066400000000000000000000003001455251250200237730ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError, Extra class Model(BaseModel, extra=Extra.forbid): a: str try: Model(a='spam', b='oh no') except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/model_config_dataclass.py000066400000000000000000000010201455251250200232470ustar00rootroot00000000000000from datetime import datetime from pydantic import ValidationError from pydantic.dataclasses import dataclass class MyConfig: max_anystr_length = 10 validate_assignment = True error_msg_templates = { 'value_error.any_str.max_length': 'max_length:{limit_value}', } @dataclass(config=MyConfig) class User: id: int name: str = 'John Doe' signup_ts: datetime = None user = User(id='42', signup_ts='2032-06-21T12:00') try: user.name = 'x' * 20 except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/model_config_main.py000066400000000000000000000005021455251250200222400ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError class Model(BaseModel): v: str class Config: max_anystr_length = 10 error_msg_templates = { 'value_error.any_str.max_length': 'max_length:{limit_value}', } try: Model(v='x' * 20) except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/model_config_smart_union_off.py000066400000000000000000000003401455251250200245040ustar00rootroot00000000000000from typing import Union from pydantic import BaseModel class Foo(BaseModel): pass class Bar(BaseModel): pass class Model(BaseModel): x: Union[str, int] y: Union[Foo, Bar] print(Model(x=1, y=Bar())) pydantic-1.10.14/docs/examples/model_config_smart_union_on.py000066400000000000000000000004161455251250200243520ustar00rootroot00000000000000from typing import Union from pydantic import BaseModel class Foo(BaseModel): pass class Bar(BaseModel): pass class Model(BaseModel): x: Union[str, int] y: Union[Foo, Bar] class Config: smart_union = True print(Model(x=1, y=Bar())) pydantic-1.10.14/docs/examples/model_config_smart_union_on_edge_case.py000066400000000000000000000003531455251250200263310ustar00rootroot00000000000000from typing import List, Union from pydantic import BaseModel class Model(BaseModel, smart_union=True): x: Union[List[str], List[int]] # Expected coercion print(Model(x=[1, '2'])) # Unexpected coercion print(Model(x=[1, 2])) pydantic-1.10.14/docs/examples/models_abc.py000066400000000000000000000002611455251250200207010ustar00rootroot00000000000000import abc from pydantic import BaseModel class FooBarModel(BaseModel, abc.ABC): a: str b: int @abc.abstractmethod def my_abstract_method(self): pass pydantic-1.10.14/docs/examples/models_construct.py000066400000000000000000000012721455251250200222030ustar00rootroot00000000000000from pydantic import BaseModel class User(BaseModel): id: int age: int name: str = 'John Doe' original_user = User(id=123, age=32) user_data = original_user.dict() print(user_data) fields_set = original_user.__fields_set__ print(fields_set) # ... # pass user_data and fields_set to RPC or save to the database etc. # ... 
# you can then create a new instance of User without # re-running validation which would be unnecessary at this point: new_user = User.construct(_fields_set=fields_set, **user_data) print(repr(new_user)) print(new_user.__fields_set__) # construct can be dangerous, only use it with validated data!: bad_user = User.construct(id='dog') print(repr(bad_user)) pydantic-1.10.14/docs/examples/models_custom_root_access.py000066400000000000000000000004751455251250200240610ustar00rootroot00000000000000from typing import List from pydantic import BaseModel class Pets(BaseModel): __root__: List[str] def __iter__(self): return iter(self.__root__) def __getitem__(self, item): return self.__root__[item] pets = Pets.parse_obj(['dog', 'cat']) print(pets[0]) print([pet for pet in pets]) pydantic-1.10.14/docs/examples/models_custom_root_field.py000066400000000000000000000005531455251250200237000ustar00rootroot00000000000000from typing import List import json from pydantic import BaseModel from pydantic.schema import schema class Pets(BaseModel): __root__: List[str] print(Pets(__root__=['dog', 'cat'])) print(Pets(__root__=['dog', 'cat']).json()) print(Pets.parse_obj(['dog', 'cat'])) print(Pets.schema()) pets_schema = schema([Pets]) print(json.dumps(pets_schema, indent=2)) pydantic-1.10.14/docs/examples/models_custom_root_field_parse_obj.py000066400000000000000000000007361455251250200257270ustar00rootroot00000000000000from typing import List, Dict from pydantic import BaseModel, ValidationError class Pets(BaseModel): __root__: List[str] print(Pets.parse_obj(['dog', 'cat'])) print(Pets.parse_obj({'__root__': ['dog', 'cat']})) # not recommended class PetsByName(BaseModel): __root__: Dict[str, str] print(PetsByName.parse_obj({'Otis': 'dog', 'Milo': 'cat'})) try: PetsByName.parse_obj({'__root__': {'Otis': 'dog', 'Milo': 'cat'}}) except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/models_data_conversion.py000066400000000000000000000002171455251250200233330ustar00rootroot00000000000000from pydantic import BaseModel class Model(BaseModel): a: int b: float c: str print(Model(a=3.1415, b=' 2.72 ', c=123).dict()) pydantic-1.10.14/docs/examples/models_default_factory.py000066400000000000000000000005111455251250200233250ustar00rootroot00000000000000from datetime import datetime from uuid import UUID, uuid4 from pydantic import BaseModel, Field class Model(BaseModel): uid: UUID = Field(default_factory=uuid4) updated: datetime = Field(default_factory=datetime.utcnow) m1 = Model() m2 = Model() print(f'{m1.uid} != {m2.uid}') print(f'{m1.updated} != {m2.updated}') pydantic-1.10.14/docs/examples/models_dynamic_creation.py000066400000000000000000000003051455251250200234630ustar00rootroot00000000000000from pydantic import BaseModel, create_model DynamicFoobarModel = create_model('DynamicFoobarModel', foo=(str, ...), bar=123) class StaticFoobarModel(BaseModel): foo: str bar: int = 123 pydantic-1.10.14/docs/examples/models_dynamic_inheritance.py000066400000000000000000000004111455251250200241460ustar00rootroot00000000000000from pydantic import BaseModel, create_model class FooModel(BaseModel): foo: str bar: int = 123 BarModel = create_model( 'BarModel', apple='russet', banana='yellow', __base__=FooModel, ) print(BarModel) print(BarModel.__fields__.keys()) pydantic-1.10.14/docs/examples/models_dynamic_validators.py000066400000000000000000000007471455251250200240410ustar00rootroot00000000000000from pydantic import create_model, ValidationError, validator def username_alphanumeric(cls, v): assert v.isalnum(), 'must be 
pydantic-1.10.14/docs/examples/models_errors1.py

from typing import List
from pydantic import BaseModel, ValidationError, conint

class Location(BaseModel):
    lat = 0.1
    lng = 10.1

class Model(BaseModel):
    is_required: float
    gt_int: conint(gt=42)
    list_of_ints: List[int] = None
    a_float: float = None
    recursive_model: Location = None

data = dict(
    list_of_ints=['1', 2, 'bad'],
    a_float='not a float',
    recursive_model={'lat': 4.2, 'lng': 'New York'},
    gt_int=21,
)

try:
    Model(**data)
except ValidationError as e:
    print(e)

try:
    Model(**data)
except ValidationError as e:
    print(e.json())

pydantic-1.10.14/docs/examples/models_errors2.py

from pydantic import BaseModel, ValidationError, validator

class Model(BaseModel):
    foo: str

    @validator('foo')
    def value_must_equal_bar(cls, v):
        if v != 'bar':
            raise ValueError('value must be "bar"')
        return v

try:
    Model(foo='ber')
except ValidationError as e:
    print(e.errors())

pydantic-1.10.14/docs/examples/models_errors3.py

from pydantic import BaseModel, PydanticValueError, ValidationError, validator

class NotABarError(PydanticValueError):
    code = 'not_a_bar'
    msg_template = 'value is not "bar", got "{wrong_value}"'

class Model(BaseModel):
    foo: str

    @validator('foo')
    def value_must_equal_bar(cls, v):
        if v != 'bar':
            raise NotABarError(wrong_value=v)
        return v

try:
    Model(foo='ber')
except ValidationError as e:
    print(e.json())

pydantic-1.10.14/docs/examples/models_field_order.py

from pydantic import BaseModel, ValidationError

class Model(BaseModel):
    a: int
    b = 2
    c: int = 1
    d = 0
    e: float

print(Model.__fields__.keys())
m = Model(e=2, a=1)
print(m.dict())
try:
    Model(a='x', b='x', c='x', d='x', e='x')
except ValidationError as e:
    error_locations = [e['loc'] for e in e.errors()]

print(error_locations)

pydantic-1.10.14/docs/examples/models_from_typeddict.py

from typing_extensions import TypedDict
from pydantic import ValidationError, create_model_from_typeddict

class User(TypedDict):
    name: str
    id: int

class Config:
    extra = 'forbid'

UserM = create_model_from_typeddict(User, __config__=Config)
print(repr(UserM(name=123, id='3')))

try:
    UserM(name=123, id='3', other='no')
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/models_generics.py

from typing import Generic, TypeVar, Optional, List
from pydantic import BaseModel, validator, ValidationError
from pydantic.generics import GenericModel

DataT = TypeVar('DataT')

class Error(BaseModel):
    code: int
    message: str

class DataModel(BaseModel):
    numbers: List[int]
    people: List[str]

class Response(GenericModel, Generic[DataT]):
    data: Optional[DataT]
    error: Optional[Error]

    @validator('error', always=True)
    def check_consistency(cls, v, values):
        if v is not None and values['data'] is not None:
            raise ValueError('must not provide both data and error')
        if v is None and values.get('data') is None:
            raise ValueError('must provide data or error')
        return v

data = DataModel(numbers=[1, 2, 3], people=[])
error = Error(code=404, message='Not found')

print(Response[int](data=1))
print(Response[str](data='value'))
print(Response[str](data='value').dict())
print(Response[DataModel](data=data).dict())
print(Response[DataModel](error=error).dict())
try:
    Response[int](data='value')
except ValidationError as e:
    print(e)
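A small companion sketch (illustrative, not from the docs): a parametrized generic model is itself a model class and can be bound to a name and reused.

from typing import Generic, TypeVar, Optional
from pydantic.generics import GenericModel

DataT = TypeVar('DataT')

class Response(GenericModel, Generic[DataT]):
    data: Optional[DataT]

# `Response[int]` can be aliased like any other class
IntResponse = Response[int]
print(IntResponse(data='1'))  # the string '1' is coerced to the int 1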
pydantic-1.10.14/docs/examples/models_generics_inheritance.py

from typing import TypeVar, Generic
from pydantic.generics import GenericModel

TypeX = TypeVar('TypeX')

class BaseClass(GenericModel, Generic[TypeX]):
    X: TypeX

class ChildClass(BaseClass[TypeX], Generic[TypeX]):
    # Inherit from Generic[TypeX]
    pass

# Replace TypeX by int
print(ChildClass[int](X=1))

pydantic-1.10.14/docs/examples/models_generics_inheritance_extend.py

from typing import TypeVar, Generic
from pydantic.generics import GenericModel

TypeX = TypeVar('TypeX')
TypeY = TypeVar('TypeY')
TypeZ = TypeVar('TypeZ')

class BaseClass(GenericModel, Generic[TypeX, TypeY]):
    x: TypeX
    y: TypeY

class ChildClass(BaseClass[int, TypeY], Generic[TypeY, TypeZ]):
    z: TypeZ

# Replace TypeY by str
print(ChildClass[str, int](x=1, y='y', z=3))

pydantic-1.10.14/docs/examples/models_generics_naming.py

from typing import Generic, TypeVar, Type, Any, Tuple
from pydantic.generics import GenericModel

DataT = TypeVar('DataT')

class Response(GenericModel, Generic[DataT]):
    data: DataT

    @classmethod
    def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str:
        return f'{params[0].__name__.title()}Response'

print(repr(Response[int](data=1)))
print(repr(Response[str](data='a')))

pydantic-1.10.14/docs/examples/models_generics_nested.py

from typing import Generic, TypeVar
from pydantic import ValidationError
from pydantic.generics import GenericModel

T = TypeVar('T')

class InnerT(GenericModel, Generic[T]):
    inner: T

class OuterT(GenericModel, Generic[T]):
    outer: T
    nested: InnerT[T]

nested = InnerT[int](inner=1)
print(OuterT[int](outer=1, nested=nested))
try:
    nested = InnerT[str](inner='a')
    print(OuterT[int](outer='a', nested=nested))
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/models_generics_typevars.py

from typing import Generic, TypeVar
from pydantic import ValidationError
from pydantic.generics import GenericModel

AT = TypeVar('AT')
BT = TypeVar('BT')

class Model(GenericModel, Generic[AT, BT]):
    a: AT
    b: BT

print(Model(a='a', b='a'))

IntT = TypeVar('IntT', bound=int)
typevar_model = Model[int, IntT]
print(typevar_model(a=1, b=1))
try:
    typevar_model(a='a', b='a')
except ValidationError as exc:
    print(exc)

concrete_model = typevar_model[int]
print(concrete_model(a=1, b=1))

pydantic-1.10.14/docs/examples/models_mutation.py

from pydantic import BaseModel

class FooBarModel(BaseModel):
    a: str
    b: dict

    class Config:
        allow_mutation = False

foobar = FooBarModel(a='hello', b={'apple': 'pear'})

try:
    foobar.a = 'different'
except TypeError as e:
    print(e)

print(foobar.a)
print(foobar.b)
foobar.b['apple'] = 'grape'
print(foobar.b)
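A companion sketch (illustrative): with `allow_mutation = False`, `copy(update=...)` is the usual way to get a changed instance without mutating the original — note the updated values are not re-validated.

from pydantic import BaseModel

class FrozenModel(BaseModel):
    a: str

    class Config:
        allow_mutation = False

m = FrozenModel(a='hello')
m2 = m.copy(update={'a': 'world'})  # returns a new instance
print(m.a, m2.a)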
pydantic-1.10.14/docs/examples/models_orm_mode.py

from typing import List
from sqlalchemy import Column, Integer, String
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.ext.declarative import declarative_base
from pydantic import BaseModel, constr

Base = declarative_base()

class CompanyOrm(Base):
    __tablename__ = 'companies'
    id = Column(Integer, primary_key=True, nullable=False)
    public_key = Column(String(20), index=True, nullable=False, unique=True)
    name = Column(String(63), unique=True)
    domains = Column(ARRAY(String(255)))

class CompanyModel(BaseModel):
    id: int
    public_key: constr(max_length=20)
    name: constr(max_length=63)
    domains: List[constr(max_length=255)]

    class Config:
        orm_mode = True

co_orm = CompanyOrm(
    id=123,
    public_key='foobar',
    name='Testing',
    domains=['example.com', 'foobar.com'],
)
print(co_orm)
co_model = CompanyModel.from_orm(co_orm)
print(co_model)

pydantic-1.10.14/docs/examples/models_orm_mode_data_binding.py

from pydantic import BaseModel
from typing import Any, Optional
from pydantic.utils import GetterDict
from xml.etree.ElementTree import fromstring

xmlstring = """
<User Id="2138">
    <FirstName />
    <LoggedIn Value="true" />
</User>
"""

class UserGetter(GetterDict):

    def get(self, key: str, default: Any) -> Any:

        # element attributes
        if key in {'Id', 'Status'}:
            return self._obj.attrib.get(key, default)

        # element children
        else:
            try:
                return self._obj.find(key).attrib['Value']
            except (AttributeError, KeyError):
                return default

class User(BaseModel):
    Id: int
    Status: Optional[str]
    FirstName: Optional[str]
    LastName: Optional[str]
    LoggedIn: bool

    class Config:
        orm_mode = True
        getter_dict = UserGetter

user = User.from_orm(fromstring(xmlstring))

pydantic-1.10.14/docs/examples/models_orm_mode_recursive.py

from typing import List
from pydantic import BaseModel

class PetCls:
    def __init__(self, *, name: str, species: str):
        self.name = name
        self.species = species

class PersonCls:
    def __init__(self, *, name: str, age: float = None, pets: List[PetCls]):
        self.name = name
        self.age = age
        self.pets = pets

class Pet(BaseModel):
    name: str
    species: str

    class Config:
        orm_mode = True

class Person(BaseModel):
    name: str
    age: float = None
    pets: List[Pet]

    class Config:
        orm_mode = True

bones = PetCls(name='Bones', species='dog')
orion = PetCls(name='Orion', species='cat')
anna = PersonCls(name='Anna', age=20, pets=[bones, orion])
anna_model = Person.from_orm(anna)
print(anna_model)

pydantic-1.10.14/docs/examples/models_orm_mode_reserved_name.py

import typing
from pydantic import BaseModel, Field
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base

class MyModel(BaseModel):
    metadata: typing.Dict[str, str] = Field(alias='metadata_')

    class Config:
        orm_mode = True

Base = declarative_base()

class SQLModel(Base):
    __tablename__ = 'my_table'
    id = sa.Column('id', sa.Integer, primary_key=True)
    # 'metadata' is reserved by SQLAlchemy, hence the '_'
    metadata_ = sa.Column('metadata', sa.JSON)

sql_model = SQLModel(metadata_={'key': 'val'}, id=1)
pydantic_model = MyModel.from_orm(sql_model)
print(pydantic_model.dict())
print(pydantic_model.dict(by_alias=True))

pydantic-1.10.14/docs/examples/models_parse.py

import pickle
from datetime import datetime
from pathlib import Path
from pydantic import BaseModel, ValidationError

class User(BaseModel):
    id: int
    name = 'John Doe'
    signup_ts: datetime = None

m = User.parse_obj({'id': 123, 'name': 'James'})
print(m)

try:
    User.parse_obj(['not', 'a', 'dict'])
except ValidationError as e:
    print(e)

# assumes json as no content type passed
m = User.parse_raw('{"id": 123, "name": "James"}')
print(m)

pickle_data = pickle.dumps({
    'id': 123,
    'name': 'James',
    'signup_ts': datetime(2017, 7, 14)
})
m = User.parse_raw(
    pickle_data, content_type='application/pickle', allow_pickle=True
)
print(m)

path = Path('data.json')
path.write_text('{"id": 123, "name": "James"}')
m = User.parse_file(path)
print(m)
# ignore-below
if path.exists():
    path.unlink()
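A companion sketch (illustrative): `.json()` and `parse_raw()` round-trip a model through a JSON string.

from pydantic import BaseModel

class User(BaseModel):
    id: int
    name = 'John Doe'

u = User(id=123)
print(User.parse_raw(u.json()))  # round-trips to an equal instance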
pydantic-1.10.14/docs/examples/models_recursive.py

from typing import List, Optional
from pydantic import BaseModel

class Foo(BaseModel):
    count: int
    size: Optional[float] = None

class Bar(BaseModel):
    apple = 'x'
    banana = 'y'

class Spam(BaseModel):
    foo: Foo
    bars: List[Bar]

m = Spam(foo={'count': 4}, bars=[{'apple': 'x1'}, {'apple': 'x2'}])
print(m)
print(m.dict())

pydantic-1.10.14/docs/examples/models_required_field_optional.py

from typing import Optional
from pydantic import BaseModel, Field, ValidationError

class Model(BaseModel):
    a: Optional[int]
    b: Optional[int] = ...
    c: Optional[int] = Field(...)

print(Model(b=1, c=2))

try:
    Model(a=1, b=2)
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/models_required_fields.py

from pydantic import BaseModel, Field

class Model(BaseModel):
    a: int
    b: int = ...
    c: int = Field(...)

pydantic-1.10.14/docs/examples/models_signature.py

import inspect
from pydantic import BaseModel, Field

class FooModel(BaseModel):
    id: int
    name: str = None
    description: str = 'Foo'
    apple: int = Field(..., alias='pear')

print(inspect.signature(FooModel))

pydantic-1.10.14/docs/examples/models_signature_custom_init.py

import inspect
from pydantic import BaseModel

class MyModel(BaseModel):
    id: int
    info: str = 'Foo'

    def __init__(self, id: int = 1, *, bar: str, **data) -> None:
        """My custom init!"""
        super().__init__(id=id, bar=bar, **data)

print(inspect.signature(MyModel))

pydantic-1.10.14/docs/examples/models_structural_pattern_matching.py

# requires python3.10
from pydantic import BaseModel

class Pet(BaseModel):
    name: str
    species: str

a = Pet(name='Bones', species='dog')

match a:
    # match `species` to 'dog', declare and initialize `dog_name`
    case Pet(species='dog', name=dog_name):
        print(f'{dog_name} is a dog')
    # default case
    case _:
        print('No dog matched')

pydantic-1.10.14/docs/examples/mypy_main.py

# dont-execute
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel, NoneStr

class Model(BaseModel):
    age: int
    first_name = 'John'
    last_name: NoneStr = None
    signup_ts: Optional[datetime] = None
    list_of_ints: List[int]

m = Model(age=42, list_of_ints=[1, '2', b'3'])
print(m.middle_name)  # not a model field!
Model()  # will raise a validation error for age and list_of_ints
pydantic-1.10.14/docs/examples/parse_obj_as.py

from typing import List
from pydantic import BaseModel, parse_obj_as

class Item(BaseModel):
    id: int
    name: str

# `item_data` could come from an API call, e.g., via something like:
# item_data = requests.get('https://my-api.com/items').json()
item_data = [{'id': 1, 'name': 'My Item'}]

items = parse_obj_as(List[Item], item_data)
print(items)

pydantic-1.10.14/docs/examples/postponed_annotations_broken.py

from __future__ import annotations
from pydantic import BaseModel
from pydantic.errors import ConfigError

def this_is_broken():
    from pydantic import HttpUrl  # HttpUrl is defined in function local scope

    class Model(BaseModel):
        a: HttpUrl

    try:
        Model(a='https://example.com')
    except ConfigError as e:
        print(e)

    try:
        Model.update_forward_refs()
    except NameError as e:
        print(e)

this_is_broken()

pydantic-1.10.14/docs/examples/postponed_annotations_forward_ref.py

from typing import ForwardRef
from pydantic import BaseModel

Foo = ForwardRef('Foo')

class Foo(BaseModel):
    a: int = 123
    b: Foo = None

Foo.update_forward_refs()

print(Foo())
print(Foo(b={'a': '321'}))

pydantic-1.10.14/docs/examples/postponed_annotations_main.py

from __future__ import annotations
from typing import Any, List
from pydantic import BaseModel

class Model(BaseModel):
    a: List[int]
    b: Any

print(Model(a=('1', 2, 3), b='ok'))

pydantic-1.10.14/docs/examples/postponed_annotations_self_referencing_annotations.py

from __future__ import annotations
from pydantic import BaseModel

class Foo(BaseModel):
    a: int = 123
    #: The sibling of `Foo` is referenced directly by type
    sibling: Foo = None

print(Foo())
print(Foo(sibling={'a': '321'}))

pydantic-1.10.14/docs/examples/postponed_annotations_self_referencing_string.py

from pydantic import BaseModel

class Foo(BaseModel):
    a: int = 123
    #: The sibling of `Foo` is referenced by string
    sibling: 'Foo' = None

print(Foo())
print(Foo(sibling={'a': '321'}))

pydantic-1.10.14/docs/examples/postponed_annotations_works.py

from __future__ import annotations
from pydantic import BaseModel
from pydantic import HttpUrl  # HttpUrl is defined in the module's global scope

def this_works():
    class Model(BaseModel):
        a: HttpUrl

    print(Model(a='https://example.com'))

this_works()

pydantic-1.10.14/docs/examples/private_attributes.py

from datetime import datetime
from random import randint
from pydantic import BaseModel, PrivateAttr

class TimeAwareModel(BaseModel):
    _processed_at: datetime = PrivateAttr(default_factory=datetime.now)
    _secret_value: str = PrivateAttr()

    def __init__(self, **data):
        super().__init__(**data)
        # this could also be done with default_factory
        self._secret_value = randint(1, 5)

m = TimeAwareModel()
print(m._processed_at)
print(m._secret_value)
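A companion sketch (illustrative): private attributes are not fields, so they are excluded from `dict()` and `json()` exports.

from pydantic import BaseModel, PrivateAttr

class Model(BaseModel):
    _token: str = PrivateAttr('abc')
    value: int = 1

m = Model()
print(m.dict())  # only {'value': 1}; _token is not exported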
pydantic-1.10.14/docs/examples/private_attributes_underscore_attrs_are_private.py

from typing import ClassVar
from pydantic import BaseModel

class Model(BaseModel):
    _class_var: ClassVar[str] = 'class var value'
    _private_attr: str = 'private attr value'

    class Config:
        underscore_attrs_are_private = True

print(Model._class_var)
print(Model._private_attr)
print(Model()._private_attr)

pydantic-1.10.14/docs/examples/schema_ad_hoc.py

from typing import Literal, Union
from typing_extensions import Annotated
from pydantic import BaseModel, Field, schema_json_of

class Cat(BaseModel):
    pet_type: Literal['cat']
    cat_name: str

class Dog(BaseModel):
    pet_type: Literal['dog']
    dog_name: str

Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')]

print(schema_json_of(Pet, title='The Pet Schema', indent=2))

pydantic-1.10.14/docs/examples/schema_annotated.py

from uuid import uuid4
from pydantic import BaseModel, Field
from typing_extensions import Annotated

class Foo(BaseModel):
    id: Annotated[str, Field(default_factory=lambda: uuid4().hex)]
    name: Annotated[str, Field(max_length=256)] = 'Bar'

pydantic-1.10.14/docs/examples/schema_custom.py

# output-json
import json
from pydantic import BaseModel
from pydantic.schema import schema

class Foo(BaseModel):
    a: int

class Model(BaseModel):
    a: Foo

# Default location for OpenAPI
top_level_schema = schema([Model], ref_prefix='#/components/schemas/')
print(json.dumps(top_level_schema, indent=2))

pydantic-1.10.14/docs/examples/schema_extra_callable.py

# output-json
from typing import Dict, Any, Type
from pydantic import BaseModel

class Person(BaseModel):
    name: str
    age: int

    class Config:
        @staticmethod
        def schema_extra(schema: Dict[str, Any], model: Type['Person']) -> None:
            for prop in schema.get('properties', {}).values():
                prop.pop('title', None)

print(Person.schema_json(indent=2))

pydantic-1.10.14/docs/examples/schema_main.py

# output-json
from enum import Enum
from pydantic import BaseModel, Field

class FooBar(BaseModel):
    count: int
    size: float = None

class Gender(str, Enum):
    male = 'male'
    female = 'female'
    other = 'other'
    not_given = 'not_given'

class MainModel(BaseModel):
    """
    This is the description of the main model
    """

    foo_bar: FooBar = Field(...)
    gender: Gender = Field(None, alias='Gender')
    snap: int = Field(
        42,
        title='The Snap',
        description='this is the value of snap',
        gt=30,
        lt=50,
    )

    class Config:
        title = 'Main'

# this is equivalent to json.dumps(MainModel.schema(), indent=2):
print(MainModel.schema_json(indent=2))
pydantic-1.10.14/docs/examples/schema_top_level.py

# output-json
import json
from pydantic import BaseModel
from pydantic.schema import schema

class Foo(BaseModel):
    a: str = None

class Model(BaseModel):
    b: Foo

class Bar(BaseModel):
    c: int

top_level_schema = schema([Model, Bar], title='My Schema')
print(json.dumps(top_level_schema, indent=2))

pydantic-1.10.14/docs/examples/schema_unenforced_constraints.py

from pydantic import BaseModel, Field, PositiveInt

try:
    # this won't work since PositiveInt takes precedence over the
    # constraints defined in Field meaning they're ignored
    class Model(BaseModel):
        foo: PositiveInt = Field(..., lt=10)
except ValueError as e:
    print(e)

# but you can set the schema attribute directly:
# (Note: here exclusiveMaximum will not be enforced)
class Model(BaseModel):
    foo: PositiveInt = Field(..., exclusiveMaximum=10)

print(Model.schema())

# if you find yourself needing this, an alternative is to declare
# the constraints in Field (or you could use conint())
# here both constraints will be enforced:
class Model(BaseModel):
    # Here both constraints will be applied and the schema
    # will be generated correctly
    foo: int = Field(..., gt=0, lt=10)

print(Model.schema())

pydantic-1.10.14/docs/examples/schema_with_example.py

# output-json
from pydantic import BaseModel

class Person(BaseModel):
    name: str
    age: int

    class Config:
        schema_extra = {
            'examples': [
                {
                    'name': 'John Doe',
                    'age': 25,
                }
            ]
        }

print(Person.schema_json(indent=2))

pydantic-1.10.14/docs/examples/schema_with_field.py

# output-json
from typing import Any, Callable, Dict, Generator, Optional
from pydantic import BaseModel, Field
from pydantic.fields import ModelField

class RestrictedAlphabetStr(str):
    @classmethod
    def __get_validators__(cls) -> Generator[Callable, None, None]:
        yield cls.validate

    @classmethod
    def validate(cls, value: str, field: ModelField):
        alphabet = field.field_info.extra['alphabet']
        if any(c not in alphabet for c in value):
            raise ValueError(f'{value!r} is not restricted to {alphabet!r}')
        return cls(value)

    @classmethod
    def __modify_schema__(
        cls, field_schema: Dict[str, Any], field: Optional[ModelField]
    ):
        if field:
            alphabet = field.field_info.extra['alphabet']
            field_schema['examples'] = [c * 3 for c in alphabet]

class MyModel(BaseModel):
    value: RestrictedAlphabetStr = Field(alphabet='ABC')

print(MyModel.schema_json(indent=2))

pydantic-1.10.14/docs/examples/settings_add_custom_source.py

import json
from pathlib import Path
from typing import Dict, Any
from pydantic import BaseSettings

def json_config_settings_source(settings: BaseSettings) -> Dict[str, Any]:
    """
    A simple settings source that loads variables from a JSON file
    at the project's root.

    Here we happen to choose to use the `env_file_encoding` from Config
    when reading `config.json`
    """
    encoding = settings.__config__.env_file_encoding
    return json.loads(Path('config.json').read_text(encoding))

class Settings(BaseSettings):
    foobar: str

    class Config:
        env_file_encoding = 'utf-8'

        @classmethod
        def customise_sources(
            cls,
            init_settings,
            env_settings,
            file_secret_settings,
        ):
            return (
                init_settings,
                json_config_settings_source,
                env_settings,
                file_secret_settings,
            )

print(Settings())
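For the custom-source example above to run, a `config.json` must exist next to the script; a minimal sketch of creating one (the `foobar` value is illustrative):

from pathlib import Path

# matches the `foobar: str` field declared in Settings above
Path('config.json').write_text('{"foobar": "spam"}', encoding='utf-8')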
pydantic-1.10.14/docs/examples/settings_case_sensitive.py

from pydantic import BaseSettings

class Settings(BaseSettings):
    redis_host = 'localhost'

    class Config:
        case_sensitive = True

pydantic-1.10.14/docs/examples/settings_disable_source.py

from typing import Tuple
from pydantic import BaseSettings
from pydantic.env_settings import SettingsSourceCallable

class Settings(BaseSettings):
    my_api_key: str

    class Config:
        @classmethod
        def customise_sources(
            cls,
            init_settings: SettingsSourceCallable,
            env_settings: SettingsSourceCallable,
            file_secret_settings: SettingsSourceCallable,
        ) -> Tuple[SettingsSourceCallable, ...]:
            # here we choose to ignore arguments from init_settings
            return env_settings, file_secret_settings

print(Settings(my_api_key='this is ignored'))
# requires: `MY_API_KEY` env variable to be set, e.g. `export MY_API_KEY=xxx`

pydantic-1.10.14/docs/examples/settings_env_priority.py

from typing import Tuple
from pydantic import BaseSettings, PostgresDsn
from pydantic.env_settings import SettingsSourceCallable

class Settings(BaseSettings):
    database_dsn: PostgresDsn

    class Config:
        @classmethod
        def customise_sources(
            cls,
            init_settings: SettingsSourceCallable,
            env_settings: SettingsSourceCallable,
            file_secret_settings: SettingsSourceCallable,
        ) -> Tuple[SettingsSourceCallable, ...]:
            return env_settings, init_settings, file_secret_settings

print(Settings(database_dsn='postgres://postgres@localhost:5432/kwargs_db'))

pydantic-1.10.14/docs/examples/settings_main.py

from typing import Set

from pydantic import (
    BaseModel,
    BaseSettings,
    PyObject,
    RedisDsn,
    PostgresDsn,
    AmqpDsn,
    Field,
)

class SubModel(BaseModel):
    foo = 'bar'
    apple = 1

class Settings(BaseSettings):
    auth_key: str
    api_key: str = Field(..., env='my_api_key')

    redis_dsn: RedisDsn = 'redis://user:pass@localhost:6379/1'
    pg_dsn: PostgresDsn = 'postgres://user:pass@localhost:5432/foobar'
    amqp_dsn: AmqpDsn = 'amqp://user:pass@localhost:5672/'

    special_function: PyObject = 'math.cos'

    # to override domains:
    # export my_prefix_domains='["foo.com", "bar.com"]'
    domains: Set[str] = set()

    # to override more_settings:
    # export my_prefix_more_settings='{"foo": "x", "apple": 1}'
    more_settings: SubModel = SubModel()

    class Config:
        env_prefix = 'my_prefix_'  # defaults to no prefix, i.e. ""
        fields = {
            'auth_key': {
                'env': 'my_auth_key',
            },
            'redis_dsn': {
                'env': ['service_redis_dsn', 'redis_url']
            }
        }

print(Settings().dict())
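The settings example above needs its required fields supplied via the environment; a sketch (the values are placeholders):

import os

# env names come from Field(env=...) and Config.fields above
os.environ['my_auth_key'] = 'xxx'
os.environ['my_api_key'] = 'xxx'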
"" fields = { 'auth_key': { 'env': 'my_auth_key', }, 'redis_dsn': { 'env': ['service_redis_dsn', 'redis_url'] } } print(Settings().dict()) pydantic-1.10.14/docs/examples/settings_nested_env.py000066400000000000000000000005121455251250200226620ustar00rootroot00000000000000from pydantic import BaseModel, BaseSettings class DeepSubModel(BaseModel): v4: str class SubModel(BaseModel): v1: str v2: bytes v3: int deep: DeepSubModel class Settings(BaseSettings): v0: str sub_model: SubModel class Config: env_nested_delimiter = '__' print(Settings().dict()) pydantic-1.10.14/docs/examples/settings_with_custom_parsing.py000066400000000000000000000006711455251250200246260ustar00rootroot00000000000000import os from typing import Any, List from pydantic import BaseSettings class Settings(BaseSettings): numbers: List[int] class Config: @classmethod def parse_env_var(cls, field_name: str, raw_val: str) -> Any: if field_name == 'numbers': return [int(x) for x in raw_val.split(',')] return cls.json_loads(raw_val) os.environ['numbers'] = '1,2,3' print(Settings().dict()) pydantic-1.10.14/docs/examples/types_arbitrary_allowed.py000066400000000000000000000016141455251250200235460ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError # This is not a pydantic model, it's an arbitrary class class Pet: def __init__(self, name: str): self.name = name class Model(BaseModel): pet: Pet owner: str class Config: arbitrary_types_allowed = True pet = Pet(name='Hedwig') # A simple check of instance type is used to validate the data model = Model(owner='Harry', pet=pet) print(model) print(model.pet) print(model.pet.name) print(type(model.pet)) try: # If the value is not an instance of the type, it's invalid Model(owner='Harry', pet='Hedwig') except ValidationError as e: print(e) # Nothing in the instance of the arbitrary type is checked # Here name probably should have been a str, but it's not validated pet2 = Pet(name=42) model2 = Model(owner='Harry', pet=pet2) print(model2) print(model2.pet) print(model2.pet.name) print(type(model2.pet)) pydantic-1.10.14/docs/examples/types_bare_type.py000066400000000000000000000005321455251250200220100ustar00rootroot00000000000000# dont-upgrade from typing import Type from pydantic import BaseModel, ValidationError class Foo: pass class LenientSimpleModel(BaseModel): any_class_goes: Type LenientSimpleModel(any_class_goes=int) LenientSimpleModel(any_class_goes=Foo) try: LenientSimpleModel(any_class_goes=Foo()) except ValidationError as e: print(e) pydantic-1.10.14/docs/examples/types_boolean.py000066400000000000000000000004121455251250200214520ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError class BooleanModel(BaseModel): bool_value: bool print(BooleanModel(bool_value=False)) print(BooleanModel(bool_value='False')) try: BooleanModel(bool_value=[]) except ValidationError as e: print(str(e)) pydantic-1.10.14/docs/examples/types_bytesize.py000066400000000000000000000004351455251250200216760ustar00rootroot00000000000000from pydantic import BaseModel, ByteSize class MyModel(BaseModel): size: ByteSize print(MyModel(size=52000).size) print(MyModel(size='3000 KiB').size) m = MyModel(size='50 PB') print(m.size.human_readable()) print(m.size.human_readable(decimal=True)) print(m.size.to('TiB')) pydantic-1.10.14/docs/examples/types_callable.py000066400000000000000000000002371455251250200215770ustar00rootroot00000000000000from typing import Callable from pydantic import BaseModel class Foo(BaseModel): callback: Callable[[int], int] m = 
pydantic-1.10.14/docs/examples/types_choices.py

from enum import Enum, IntEnum
from pydantic import BaseModel, ValidationError

class FruitEnum(str, Enum):
    pear = 'pear'
    banana = 'banana'

class ToolEnum(IntEnum):
    spanner = 1
    wrench = 2

class CookingModel(BaseModel):
    fruit: FruitEnum = FruitEnum.pear
    tool: ToolEnum = ToolEnum.spanner

print(CookingModel())
print(CookingModel(tool=2, fruit='banana'))
try:
    CookingModel(fruit='other')
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/types_color.py

from pydantic import BaseModel, ValidationError
from pydantic.color import Color

c = Color('ff00ff')
print(c.as_named())
print(c.as_hex())
c2 = Color('green')
print(c2.as_rgb_tuple())
print(c2.original())
print(repr(Color('hsl(180, 100%, 50%)')))

class Model(BaseModel):
    color: Color

print(Model(color='purple'))
try:
    Model(color='hello')
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/types_constrained.py

from decimal import Decimal

from pydantic import (
    BaseModel,
    NegativeFloat,
    NegativeInt,
    PositiveFloat,
    PositiveInt,
    NonNegativeFloat,
    NonNegativeInt,
    NonPositiveFloat,
    NonPositiveInt,
    conbytes,
    condecimal,
    confloat,
    conint,
    conlist,
    conset,
    constr,
    Field,
)

class Model(BaseModel):
    upper_bytes: conbytes(to_upper=True)
    lower_bytes: conbytes(to_lower=True)
    short_bytes: conbytes(min_length=2, max_length=10)
    strip_bytes: conbytes(strip_whitespace=True)

    upper_str: constr(to_upper=True)
    lower_str: constr(to_lower=True)
    short_str: constr(min_length=2, max_length=10)
    regex_str: constr(regex=r'^apple (pie|tart|sandwich)$')
    strip_str: constr(strip_whitespace=True)

    big_int: conint(gt=1000, lt=1024)
    mod_int: conint(multiple_of=5)
    pos_int: PositiveInt
    neg_int: NegativeInt
    non_neg_int: NonNegativeInt
    non_pos_int: NonPositiveInt

    big_float: confloat(gt=1000, lt=1024)
    unit_interval: confloat(ge=0, le=1)
    mod_float: confloat(multiple_of=0.5)
    pos_float: PositiveFloat
    neg_float: NegativeFloat
    non_neg_float: NonNegativeFloat
    non_pos_float: NonPositiveFloat

    short_list: conlist(int, min_items=1, max_items=4)
    short_set: conset(int, min_items=1, max_items=4)

    decimal_positive: condecimal(gt=0)
    decimal_negative: condecimal(lt=0)
    decimal_max_digits_and_places: condecimal(max_digits=2, decimal_places=2)
    mod_decimal: condecimal(multiple_of=Decimal('0.25'))

    bigger_int: int = Field(..., gt=10000)

pydantic-1.10.14/docs/examples/types_custom_type.py

import re
from pydantic import BaseModel

# https://en.wikipedia.org/wiki/Postcodes_in_the_United_Kingdom#Validation
post_code_regex = re.compile(
    r'(?:'
    r'([A-Z]{1,2}[0-9][A-Z0-9]?|ASCN|STHL|TDCU|BBND|[BFS]IQQ|PCRN|TKCA) ?'
    r'([0-9][A-Z]{2})|'
    r'(BFPO) ?([0-9]{1,4})|'
    r'(KY[0-9]|MSR|VG|AI)[ -]?[0-9]{4}|'
    r'([A-Z]{2}) ?([0-9]{2})|'
    r'(GE) ?(CX)|'
    r'(GIR) ?(0A{2})|'
    r'(SAN) ?(TA1)'
    r')'
)

class PostCode(str):
    """
    Partial UK postcode validation. Note: this is just an example, and is not
    intended for use in production; in particular this does NOT guarantee
    a postcode exists, just that it has a valid format.
    """

    @classmethod
    def __get_validators__(cls):
        # one or more validators may be yielded which will be called in the
        # order to validate the input, each validator will receive as an input
        # the value returned from the previous validator
        yield cls.validate

    @classmethod
    def __modify_schema__(cls, field_schema):
        # __modify_schema__ should mutate the dict it receives in place,
        # the returned value will be ignored
        field_schema.update(
            # simplified regex here for brevity, see the wikipedia link above
            pattern='^[A-Z]{1,2}[0-9][A-Z0-9]? ?[0-9][A-Z]{2}$',
            # some example postcodes
            examples=['SP11 9DG', 'w1j7bu'],
        )

    @classmethod
    def validate(cls, v):
        if not isinstance(v, str):
            raise TypeError('string required')
        m = post_code_regex.fullmatch(v.upper())
        if not m:
            raise ValueError('invalid postcode format')
        # you could also return a string here which would mean model.post_code
        # would be a string, pydantic won't care but you could end up with some
        # confusion since the value's type won't match the type annotation
        # exactly
        return cls(f'{m.group(1)} {m.group(2)}')

    def __repr__(self):
        return f'PostCode({super().__repr__()})'

class Model(BaseModel):
    post_code: PostCode

model = Model(post_code='sw8 5el')
print(model)
print(model.post_code)
print(Model.schema())
""" @classmethod def __get_validators__(cls): # one or more validators may be yielded which will be called in the # order to validate the input, each validator will receive as an input # the value returned from the previous validator yield cls.validate @classmethod def __modify_schema__(cls, field_schema): # __modify_schema__ should mutate the dict it receives in place, # the returned value will be ignored field_schema.update( # simplified regex here for brevity, see the wikipedia link above pattern='^[A-Z]{1,2}[0-9][A-Z0-9]? ?[0-9][A-Z]{2}$', # some example postcodes examples=['SP11 9DG', 'w1j7bu'], ) @classmethod def validate(cls, v): if not isinstance(v, str): raise TypeError('string required') m = post_code_regex.fullmatch(v.upper()) if not m: raise ValueError('invalid postcode format') # you could also return a string here which would mean model.post_code # would be a string, pydantic won't care but you could end up with some # confusion since the value's type won't match the type annotation # exactly return cls(f'{m.group(1)} {m.group(2)}') def __repr__(self): return f'PostCode({super().__repr__()})' class Model(BaseModel): post_code: PostCode model = Model(post_code='sw8 5el') print(model) print(model.post_code) print(Model.schema()) pydantic-1.10.14/docs/examples/types_dt.py000066400000000000000000000005241455251250200204460ustar00rootroot00000000000000from datetime import date, datetime, time, timedelta from pydantic import BaseModel class Model(BaseModel): d: date = None dt: datetime = None t: time = None td: timedelta = None m = Model( d=1966280412345.6789, dt='2032-04-23T10:20:30.400+02:30', t=time(4, 8, 16), td='P3DT12H30M5S', ) print(m.dict()) pydantic-1.10.14/docs/examples/types_generics.py000066400000000000000000000061641455251250200216440ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError from pydantic.fields import ModelField from typing import TypeVar, Generic AgedType = TypeVar('AgedType') QualityType = TypeVar('QualityType') # This is not a pydantic model, it's an arbitrary generic class class TastingModel(Generic[AgedType, QualityType]): def __init__(self, name: str, aged: AgedType, quality: QualityType): self.name = name self.aged = aged self.quality = quality @classmethod def __get_validators__(cls): yield cls.validate @classmethod # You don't need to add the "ModelField", but it will help your # editor give you completion and catch errors def validate(cls, v, field: ModelField): if not isinstance(v, cls): # The value is not even a TastingModel raise TypeError('Invalid value') if not field.sub_fields: # Generic parameters were not provided so we don't try to validate # them and just return the value as is return v aged_f = field.sub_fields[0] quality_f = field.sub_fields[1] errors = [] # Here we don't need the validated value, but we want the errors valid_value, error = aged_f.validate(v.aged, {}, loc='aged') if error: errors.append(error) # Here we don't need the validated value, but we want the errors valid_value, error = quality_f.validate(v.quality, {}, loc='quality') if error: errors.append(error) if errors: raise ValidationError(errors, cls) # Validation passed without errors, return the same instance received return v class Model(BaseModel): # for wine, "aged" is an int with years, "quality" is a float wine: TastingModel[int, float] # for cheese, "aged" is a bool, "quality" is a str cheese: TastingModel[bool, str] # for thing, "aged" is a Any, "quality" is Any thing: TastingModel model = Model( # This wine was aged for 20 years and 
pydantic-1.10.14/docs/examples/types_infinite_generator.py

from typing import Iterable
from pydantic import BaseModel

class Model(BaseModel):
    infinite: Iterable[int]

def infinite_ints():
    i = 0
    while True:
        yield i
        i += 1

m = Model(infinite=infinite_ints())
print(m)

for i in m.infinite:
    print(i)

    if i == 10:
        break

pydantic-1.10.14/docs/examples/types_infinite_generator_validate_first.py

import itertools
from typing import Iterable
from pydantic import BaseModel, validator, ValidationError
from pydantic.fields import ModelField

class Model(BaseModel):
    infinite: Iterable[int]

    @validator('infinite')
    # You don't need to add the "ModelField", but it will help your
    # editor give you completion and catch errors
    def infinite_first_int(cls, iterable, field: ModelField):
        first_value = next(iterable)
        if field.sub_fields:
            # The Iterable had a parameter type, in this case it's int
            # We use it to validate the first value
            sub_field = field.sub_fields[0]
            v, error = sub_field.validate(first_value, {}, loc='first_value')
            if error:
                raise ValidationError([error], cls)
        # This creates a new generator that returns the first value and then
        # the rest of the values from the (already started) iterable
        return itertools.chain([first_value], iterable)

def infinite_ints():
    i = 0
    while True:
        yield i
        i += 1

m = Model(infinite=infinite_ints())
print(m)

def infinite_strs():
    while True:
        yield from 'allthesingleladies'

try:
    Model(infinite=infinite_strs())
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/types_iterables.py

from typing import (
    Deque, Dict, FrozenSet, List, Optional, Sequence, Set, Tuple, Union
)

from pydantic import BaseModel

class Model(BaseModel):
    simple_list: list = None
    list_of_ints: List[int] = None

    simple_tuple: tuple = None
    tuple_of_different_types: Tuple[int, float, str, bool] = None

    simple_dict: dict = None
    dict_str_float: Dict[str, float] = None

    simple_set: set = None
    set_bytes: Set[bytes] = None
    frozen_set: FrozenSet[int] = None

    str_or_bytes: Union[str, bytes] = None
    none_or_str: Optional[str] = None

    sequence_of_ints: Sequence[int] = None

    compound: Dict[Union[str, bytes], List[Set[int]]] = None

    deque: Deque[int] = None

print(Model(simple_list=['1', '2', '3']).simple_list)
print(Model(list_of_ints=['1', '2', '3']).list_of_ints)
print(Model(simple_dict={'a': 1, b'b': 2}).simple_dict)
print(Model(dict_str_float={'a': 1, b'b': 2}).dict_str_float)
print(Model(simple_tuple=[1, 2, 3, 4]).simple_tuple)
print(Model(tuple_of_different_types=[4, 3, 2, 1]).tuple_of_different_types)
print(Model(sequence_of_ints=[1, 2, 3, 4]).sequence_of_ints)
print(Model(sequence_of_ints=(1, 2, 3, 4)).sequence_of_ints)
print(Model(deque=[1, 2, 3]).deque)
pydantic-1.10.14/docs/examples/types_json_type.py

from typing import Any, List
from pydantic import BaseModel, Json, ValidationError

class AnyJsonModel(BaseModel):
    json_obj: Json[Any]

class ConstrainedJsonModel(BaseModel):
    json_obj: Json[List[int]]

print(AnyJsonModel(json_obj='{"b": 1}'))
print(ConstrainedJsonModel(json_obj='[1, 2, 3]'))
try:
    ConstrainedJsonModel(json_obj=12)
except ValidationError as e:
    print(e)

try:
    ConstrainedJsonModel(json_obj='[a, b]')
except ValidationError as e:
    print(e)

try:
    ConstrainedJsonModel(json_obj='["a", "b"]')
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/types_literal1.py

from typing import Literal
from pydantic import BaseModel, ValidationError

class Pie(BaseModel):
    flavor: Literal['apple', 'pumpkin']

Pie(flavor='apple')
Pie(flavor='pumpkin')
try:
    Pie(flavor='cherry')
except ValidationError as e:
    print(str(e))

pydantic-1.10.14/docs/examples/types_literal2.py

from typing import ClassVar, List, Union
from typing import Literal
from pydantic import BaseModel, ValidationError

class Cake(BaseModel):
    kind: Literal['cake']
    required_utensils: ClassVar[List[str]] = ['fork', 'knife']

class IceCream(BaseModel):
    kind: Literal['icecream']
    required_utensils: ClassVar[List[str]] = ['spoon']

class Meal(BaseModel):
    dessert: Union[Cake, IceCream]

print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__)
print(type(Meal(dessert={'kind': 'icecream'}).dessert).__name__)
try:
    Meal(dessert={'kind': 'pie'})
except ValidationError as e:
    print(str(e))

pydantic-1.10.14/docs/examples/types_literal3.py

from typing import Optional, Union
from typing import Literal
from pydantic import BaseModel

class Dessert(BaseModel):
    kind: str

class Pie(Dessert):
    kind: Literal['pie']
    flavor: Optional[str]

class ApplePie(Pie):
    flavor: Literal['apple']

class PumpkinPie(Pie):
    flavor: Literal['pumpkin']

class Meal(BaseModel):
    dessert: Union[ApplePie, PumpkinPie, Pie, Dessert]

print(type(Meal(dessert={'kind': 'pie', 'flavor': 'apple'}).dessert).__name__)
print(type(Meal(dessert={'kind': 'pie', 'flavor': 'pumpkin'}).dessert).__name__)
print(type(Meal(dessert={'kind': 'pie'}).dessert).__name__)
print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__)

pydantic-1.10.14/docs/examples/types_payment_card_number.py

from datetime import date
from pydantic import BaseModel
from pydantic.types import PaymentCardBrand, PaymentCardNumber, constr

class Card(BaseModel):
    name: constr(strip_whitespace=True, min_length=1)
    number: PaymentCardNumber
    exp: date

    @property
    def brand(self) -> PaymentCardBrand:
        return self.number.brand

    @property
    def expired(self) -> bool:
        return self.exp < date.today()

card = Card(
    name='Georg Wilhelm Friedrich Hegel',
    number='4000000000000002',
    exp=date(2023, 9, 30),
)

assert card.number.brand == PaymentCardBrand.visa
assert card.number.bin == '400000'
assert card.number.last4 == '0002'
assert card.number.masked == '400000******0002'
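A companion sketch (illustrative): card numbers that fail the Luhn checksum are rejected.

from pydantic import BaseModel, ValidationError
from pydantic.types import PaymentCardNumber

class Payment(BaseModel):
    number: PaymentCardNumber

try:
    # flipping the final digit of the valid number above breaks the Luhn check
    Payment(number='4000000000000001')
except ValidationError as e:
    print(e)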
pydantic-1.10.14/docs/examples/types_secret_types.py

from pydantic import BaseModel, SecretStr, SecretBytes, ValidationError

class SimpleModel(BaseModel):
    password: SecretStr
    password_bytes: SecretBytes

sm = SimpleModel(password='IAmSensitive', password_bytes=b'IAmSensitiveBytes')

# Standard access methods will not display the secret
print(sm)
print(sm.password)
print(sm.dict())
print(sm.json())

# Use get_secret_value method to see the secret's content.
print(sm.password.get_secret_value())
print(sm.password_bytes.get_secret_value())

try:
    SimpleModel(password=[1, 2, 3], password_bytes=[1, 2, 3])
except ValidationError as e:
    print(e)

# If you want the secret to be dumped as plain-text using the json method,
# you can use json_encoders in the Config class.
class SimpleModelDumpable(BaseModel):
    password: SecretStr
    password_bytes: SecretBytes

    class Config:
        json_encoders = {
            SecretStr: lambda v: v.get_secret_value() if v else None,
            SecretBytes: lambda v: v.get_secret_value() if v else None,
        }

sm2 = SimpleModelDumpable(
    password='IAmSensitive', password_bytes=b'IAmSensitiveBytes'
)

# Standard access methods will not display the secret
print(sm2)
print(sm2.password)
print(sm2.dict())

# But the json method will
print(sm2.json())

pydantic-1.10.14/docs/examples/types_strict.py

from pydantic import (
    BaseModel,
    StrictBytes,
    StrictBool,
    StrictInt,
    ValidationError,
    confloat,
)

class StrictBytesModel(BaseModel):
    strict_bytes: StrictBytes

try:
    StrictBytesModel(strict_bytes='hello world')
except ValidationError as e:
    print(e)

class StrictIntModel(BaseModel):
    strict_int: StrictInt

try:
    StrictIntModel(strict_int=3.14159)
except ValidationError as e:
    print(e)

class ConstrainedFloatModel(BaseModel):
    constrained_float: confloat(strict=True, ge=0.0)

try:
    ConstrainedFloatModel(constrained_float=3)
except ValidationError as e:
    print(e)

try:
    ConstrainedFloatModel(constrained_float=-1.23)
except ValidationError as e:
    print(e)

class StrictBoolModel(BaseModel):
    strict_bool: StrictBool

try:
    StrictBoolModel(strict_bool='False')
except ValidationError as e:
    print(str(e))

pydantic-1.10.14/docs/examples/types_type.py

from typing import Type

from pydantic import BaseModel
from pydantic import ValidationError

class Foo:
    pass

class Bar(Foo):
    pass

class Other:
    pass

class SimpleModel(BaseModel):
    just_subclasses: Type[Foo]

SimpleModel(just_subclasses=Foo)
SimpleModel(just_subclasses=Bar)
try:
    SimpleModel(just_subclasses=Other)
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/types_typevar.py

from typing import TypeVar
from pydantic import BaseModel

Foobar = TypeVar('Foobar')
BoundFloat = TypeVar('BoundFloat', bound=float)
IntStr = TypeVar('IntStr', int, str)

class Model(BaseModel):
    a: Foobar  # equivalent of ": Any"
    b: BoundFloat  # equivalent of ": float"
    c: IntStr  # equivalent of ": Union[int, str]"

print(Model(a=[1], b=4.2, c='x'))

# a may be None and is therefore optional
print(Model(b=1, c=1))
pydantic-1.10.14/docs/examples/types_union_correct.py

from uuid import UUID
from typing import Union
from pydantic import BaseModel

class User(BaseModel):
    id: Union[UUID, int, str]
    name: str

user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868')
user_03 = User(id=user_03_uuid, name='John Doe')
print(user_03)
print(user_03.id)
print(user_03_uuid.int)

pydantic-1.10.14/docs/examples/types_union_discriminated.py

from typing import Literal, Union
from pydantic import BaseModel, Field, ValidationError

class Cat(BaseModel):
    pet_type: Literal['cat']
    meows: int

class Dog(BaseModel):
    pet_type: Literal['dog']
    barks: float

class Lizard(BaseModel):
    pet_type: Literal['reptile', 'lizard']
    scales: bool

class Model(BaseModel):
    pet: Union[Cat, Dog, Lizard] = Field(..., discriminator='pet_type')
    n: int

print(Model(pet={'pet_type': 'dog', 'barks': 3.14}, n=1))
try:
    Model(pet={'pet_type': 'dog'}, n=1)
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/types_union_discriminated_nested.py

from typing import Literal, Union
from typing_extensions import Annotated
from pydantic import BaseModel, Field, ValidationError

class BlackCat(BaseModel):
    pet_type: Literal['cat']
    color: Literal['black']
    black_name: str

class WhiteCat(BaseModel):
    pet_type: Literal['cat']
    color: Literal['white']
    white_name: str

# Can also be written with a custom root type
#
# class Cat(BaseModel):
#     __root__: Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')]

Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')]

class Dog(BaseModel):
    pet_type: Literal['dog']
    name: str

Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')]

class Model(BaseModel):
    pet: Pet
    n: int

m = Model(pet={'pet_type': 'cat', 'color': 'black', 'black_name': 'felix'}, n=1)
print(m)
try:
    Model(pet={'pet_type': 'cat', 'color': 'red'}, n='1')
except ValidationError as e:
    print(e)
try:
    Model(pet={'pet_type': 'cat', 'color': 'black'}, n='1')
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/types_union_incorrect.py

from uuid import UUID
from typing import Union
from pydantic import BaseModel

class User(BaseModel):
    id: Union[int, str, UUID]
    name: str

user_01 = User(id=123, name='John Doe')
print(user_01)
print(user_01.id)
user_02 = User(id='1234', name='John Doe')
print(user_02)
print(user_02.id)
user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868')
user_03 = User(id=user_03_uuid, name='John Doe')
print(user_03)
print(user_03.id)
print(user_03_uuid.int)

pydantic-1.10.14/docs/examples/types_url_properties.py

from pydantic import BaseModel, HttpUrl, PostgresDsn, ValidationError, validator

class MyModel(BaseModel):
    url: HttpUrl

m = MyModel(url='http://www.example.com')

# the repr() method for a url will display all properties of the url
print(repr(m.url))
print(m.url.scheme)
print(m.url.host)
print(m.url.host_type)
print(m.url.port)

class MyDatabaseModel(BaseModel):
    db: PostgresDsn

    @validator('db')
    def check_db_name(cls, v):
        assert v.path and len(v.path) > 1, 'database must be provided'
        return v

m = MyDatabaseModel(db='postgres://user:pass@localhost:5432/foobar')
print(m.db)

try:
    MyDatabaseModel(db='postgres://user:pass@localhost:5432')
except ValidationError as e:
    print(e)
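A companion sketch (assuming `AnyUrl.build`, available in pydantic v1): URLs can also be assembled from components rather than parsed.

from pydantic import AnyUrl

# scheme and host are required; other parts are optional keyword arguments
url = AnyUrl.build(scheme='https', host='example.com', path='/path')
print(url)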
pydantic-1.10.14/docs/examples/types_url_punycode.py

from pydantic import BaseModel, HttpUrl

class MyModel(BaseModel):
    url: HttpUrl

m1 = MyModel(url='http://puny£code.com')
print(m1.url)
print(m1.url.host_type)
m2 = MyModel(url='https://www.аррӏе.com/')
print(m2.url)
print(m2.url.host_type)
m3 = MyModel(url='https://www.example.珠宝/')
print(m3.url)
print(m3.url.host_type)

pydantic-1.10.14/docs/examples/types_urls.py

from pydantic import BaseModel, HttpUrl, ValidationError

class MyModel(BaseModel):
    url: HttpUrl

m = MyModel(url='http://www.example.com')
print(m.url)

try:
    MyModel(url='ftp://invalid.url')
except ValidationError as e:
    print(e)

try:
    MyModel(url='not a url')
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/validation_decorator_async.py

class Connection:
    async def execute(self, sql, *args):
        return 'testing@example.com'

conn = Connection()
# ignore-above
import asyncio
from pydantic import PositiveInt, ValidationError, validate_arguments

@validate_arguments
async def get_user_email(user_id: PositiveInt):
    # `conn` is some fictional connection to a database
    email = await conn.execute('select email from users where id=$1', user_id)
    if email is None:
        raise RuntimeError('user not found')
    else:
        return email

async def main():
    email = await get_user_email(123)
    print(email)
    try:
        await get_user_email(-4)
    except ValidationError as exc:
        print(exc.errors())

asyncio.run(main())
# requires: `conn.execute()` that will return `'testing@example.com'`

pydantic-1.10.14/docs/examples/validation_decorator_config.py

from pydantic import ValidationError, validate_arguments

class Foobar:
    def __init__(self, v: str):
        self.v = v

    def __add__(self, other: 'Foobar') -> str:
        return f'{self} + {other}'

    def __str__(self) -> str:
        return f'Foobar({self.v})'

@validate_arguments(config=dict(arbitrary_types_allowed=True))
def add_foobars(a: Foobar, b: Foobar):
    return a + b

c = add_foobars(Foobar('a'), Foobar('b'))
print(c)

try:
    add_foobars(1, 2)
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/validation_decorator_field.py

from datetime import datetime
from pydantic import validate_arguments, Field, ValidationError
from pydantic.typing import Annotated

@validate_arguments
def how_many(num: Annotated[int, Field(gt=10)]):
    return num

try:
    how_many(1)
except ValidationError as e:
    print(e)

@validate_arguments
def when(dt: datetime = Field(default_factory=datetime.now)):
    return dt

print(type(when()))

pydantic-1.10.14/docs/examples/validation_decorator_field_alias.py

from pydantic import Field, validate_arguments
from pydantic.typing import Annotated

@validate_arguments
def how_many(num: Annotated[int, Field(gt=10, alias='number')]):
    return num

how_many(number=42)

pydantic-1.10.14/docs/examples/validation_decorator_main.py

from pydantic import validate_arguments, ValidationError

@validate_arguments
def repeat(s: str, count: int, *, separator: bytes = b'') -> bytes:
    b = s.encode()
    return separator.join(b for _ in range(count))

a = repeat('hello', 3)
print(a)

b = repeat('x', '4', separator=' ')
print(b)

try:
    c = repeat('hello', 'wrong')
except ValidationError as exc:
    print(exc)
pydantic-1.10.14/docs/examples/validation_decorator_parameter_types.py

# requires python3.8
from pydantic import validate_arguments

@validate_arguments
def pos_or_kw(a: int, b: int = 2) -> str:
    return f'a={a} b={b}'

print(pos_or_kw(1))
print(pos_or_kw(a=1))
print(pos_or_kw(1, 3))
print(pos_or_kw(a=1, b=3))

@validate_arguments
def kw_only(*, a: int, b: int = 2) -> str:
    return f'a={a} b={b}'

print(kw_only(a=1))
print(kw_only(a=1, b=3))

@validate_arguments
def pos_only(a: int, b: int = 2, /) -> str:  # python 3.8 only
    return f'a={a} b={b}'

print(pos_only(1))
print(pos_only(1, 2))

@validate_arguments
def var_args(*args: int) -> str:
    return str(args)

print(var_args(1))
print(var_args(1, 2))
print(var_args(1, 2, 3))

@validate_arguments
def var_kwargs(**kwargs: int) -> str:
    return str(kwargs)

print(var_kwargs(a=1))
print(var_kwargs(a=1, b=2))

@validate_arguments
def armageddon(
    a: int,
    /,  # python 3.8 only
    b: int,
    c: int = None,
    *d: int,
    e: int,
    f: int = None,
    **g: int,
) -> str:
    return f'a={a} b={b} c={c} d={d} e={e} f={f} g={g}'

print(armageddon(1, 2, e=3))
print(armageddon(1, 2, 3, 4, 5, 6, e=8, f=9, g=10, spam=11))

pydantic-1.10.14/docs/examples/validation_decorator_raw_function.py

from pydantic import validate_arguments

@validate_arguments
def repeat(s: str, count: int, *, separator: bytes = b'') -> bytes:
    b = s.encode()
    return separator.join(b for _ in range(count))

a = repeat('hello', 3)
print(a)

b = repeat.raw_function('good bye', 2, separator=b', ')
print(b)

pydantic-1.10.14/docs/examples/validation_decorator_types.py

import os
from pathlib import Path
from typing import Pattern, Optional
from pydantic import validate_arguments, DirectoryPath

@validate_arguments
def find_file(path: DirectoryPath, regex: Pattern, max=None) -> Optional[Path]:
    for i, f in enumerate(path.glob('**/*')):
        if max and i > max:
            return
        if f.is_file() and regex.fullmatch(str(f.relative_to(path))):
            return f

# note: this_dir is a string here
this_dir = os.path.dirname(__file__)

print(find_file(this_dir, '^validation.*'))
print(find_file(this_dir, '^foobar.*', max=3))

pydantic-1.10.14/docs/examples/validation_decorator_validate.py

from pydantic import validate_arguments, ValidationError

@validate_arguments
def slow_sum(a: int, b: int) -> int:
    print(f'Called with a={a}, b={b}')
    return a + b

slow_sum(1, 1)

slow_sum.validate(2, 2)
try:
    slow_sum.validate(1, 'b')
except ValidationError as exc:
    print(exc)

pydantic-1.10.14/docs/examples/validators_allow_reuse.py

from pydantic import BaseModel, validator

def normalize(name: str) -> str:
    return ' '.join((word.capitalize()) for word in name.split(' '))

class Producer(BaseModel):
    name: str

    # validators
    _normalize_name = validator('name', allow_reuse=True)(normalize)

class Consumer(BaseModel):
    name: str

    # validators
    _normalize_name = validator('name', allow_reuse=True)(normalize)

jane_doe = Producer(name='JaNe DOE')
john_doe = Consumer(name='joHN dOe')
assert jane_doe.name == 'Jane Doe'
assert john_doe.name == 'John Doe'
assert jane_doe.name == 'Jane Doe'
assert john_doe.name == 'John Doe'

pydantic-1.10.14/docs/examples/validators_always.py

from datetime import datetime

from pydantic import BaseModel, validator


class DemoModel(BaseModel):
    ts: datetime = None

    @validator('ts', pre=True, always=True)
    def set_ts_now(cls, v):
        return v or datetime.now()


print(DemoModel())
print(DemoModel(ts='2017-11-08T14:00'))

pydantic-1.10.14/docs/examples/validators_dataclass.py

from datetime import datetime

from pydantic import validator
from pydantic.dataclasses import dataclass


@dataclass
class DemoDataclass:
    ts: datetime = None

    @validator('ts', pre=True, always=True)
    def set_ts_now(cls, v):
        return v or datetime.now()


print(DemoDataclass())
print(DemoDataclass(ts='2017-11-08T14:00'))

pydantic-1.10.14/docs/examples/validators_pre_item.py

from typing import List

from pydantic import BaseModel, ValidationError, validator


class DemoModel(BaseModel):
    square_numbers: List[int] = []
    cube_numbers: List[int] = []

    # '*' is the same as 'cube_numbers', 'square_numbers' here:
    @validator('*', pre=True)
    def split_str(cls, v):
        if isinstance(v, str):
            return v.split('|')
        return v

    @validator('cube_numbers', 'square_numbers')
    def check_sum(cls, v):
        if sum(v) > 42:
            raise ValueError('sum of numbers greater than 42')
        return v

    @validator('square_numbers', each_item=True)
    def check_squares(cls, v):
        assert v ** 0.5 % 1 == 0, f'{v} is not a square number'
        return v

    @validator('cube_numbers', each_item=True)
    def check_cubes(cls, v):
        # 64 ** (1 / 3) == 3.9999999999999996 (!)
        # this is not a good way of checking cubes
        assert v ** (1 / 3) % 1 == 0, f'{v} is not a cubed number'
        return v


print(DemoModel(square_numbers=[1, 4, 9]))
print(DemoModel(square_numbers='1|4|16'))
print(DemoModel(square_numbers=[16], cube_numbers=[8, 27]))

try:
    DemoModel(square_numbers=[1, 4, 2])
except ValidationError as e:
    print(e)

try:
    DemoModel(cube_numbers=[27, 27])
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/validators_root.py

from pydantic import BaseModel, ValidationError, root_validator


class UserModel(BaseModel):
    username: str
    password1: str
    password2: str

    @root_validator(pre=True)
    def check_card_number_omitted(cls, values):
        assert 'card_number' not in values, 'card_number should not be included'
        return values

    @root_validator
    def check_passwords_match(cls, values):
        pw1, pw2 = values.get('password1'), values.get('password2')
        if pw1 is not None and pw2 is not None and pw1 != pw2:
            raise ValueError('passwords do not match')
        return values


print(UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn'))

try:
    UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn2')
except ValidationError as e:
    print(e)

try:
    UserModel(
        username='scolvin',
        password1='zxcvbn',
        password2='zxcvbn',
        card_number='1234',
    )
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/validators_simple.py

from pydantic import BaseModel, ValidationError, validator


class UserModel(BaseModel):
    name: str
    username: str
    password1: str
    password2: str

    @validator('name')
    def name_must_contain_space(cls, v):
        if ' ' not in v:
            raise ValueError('must contain a space')
        return v.title()
    @validator('password2')
    def passwords_match(cls, v, values, **kwargs):
        if 'password1' in values and v != values['password1']:
            raise ValueError('passwords do not match')
        return v

    @validator('username')
    def username_alphanumeric(cls, v):
        assert v.isalnum(), 'must be alphanumeric'
        return v


user = UserModel(
    name='samuel colvin',
    username='scolvin',
    password1='zxcvbn',
    password2='zxcvbn',
)
print(user)

try:
    UserModel(
        name='samuel',
        username='scolvin',
        password1='zxcvbn',
        password2='zxcvbn2',
    )
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/examples/validators_subclass_each_item.py

from typing import List

from pydantic import BaseModel, ValidationError, validator


class ParentModel(BaseModel):
    names: List[str]


class ChildModel(ParentModel):
    @validator('names', each_item=True)
    def check_names_not_empty(cls, v):
        assert v != '', 'Empty strings are not allowed.'
        return v


# This will NOT raise a ValidationError because the validator was not called
try:
    child = ChildModel(names=['Alice', 'Bob', 'Eve', ''])
except ValidationError as e:
    print(e)
else:
    print('No ValidationError caught.')


class ChildModel2(ParentModel):
    @validator('names')
    def check_names_not_empty(cls, v):
        for name in v:
            assert name != '', 'Empty strings are not allowed.'
        return v


try:
    child = ChildModel2(names=['Alice', 'Bob', 'Eve', ''])
except ValidationError as e:
    print(e)

pydantic-1.10.14/docs/extra/

pydantic-1.10.14/docs/extra/redirects.js

// redirects from the old sphinx docs site to the new
// redirects have to be done like this since anchor fragments aren't sent by the browser so server-side redirects
// wouldn't work
const lookup = {
  'install': '/install',
  'usage': '/usage/models/',
  'pep-484-types': '/usage/types/#typing-iterables',
  'id1': '/usage/dataclasses/',
  'nested-dataclasses': '/usage/dataclasses/#nested-dataclasses',
  'initialize-hooks': '/usage/dataclasses/#initialize-hooks',
  'choices': '/usage/types/#enums-and-choices',
  'validators': '/usage/validators/',
  'pre-and-per-item-validators': '/usage/validators/#pre-and-per-item-validators',
  'pre-and-whole-validators': '/usage/validators/#pre-and-per-item-validators',
  'validate-always': '/usage/validators/#validate-always',
  'root-validators': '/usage/validators/#root-validators',
  'id3': '/usage/validators/#root-validators',
  'dataclass-validators': '/usage/validators/#dataclass-validators',
  'field-checks': '/usage/validators/#field-checks',
  'recursive-models': '/usage/models/#recursive-models',
  'id4': '/usage/models/#recursive-models',
  'self-referencing-models': '/usage/postponed_annotations/#self-referencing-models',
  'self-ref-models': '/usage/postponed_annotations/#self-referencing-models',
  'generic-models': '/usage/models/#generic-models',
  'id5': '/usage/models/#generic-models',
  'orm-mode-aka-arbitrary-class-instances': '/usage/models/#orm-mode-aka-arbitrary-class-instances',
  'orm-mode': '/usage/models/#orm-mode-aka-arbitrary-class-instances',
  'schema-creation': '/usage/schema/',
  'schema': '/usage/schema/',
  'error-handling': '/usage/models/#error-handling',
  'datetime-types': '/usage/types/#datetime-types',
  'exotic-types': '/usage/types/',
  'booleans': '/usage/types/#booleans',
  'strictbool': '/usage/types/#booleans',
  'callable': '/usage/types/#callable',
  'urls': '/usage/types/#urls',
  'url-properties': '/usage/types/#url-properties',
  'international-domains': '/usage/types/#international-domains',
  'int-domains': '/usage/types/#international-domains',
  'underscores-in-hostnames': '/usage/types/#underscores-in-hostnames',
  'color-type': '/usage/types/#color-type',
  'secret-types': '/usage/types/#secret-types',
  'strict-types': '/usage/types/#strict-types',
  'json-type': '/usage/types/#json-type',
  'literal-type': '/usage/types/#literal-type',
  'payment-card-numbers': '/usage/types/#payment-card-numbers',
  'type-type': '/usage/types/#type',
  'custom-data-types': '/usage/types/#custom-data-types',
  'custom-root-types': '/usage/models/#custom-root-types',
  'custom-root': '/usage/models/#custom-root-types',
  'helper-functions': '/usage/models/#helper-functions',
  'model-config': '/usage/model_config/',
  'config': '/usage/model_config/',
  'alias-generator': '/usage/model_config/#alias-generator',
  'settings': '/usage/settings/',
  'id6': '/usage/settings/',
  'dynamic-model-creation': '/usage/models/#dynamic-model-creation',
  'usage-with-mypy': '/usage/mypy/',
  'usage-mypy': '/usage/mypy/',
  'strict-optional': '/usage/mypy/#strict-optional',
  'required-fields-and-mypy': '/usage/models/#required-fields',
  'usage-mypy-required': '/usage/models/#required-fields',
  'faux-immutability': '/usage/models/#faux-immutability',
  'exporting-models': '/usage/exporting_models/',
  'copying': '/usage/exporting_models/',
  'serialisation': '/usage/exporting_models/',
  'model-dict': '/usage/exporting_models/#modeldict',
  'dict-model-and-iteration': '/usage/exporting_models/#dictmodel-and-iteration',
  'model-copy': '/usage/exporting_models/#modelcopy',
  'model-json': '/usage/exporting_models/#modeljson',
  'json-dump': '/usage/exporting_models/#modeljson',
  'pickle-dumps-model': '/usage/exporting_models/#pickledumpsmodel',
  'pickle-serialisation': '/usage/exporting_models/#pickledumpsmodel',
  'advanced-include-and-exclude': '/usage/exporting_models/#advanced-include-and-exclude',
  'include-exclude': '/usage/exporting_models/#advanced-include-and-exclude',
  'custom-json-de-serialisation': '/usage/exporting_models/#custom-json-deserialisation',
  'json-encode-decode': '/usage/exporting_models/#custom-json-deserialisation',
  'abstract-base-classes': '/usage/models/#abstract-base-classes',
  'postponed-annotations': '/usage/postponed_annotations/',
  'id7': '/usage/postponed_annotations/',
  'id8': '/usage/postponed_annotations/',
  'usage-of-union-in-annotations-and-type-order': '/usage/types/#unions',
  'contributing-to-pydantic': '/contributing/',
  'pycharm-plugin': '/pycharm_plugin/',
  'id9': '/pycharm_plugin/',
  'history': '/changelog/',
}

function sanitizeURL(url) {
  // escape untrusted source by creating an anchor element and letting the browser parse it
  let a = document.createElement('a');
  a.href = url;
  return a.href;
}

function main() {
  // escape nonstandard characters to avoid XSS attacks
  const fragment = location.hash.substr(1).replace(/[^a-zA-Z0-9-_]/g, '')
  if (fragment === '' || location.pathname !== '/') {
    // no fragment or not called from root
    return
  }
  let new_url = lookup[fragment]
  if (!new_url) {
    if (!fragment.startsWith('v')) {
      return
    }
    // change the fragments for versions - sphinx replaces dots with a dash while mkdocs removes dots
    new_url = '/changelog/#' + fragment
      .replace(/(v\d)-(\d+)-(\d+-\d{4})/, '$1$2$3')
      .replace(/(v\d)-(\d+-\d{4})/, '$1$2')
  }
  window.location = sanitizeURL(new_url)
}

main()

pydantic-1.10.14/docs/extra/terminal.css

.terminal {
  background: #300a24;
  border-radius: 4px;
  padding: 5px 10px;
}

pre.terminal-content {
  display: inline-block;
  line-height: 1.3 !important;
  white-space: pre-wrap;
  word-wrap: break-word;
  background: #300a24 !important;
  color: #d0d0d0 !important;
}

.ansi2 { font-weight: lighter; }
.ansi3 { font-style: italic; }
.ansi32 { color: #00aa00; }
.ansi34 { color: #5656fe; }
.ansi35 { color: #E850A8; }
.ansi38-1 { color: #cf0000; }
.ansi38-5 { color: #E850A8; }
.ansi38-68 { color: #2a54a8; }

pydantic-1.10.14/docs/extra/tweaks.css

:root {
  --md-admonition-icon--pied-piper: url('data:image/svg+xml;charset=utf-8,')
}

.md-typeset .announcement>.admonition-title:before {
  -webkit-mask-image: var(--md-admonition-icon--pied-piper) !important;
  mask-image: var(--md-admonition-icon--pied-piper) !important;
}

.sponsors {
  display: flex;
  justify-content: center;
  flex-wrap: wrap;
  align-items: center;
  margin: 1rem 0;
}

.sponsors > div {
  text-align: center;
  width: 33%;
  padding-bottom: 20px;
}

.sponsors span {
  display: block;
}

@media screen and (max-width: 599px) {
  .sponsors span {
    display: none;
  }
}

.sponsors img {
  width: 65%;
  border-radius: 5px;
}

/*blog post*/
aside.blog {
  display: flex;
  align-items: center;
}

aside.blog img {
  width: 50px;
  height: 50px;
  border-radius: 25px;
  margin-right: 20px;
}

pydantic-1.10.14/docs/favicon.png
[binary PNG image data omitted]

pydantic-1.10.14/docs/hypothesis_plugin.md

[Hypothesis](https://hypothesis.readthedocs.io/) is the Python library for
[property-based testing](https://increment.com/testing/in-praise-of-property-based-testing/).
Hypothesis can infer how to construct type-annotated classes, and supports builtin types,
many standard library types, and generic types from the
[`typing`](https://docs.python.org/3/library/typing.html) and
[`typing_extensions`](https://pypi.org/project/typing-extensions/) modules by default.

From Pydantic v1.8 and
[Hypothesis v5.29.0](https://hypothesis.readthedocs.io/en/latest/changes.html#v5-29-0),
Hypothesis will automatically load support for [custom types](usage/types.md) like
`PaymentCardNumber` and `PositiveFloat`, so that the
[`st.builds()`](https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.builds) and
[`st.from_type()`](https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.from_type)
strategies support them without any user configuration.

!!! warning
    Please note: while the plugin supports these types, hypothesis will (currently)
    generate values outside of the given args for the constrained function types.

### Example tests

{!.tmp_examples/hypothesis_property_based_test.md!}

### Use with JSON Schemas

To test client-side code, you can use [`Model.schema()`](usage/models.md) with the
[`hypothesis-jsonschema` package](https://pypi.org/project/hypothesis-jsonschema/) to
generate arbitrary JSON instances matching the schema.
For web API testing, [Schemathesis](https://schemathesis.readthedocs.io) provides a
higher-level wrapper and can detect both errors and security vulnerabilities.
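As a rough illustration of the property-test pattern included above (the rendered
example lives in a separate file), a minimal sketch might look like this, assuming
only that `hypothesis` is installed; the `Model` class and test name here are
hypothetical, not part of pydantic:

```py
from hypothesis import given, strategies as st

from pydantic import BaseModel, PaymentCardNumber, PositiveFloat


class Model(BaseModel):
    card: PaymentCardNumber
    price: PositiveFloat


@given(st.builds(Model))
def test_model_invariants(instance: Model) -> None:
    # Hypothesis calls this test with many generated Models; the plugin's
    # registered strategies mean the field constraints already hold
    assert instance.price > 0
    assert 12 <= len(instance.card) <= 19
```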
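And a sketch of the JSON-Schema approach, assuming the `hypothesis-jsonschema`
package is installed and reusing the hypothetical `Model` above; `payload` stands
in for whatever the client-side code under test would receive:

```py
from hypothesis import given
from hypothesis_jsonschema import from_schema


@given(from_schema(Model.schema()))
def test_client_handles_arbitrary_payloads(payload: dict) -> None:
    # the schema marks both fields as required, so they are always present;
    # `price` has `exclusiveMinimum: 0`, so generated values are positive
    assert payload['price'] > 0
    assert isinstance(payload['card'], str)
```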
pydantic-1.10.14/docs/img/
[binary image data omitted: samuelcolvin.jpg, terrencedorsey.jpg, vs_code_02.png, vs_code_03.png (truncated)]
L3=|/ P\osB&e$,wwpU>=Ǐ/:rgBNku@Mph5LĦFz9\0GةnVgz30MqZ2%K %Թ}L[t?[V~fkO/?].Lji~2H*?qI] %߽ɣZnE[sHnRB҂կl;&F.v,l!&}q6Ej wL$;v8LKNnX-'/ n]~pX V=ѥ[`넋T4OD'=9/_Jd۪զ|VV)\]һdW ] FEUѸOlLĂo'TWBug׭9s>4W-pZ 5U4OVmE_[woxfcd>{i`^=~akԔQW);iSç]&N]_`6j+lcdq1 ݬ)j P. ȦY' $=t*#=e'vYa~efڰchW);ʳ% Fjm}$/Z:ECYYyaN[M2IsuC ֊g7=^RS.V$SJ5mb{) {sţ oY`f=|btǝvDwe%9'+vi%|RG `Y-9jg*pdN^]WonoUM0"& -ZE剜NPa\q]wMKg_\CxLy{͠w'Xg*Ae, t>e&<rX̷ΜSn;Vm2=[x'NƱ2bԄx݁ݜwp$H>(߃AxoOSn7dx|0A0D=+_Dž2"5̛Igx̘N,*ݝ%K eiJKwa_{wE. 7 fxk^eXiiQj_ʾKG^yVb(r rò.{PQ||vw3yg>3c)pTN5Ʌ-7V߻6= gA,|И8Y+TW876$&FaK섟ߜfD<$]FqƒbW_6=~t+~F;$$oq*G&TQh'gIITl^B#:OD{ft{5$!!JŒR{ NS QG>#~;/ICLouEd_ 쁮{* o3ễoϾFT_5x=Qᶵ < ~8ڟGR߬/[5j4W{bbnB"2T,H<Wn?X1|\rf|ݧXv1ί[9b#69gIʩNG@Ѿ9沽#mA1d{jOv[—-nݏ摓_8rT]~\²qr# E="#|xITTI˨r:fhǺ A;Xϭ$ QD~C)Owm 64Td5{"sXZjC M5N Ւ{o5 =/,96^U)o)2pJmV[/"Fg[>&\ŗ5RڒԾJͻp @R/۾Ed(0#b=c5z-m1#bbѬ@Q!3Q>(8o8Al(ܶ#;רz7q+-1c݃i^.}>󄚂#*Rl>ɷ1VvZDDMBHW/cގ1k"Y#Z& Z\Z#|xmS[Ob+s+cOQށ^"";cL*YcիnvAeJ,JV,iҔ A͠KMV59K^?cS~֟7q5„o1=[3b7%fy'+u|&b}٦ls仧/J%ƎDl*>jFS$^=?%mko5c|bYC{TCG&ϊ6w}΢#N-ߞWtm H7]ғ8bĘȦN>9KZ3'kϘ Xryo{Bg3}FO2̙UӪRX/Ҿ/isڬ!b*zdU'5DD+/.^Zz<Sy4̕r]m@>'f@٥(mܜu,eaCL%yFF!"bm܃Xp$T9̈́9E*[bT1Q\DVE >o|+ OY7*:]W+s"{]ն_m=s~RpáDA|.Pb.+SvmE孪bJcHh߹ oKq5A%,]}#d6ŞQlcɌy[4!1^؉kߜ2558PFiK?y?}3d/LU}iBDęVO;j蹛tI]jsE覩6&>QhOE"a]EI|9eܔo%b/ϐE˔})MoCDW7e6Y2_MM:}'}7kʄ$T-I>pʾ=fzbH*TڔD|" ӧR OhYW52v$8kg Z!P(1v8Q)P+]<wL9`ڤ"Hk\qF6^.Ys:sGDf}!u*[}}|Ee\HqRh֮Yܽ"XOXʪVqx GŻV {NsgQQ+)e gyFg\铪zF`#Iwa$7Z[}/u ‚w_qi`RypUkO@Ҽm-UExi\c|N}nA FϞTsY5ZtxvY^E!I'K!ЕTZGE:mх 7)B5?5iϦ_j8]IJ]?lPx8?\)+ձ0B(얝Ў!›8CٟGFCɬBx[+m%RM+Q&@J41ASf[݇ 73hT(o+3`c#C~M%glmT/IbVUEE#GD|LJ%*>L&rUQ0$ ""mєiY$K3GCGؽxZ\0|Լ/[Cr\?; NWsq'ٟGHxL=q']{T m6J@*%"s,Lݐh,8 bܳ ;\XrF9R&tlm(ìለ/ JơShC=q@#aO#Y-YVAJdgbyNlκQ7]7^%nȢW->0s1k mZ=ym^)j$"MY=Yԧ~񈈣SG<ū}C gݼNآ;6ݮӰJ{H_6FkT2q'$v9Ayjy .^y~t<6}%&0{U?oKWn% }drwO{"﯐0lfCDB s@j(<,zDDc 3E&4}O͕N| ݘWȉ aLSj+GUQ2NdGcz%~G=/޵ho?)ߥk/.ܟp=xD^<#Yg4Ҩgv8y0#{aƶ+Qa w _q|=]ʺ}VѮs|p ᝎ잭[^sSK*V:1YF&W>Z ٭wTC̄u}LՒ1NB٪kİdsib:웷Y>ɽy 9Pzsofڇ'XMyWgVUlI;gV:vtNTV +6le\k[=7}&jYt*,ҳnISə3gƤ\.cU}|뭴"Ū!AJ=d, G~7=C<gv%lMZsM8PmoF43crb6<ٔwd׼/WtF0mfN,OcnwL߳rGqx{\,%n_eڄ UϷ'.Ld =_jyƺ5ڳ2q[ҚԪ͹GLpܳ[oVb,ߕ᪕GLqQI[n/t?HrsT}sQ^.m|MG*۴GU?GЬ8ʹ[ hOA¬;rEc7#"͋8F-zqTraqtU)عcSp߇/:r4&,\O}r{K8[@ \crώjt_ܮ}^JdY$_XT܋rqŅۏ\_l""eX/[yY.ooLk23+c*ǴUIBs瑸}fÃ޶[N0\9;pXui6M/vceRq Ԟe<(Ҭ듫׆n_Vx9ӻHϮ7L#sm+krKBcNu4tS ; k__v9Č064x5{vu.> ӔǕ_Rku ?i6HzOv2=7QE/RH$GpWoL1}Li85?}*$,x|G+) oםrCnݮkDS.˚H[ ~xr ܚQʪѰ&_J\5FߏTlӱlI{v&C#V F}DgYz6Ә',]ӗV͖'O]]UIH[YsضQo m"NyڰR8>6z{\\H[}A~Njm  _HpD$[P4wH%kD$U͎(t勸20%(e+4|aLg'ݫC(uqA{_~ u4+\w!h~ qfޙ LFcH̙ru"$'{ bGT,}wsǾtIj(9)rQ!.dT#}pJY^m0e#?o!S- :v94wk=xG&rB=9Pr n:|z,Sg_zFN3K͵_22svXt.mqChϥ`߅)j?C#8xݐ#$ȣEɭ܈\"_[ ϊ=?.ZHѵ[힫Lɲ3ՠ+VRmsk]ٿ%:~KWsΧ_?x]C{$wbH_~.KNߪ?tZ7Ef"⌥;D/sI?^l"HYrDDkkSʓ4eղ/Ϳpzu }8/N<=mr*E$m:LPNf_ IDAT"Ȩ-+2D&)iuy)؈!S(p--y犫 M t{hMAAE?Kԭ#V᧴cS-Y 3󀞌 O;oᩈPj?ƕ{r\z񶆾]뛝[HUS 9,fQmᪿ IUHx7.ܑI%g55:8+q1U3^=QcjyM*Q_t|ޛN^2DS>6QՐw2MO'p\P4չNO셠Hg>ը"ۯfm3&ȣ?wakOjOf(CƲhy1I<{O%Y>!YonyĒqljm`J‘l$ [JoIZ~%#r/⦆IRi ɬ=|DQQ_-&A(y4Pf'ߒWvsDg$lc|?rsN-ft둑A-E&"aw#y35ȉTujk2Xna1Q,1 $aE5W ``'9vmOCO"=||NBD;jCke\GOYv$ѥqm)VlE?yg6m}V'sFΙ_}j>cw zW)G1c:_pnok1Xd5{ol~3f@茉.7wej|)h谎L${t~E;P{_a/݇W)>~>`_g_ZWT?lWou2Gc3OW{j,۷7_ j &Zժ)wj>LʲfŸL9kQhW-t`DVUʻd>ɕV#nwUВ M,0/'^qit4yqց6!CefV沪,JgiqDh1:sU8Kن9tmm(é3glL&.fû$JWy)Կ=ȫEq\*nZ םUOMsH]Ӕ%}ZLu:|OM]G֦?m0rw}On=/*t:T?Lcz:*yVrbHާ?}vslKeT5n0h9F Y] h=xv'es;zMmdNmT5Ds]b''nSUi+Q;OUc"/<{:V}NNSZMܟhZP^2wnŠ .9ؾޡݝ!` PC<%|[UsZntȚwӐͼ{ijq~%r[]ADD^FISU%W:xueg 
-D)I@D?UkԔo,8@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R@R$$@R@R$$$@R@R$$$@R@R$$ﻫ?IENDB`pydantic-1.10.14/docs/img/vs_code_04.png000066400000000000000000001015021455251250200176300ustar00rootroot00000000000000PNG  IHDRKvsBITOtEXtSoftwareShutterc IDATxw@o Fa/ApQGu⦮ZYVu׽[uTEQP@ # 2~$"Z"Vm\޽w;# B} B! B!.!ϣ2&a?izOߺ4&IB~qwT8Yml~JazE7ߺc{S5oLܻ AOt;>%DņB}p(}/sR! }ޔzKR EH&~Dyyt >54Yϝʏ.jwzz]+=bIN7<؝ފɔ]~3B&L$3JJbi#;'iM)b`2^q߸e'na,U&AVԘR度mRT`wBf.DA9i< WؙWD%.?؃ Y 09ΜŀҒY LxNԎ(-=;⩶0k;1YqRVg-S"h1=Ê('.0 5Eޔ@j>t3BQ;OiB؅qy 289;w1 cQ~j)`3Ƨz|{.FWnZs$vQ BcV;;op W:y-Z>pָ/zϋM~ٽ䒵'Euhm> H㩵4ve׹gR1 h@+Β4^¬]Q3m*+܌T]ȷOhvm煸 d\+5UچdeI>p. Ѿ<GakqKlۭ?L)SҚ{v}O{j_aA6d[(qrBX.IN_~l?#ٝ39RG̓W,VAեuΦ{yjtޥaiQo|Km+E{qTqZ\;z` ZaiD\ `|#Bk] ^hi&HoMyN JJ DiRm5AIB[pgڹz]MXw@e[~vZ~)x%cc{0UJh4t!ْFi*wTTHF$P2m&5P\\q!)-~yY^t)<| IDy Gƻ{MRT&oUqw\(V>.T@ͬÌ>kuU6+d-bĐM+uyʄ2c+k*d,XjB mghFw9y* !_]$U-[J*݀R;iʮs"X  < ;4}ccm XE̕@]K %EFt@QD.2.n??7',/ _a>ЍU"+BܬHS;>Ō>/G#zZtJ}k'BY^Tf` ИagJ +cnHò,M GT41dl3gXAbmҼ(Bofl5 >1,M ),+@d٘j.fCIP}Qj #jS%UTeޗ?ÇNh@XטxSzUb(\ZN_G6MW@)[Y Y |ibcx|gDnBJėYg 6"(ꢸ{.z4WY]s#=g%љi%Ϯ3R^FPIeӘEg0hVUrħ4} jnM򷦱Y}9oY!(hdmL5˲~fQR,+FP̍8Vtg'm}*SylѺJ,4|6$1{sFLpxT|푁O6l+ c-\ĥѹP}G$p^](Zd5"9Ðmneflʲx`j@Lt  A5=&t&:H .ly Z~JKq@:TXU2!NN$}>6EP?lR`<™]Pv-B&ozFu8QhnV3&u=0l4#> >)@y)*27iEKiklFSK]|10vƦ&i(R났U]9Sq-O0oc !j[dIs$XpʯoJ4oYIA^<Qx Tc\C,@VMMtVpmcgY)g+ov B!3B!;B!]B!0vA!!B BaB! B!0vA!!BcBaB! B!.!!BcB!]B! B!.!B BcB!]B!0vA!.!B BaB!]B!0vA!!B BaB! B!0vA!!BcBaB! B!.!!BcB!]B! B!.!BT&wmB^E&f>f>lvZTz)Uق2}3ܬMQ(ECNW+ϕ7US/{nW!S૨owǎ8y謶4 nSܸV%+Q|uEy0H5BP3VV؉%b;7:M/6z^R#֐({w&C)K֩N_(|6}Ǚs aD3c{״Q6wY7gkoUh^}ӄr6V}.3O*7a؉jRCϰᚬnJWf, (9&O\iPE3$P}!,Єe`Q@,v1k ъ} 3v`bݵKsjjFvI}V#?CฆL'ozj:Qvco}>y@޶ˮU(C!bstJl*U&حSRʽӈ Z ol_*~\+ՀTV<00?,+%yiwd ~2$nWm#<;3J~]VP{οC0rV4RnÈyL*ojT.§߂-πڿeyOikgm "ڦ@Sg,0`QDOe*A~;f "/JKYt&Q ?;iRg=(,Hߞ*ʲ~k1)+\̰FW;pj*=_ PKJft +};#yA!O.b5{2=gɴ4*==*TZuq{8܌ 'eRf%ی_W'9eׅ9MGQGтǷ sETJq6TH mʶ&*W{yB+ֺV)=ьoE4'I}qiEEɫIsgs}UTji̯Y\0:Q̕mghFw9%B=.#05oq]hv=EE]Wi BlfȢĥrn ܺ]i 4#vQ(4G]Ƴ !wy]4QޣUl{_68]V=y|`SW 69w뚼;\oXlkpKn_(N*$] }Z%ښ{Y49hOWgЙd젋c\H#X5Fv."F4$V4_ar5dռWkcnoP@%,M=ԗ9&io5{#bVδe^%,!I 7FVD3D ^Uvԧ54ˆ& Ԁ>Beݖ I9s^JTplʊ"ygP Dzb_z$C43cgݡ0tvSTK* TȄ#nY ] &ArY۶Fӿ0<BS~jl ڀbiks}\i 3*@).P께bsM'XXvJEr k7OCG Qa󧢔-= 7jQ&3J#F[zX:wpIafkhI!ŤI{@㗚v#g+#wא9J--'@ ub@]'efsokeb̈́q˴;\pjDs!ػP;2O 2!#͜-c}jR+L}94C"Zn!2xhy==[j¼[O)2z1!{/}=rtE>r!PcmJ80o=JEumRmidv[a; '}t)423NYi$g ^fTy4#eX"CVYjeEȴ+Ϳߠ!/@|:/ \Z"~JRYp -lϪo5R+pz8m19UIzWCDKOGWFQUxJ #}M{P)ĥ%j嫆nvQ_C3hl#~RƸPVUqSJUpwz~]*TU$_{r@7RX͙PT=Vĩɫ~n2*jҫH;Mwd73Ɖk%x^!z{(X q+,ݜ 5p}ąְKA;J!ޞ߄pn+`h:Z|w #qT'RB[K@]BooXoc6S*oe*aӋBaB!B!ip}ɋ*<!j>wA!.!B؅F7{&O>F9>CBa򶰃:U/Qck^ؠWq+L8Y>1>SվPGXB}}xB!^.VN,^{֕lP9ꏎ,{% :ٗo@_nj\RnM^~! 9czOOvm 9V'Ban8Pލ? As@7pc.U ل\Q-(fF; @}RUCBc=ݱ7&} 1G]n֪͜ec8_ L,n 4 Yu+B!K"58*Gha{i/ٝO"M.RI-!Лz/Hr^RU!B}ZErQNc`kVVfDSW K]O۴)߽PV,B4OT€ڬv_^QPHwt/.';9PpezZCg_~4JD{]l֜n§ց,Cl\P|z\1ig"(>V/Bս,?: X|ʺ95~ [Sj7vrλk?=?i 3MѴ IDATGFNP~]lq؜fAUeqzZ:ZBc6bUɗ2?w+qdѰS͑F!Pk8::rGyzz<#zB-9=Kdd>]~dǬ6|`mS |㬮KGFDFF_Yl.T3tL{X[pa{xی]8؎)3WZ{k~g] fN4l5,1Z4s- )JNJʖZ҄Laj׻,.AF~-zD/mok8\xĿ}|:0'E'o{v$;!+}C6PV(Bj2 { %};ÉgM8-|C?+{nM%ELq̨>6W~L8 _=p͂#yW; ?_=\3]*<8~O256W[;3(5&bN֯}|2mPR~isnD#4ow1^Qϝ*R{>CA?Lin nɉԆ\Xӓ0~]xP혿3[f.-^{ 8c֬lꐕ? 
"|9 ]?*nЈer.vF Mx\Y 'SYsFwV]w|7P$Yt,KC\,2?e'RZw >kKT 2.r:cyAډGNQ&mwQ+ިW0 `0aƨΎ)ͫnў%1dqXjᓈyڞ!£olB{;HD| lǼڝXzgP//&t¡ל8ks{@~#a' aܛG$7#aGͨv <Θ:VrSػF1hEaNyS]ʂKOhC)G1pޑ!YtEt A i_}P0vA5^JRNUXCeFG|)_+iIl g9]YQih60О }~xkW;qO&@e7\/%K6 ~>]Z:>3#s[zv }؅ч|*(ʉ3Ww֓"(э'nW0FlB*H}En7wKjAޓd (nR _6b₤G|(Qo3F8|Ed\tpi/yME0_.'> (ݥ>4Us'8.¿_v~QGn ɻW O ү?a5 GAPNA wA8)`aJ_9u>ݘeyLV~u*"T;uhOg g_ (~ O|Vycgbھ0ϯ%ʓYQA*o^.Ru3[\+mP'4}ռ_޻t2p|ݜ55qxyص kHѽlV`= Ү߫|Vь9&bg+b2'mUa aall8,ߊ݋%%$\p`˪Qv~MW m2=0ҀDwe^L2!&kh'"FʹAKǞC֭8!\H%;:7c?3EO gHk`=(eZTCza-*a X7i`@[&ʩcW/TPTtq50s 4}'NhdYuu6ɞ} T9&coVˊJd=A-OLJ! Xv7syRLU(,w 77Σuqb;x 2̳<>:T' }>g;GCCohM^x >.ɴ5M֢Wtފ1M±m?lR sgP%Ay;gDKqkz҆'㇍F]趁;(cFZہlS*}eBcPQZќ.D+jEz,/RO$8AC.v*aC%b;7U z J~uJvq2U-wnĖ&/n;p<=t[VMv~gͤQόJ #A3x5P|fNl7IAʟ[]|rѫ&MLi'&h@Qx,`)r ;iV%s-L2j֚IO3j27bgjf)fF -=89@˜_L^o6%&D4oƗ{)bQLQ3y ZLj&]81.C (͙#Qߎ}l67),#j{􇨱kd9@Ts-&yг$dEB uMl5Xv c?eAʥ;ݚj,:9lxU 7~P^$b/q HWm[W 8í+w_ԭ~P>xu [5osڝu1hM\vb1x4N9Fjj-?K3d4,ʃGq|̿[}׸er,h²#sPZOѐ?},l2.V%BU1l;x[fc-<gLG!w|H{3 ,; y6aץ+w.y3 G~ځG1Z?#4#T @|O*66vfb5Go&Ow/;S:a^Vy0p LlWd*9~xK[ \}ݹN.|m s2w(`giC)T7<\f`fOwlōuPCØ 6r˭gjiR׭R ʣ7a:nX--ʌٻ2k>4w,s/8~})wAػ`Sc02Œ{zxDZ 1Ŀ/߶D*ڵ ܤɵl + ]߾_T~p\rqmE, Z_eo?<{F w/sER+,쩺 XcV7\;WS.)w tlx~2͌meev;IHB!s,?m ש%]a{w46n:\}<=jnnnf~zsS9"-v)R5<ВBJB!A4w݆X겳>^|#? 9S 9~]lq؜fAUeqzZhvvp'&pz8m1H#BwUGZ~sQPkF,Z!~?;ߐ:ӆE<.G)x7Sap8gRog6X-.JЉ{q̨>6W~Lmiz3SRc"71̚2M>s`Y6lǼڿ)?~ڥ}?fǂ)hӤ.,Z !bnL[k#lA?V<ݢ=K cNɼ';wEj(:gǚl="4@OlHōkʐ,}|F6G[gy՛3F 4e}vzPG@C?v~<6Hw0eܐ6Y!ȹyG=9pd/'2m?c>E?>Zxa@*I$r DX]n l^Ö{+- dfh(`Q/ꔟj 4ok$XX5gɈgAH_5Pv6|9{ߗ O5_o _zn6D@ÔWZ~>*[~ƛ7"$Iej)9g?o^Ҽy [pv jme;9;/}r@2noܼZ`p|B-C\_nyֻ&D"( )W@k|"$EM{QD=D.SѦW's]Kqxt$_zj>;;{3G ̉Vgo(1%Ԑ`jaj#*af{CljM.r+ﶦz)WǶ}/XC_I׳ gc A7deE7qVAb4JOPpmdq澿kB ^zO_&:q-MM[O-0擀5q">[<m}yüZ0)6GaLyo7Y+;X dObR]cepakܘW6w :76OٺylaߕI{!'Jp7K}%*`ۻXZڶ 2!R:t|"Zp*hOg;Q`:t 6)|I1lmp4eGEm #.M;hF}F^qKcPQZќ~Q*s( d5B^%ir 8܊0qD [ɉ*= C-=,5dyޮ l]ޜV]=zn|ecپ?NpreXht+7ߎS:s1 IDATY^zVg% {{7JmgnĖ&/뇯J?ΚI 9 -K])s3s֔OeFⴚ%]81.C (9025*I;+^ !ILR)ˆiYwn>W #3`%=nuuS|SX&6G;x9 Uf_8l{wIYf⭇|2no f8~nia̯Q~s&7#M17tjkg}z-BսlEEnTHyyeZvv5XY~ Xv?S2Q`^OߔZt`wq Nnf=Phe76\9[]`h&䂌Jn@!474B|tAb1Oc̃ νԳK%&SQW;KS+73zr_}M d4ȜJBYlѣj<"[qI-11L\ybyG@CM.noj(Nյ⧢2=<]4Z>rswZ̩]6f (dY%A斵f1 PMU^.Ta8 qj*t6`:w`0RTvF,B8.$rs3^+|xgs+3q=-1σ4^Dι)nH+9`bM;Ub涳2\9"B~Hؚʁ!_RW d(O6b˚!!Ȋ~{3;oe/=|oiPe6x婡q+i' 2ȊVTU$_^LQˋ8chX)Hk}Ӫy5u @{bgqֶVXj[zԧOk_Zkڪ8 &2( n۞?{=q^rT⯾ sS?C7WHiWjwD j .ͤ鏉bFg" YEx<=Ҩ't4@ +_~+CYPVkޫȎw:SWăy'-9jHttt_S\:>-9Z)iEĻGGGG;*huZѠGFA#@o|;ᓷ{Řuh'GZ.h{e[^Ҧ?q’J7j>}r[-%͎umj+s3鴉?*πqsGtSUCm%EYGg !@MYl){з<@3fm<T{A𰘡X/m{˜J4))^^y\!|-QiԪ+.oab blsb 0j5B+4aDoͅ[ \d(PE KrZP wǕ9%NG+߬;8`BM~mז֚n} =-/Ƶyt| 4M 5l8)+{2raOj N\Xr[GS0#bli$2rj ؀IYK3T̥3U׍CB=4Nx-\Q3@a8 r0B;)p9Lua̙^̐%g4{p G6is_D1M%Fh Ο/n%?υZU}AzzyOJޚC q1B_qpN2}I8@_(7QFpԋ z"NaIM5 g{EQ~*]My1 >"G&䵅&=~XAàH>]9qr !kUrf JߪINK m<<脩ʅ E͖&جo5Ma.~Akxbl fNU{&S]saBN|-+ZT8Ϛ&(hnE($>.ˉKIQ|Zi{(9-J=4RP A@AјLrF{%gӼ;~7{r` |5NxoOny–/losFŨ,ҢMV 0֖Č fTj\,6x\\c|Fey3S:6$i|4)A`.C&*3Μii<`. 
@*35]n{F&&LBWC&)+zQT^`#{ '71F{+/m'Lxww*?礎ᖘ4~92@܋Б9-GS0!tbG-Q+2s*ha"]&^#/obx^?ΉL^kΦ)9΀fV Ve+ʆ‹F /M5ZHS.ڒ^\x.qüQhAUۊQ%c6bR$-yfٲWIsSCeN(`S@7f^-˭zx1+5V\+j0U)w֨n$HscQ{׊hށXKXoPMzMHڹda^Q6T61u SY<M\ߤ1riUIUNms(|x~!ר;Ft XǍ 縧iMYqP @4Ԙ'JpMQ";qCBg2Ю0+G 0 rFYO@>%}}az_`m36 B1.8vfꕚJ1u6Ǡ(R&DuscNJ/X!#&82̣o3$Gk.wtYryO?*Ni^[u&}?k)I 3- FQ)O Z YUiplܦje@d?YssKSUEo ]i`e9XV#aMzFOmn*Os\s}Ld9&T;Zߊ?{s]J7p+dn` ђJtAٷXa4Bo4GשT\#XeXn.AVm1@ݰO}$,VxC1*"&U= ~C'uŢlBv^R4=ukyM (<]I6ZyE\RPN7<@8&xOP_>up\ @Bvvw\㣝zqv]rVqQ^"@1wgoF]Q#\O8xԈM+m*u;.*\CR fr" E|N{( H_WG#ve4Q2lXt}[uRif\Y(d oS4qC\zoy8S&NwAsF#&ra60 ZӊM|~G[3E%|N]ESO繺4xs⁐ٝVeV^<:nւ87Tj#}/<=6sp-[L1SЦeee] sSiC'1¤ȫӾUJ!I< fn?0]:I)\s@n+3B ⸆ 㴖*ff,Ŋ}& S垻!BW}1=K+Zj 5 \׌5$4g@]̙6ĩťmy iކ@ K'ov-X|W h!@[k_WW)SP9 /r@@@ 7@ku@ @  @ #@ @ vA 2!@@ i@ ] #ym~ox&W %$D %o+ׁk'.\ZH^*{\={v?>piĨ[5"qax- 13W&nqԅs;~4;gOwG k?ha P羺xdP%eDwt),#!hqvK:wueaܲQ,Y_sB\S0gn7. ?xנ?MU|]AYOY$zjHӎq˧9Eu_UJL;k)t'IqVq=:X;|}wS!_''wƭ[c#$(gpwx,A|dx(胗܌tK3J#vA 3V'h!$eY4,jE}oXl?ksp75>,O5:,'I tywmDqqA'RzoİqUϛj[Sv Fdy!zӎrL;UT;%+D!$IY.~ a@G`>74ca45L0S6Yh3~ZN -?˄&*mNa[H*cf[7(4]2TnUm&L H$ilIUЪûyDǪÙS\! :}I*5l#jYoʻ/0aT[psh=0hq.@I2t.Ug3ntC Qc)' 0:x)M)ߍt3K5,79npcRȧk+߮f E^N^hbbPHܼy駫;Qh#_[Oؚ/rTveDYw(W_JBYEUZҷ7W"{_ϮKa{qxҙ N+N۱u5E~i|9{d\TPҺf:Э-V<7ڜmQ+>]¦eyZ⇳uYwW==|3W@v+v|TߔNaֻ wE h7&ɮ t!r b8FP1BMhE`2ݘV zӜqّ/GQ,m5:?~^2]/!?.\>6zvݮ?{$%s^º,˥TGUl(+Oj̜7fn:]\t#`@!ʓ6Bp Jh~cCܧ {NXpnڧ Y|4Y\ZӋwO^ n=?}mNB \r7] iqͲ%K޸d,޳9Q 'SSlLX x^d.9{;UW`7PӇk^yi*I^kFp0½b;XLT1vTL_.JJBk; _صH˪] IDATP.ɫ8]7Q3@_ ;'_CMGI!nhK2IQߤ1WJ- ܣ+42AӜVjz{_ lUˤJ\nP!DWd} ЄZ罽u3B SǸ SYv!>&tX+kkB͕trRp#ZqZdntTmal߸-3~O~u] .lBZ$ev/!2BTr@=ٱB{fuG41~҉t7-ű*{I'H /V9ev]f{6Τ?˻]2Fx=7fxPf}ը9;K f¡#h6BpL H;t6y^2(2 ]*p0&c"]{:3B j&5[ZO);QGpO7 rvTsk L[N↲s,b8bHWLwOL1ˏ& c`,F'6Ʀc\'xVYkI%=)-iLr="$>v7XZ+ӛL"-6 \sDh.v 镞IsFb%dtR&Nb[Ƞ[+ SBgƂRQ^BGgoM9?l=lv},hf, ;sg 悜Fߤ!n<Bw: :#Q(9b^i ~NyJZ,'t[!xӣ+M_k:wA kݴ6ӑ7Dj1hk(ͪT^#PVn?T7mD6.q3T_8˜KRwVc F&{LYuz$IA.oD#7ёP&UFu 7116Vv6_R(aRR_XUPu}yG4j <ũ60,p"KC@zrOgtUE[vѝf腔5&5M::Lj_h"b(ʳm֝پEl@[t:g*}&E)_U8rz~rS98}ǾYtwf^viۦż+/Y7^w`Z՛)QSnX% O+uŔ_0,cDF/=xaakg>E4ʋ FL1 OWI2}'F =ôo>3sϙɏIt6YU31M&|½]=}R="qԷGg˻o:2~θמ^-ll㢢F= ?x{BeۣHucpgy2xP,Nӣ;R:EFb 퇺\m˻i1rPrѨKBsǔ~=.f<&9oդvt|nΔ]5V JZKz 6s[gswvz?%[^-{ DԳyO;MAh- QG&lFIS#sG{~}n|a`3bTJE"T7@aFEt[%L~wZ,mG. DeztCƎm%DߗwKۂ!ީ(aIO{?)U uS~oKYbX'{:(k1P8B/0D}N:;>\٦^?r,r?ٚ48N'=kX<~Ejv#G}{pT#߁PUU|F6v-[,QsD^*B`/g.¶72/6]OH 2pe^_,O8n?n`9~/WWYă(8[lԛm?m+l@Ī$]E[\ ,OtGOw @Oan,qr% \g{c,nx3U{LN,@ɒ?  
Uꎾ)h$3qz~zܟ;Q xO/yW@\vS/^{Ja{+sXGޙ r|g}5]jCjF,Q!#\Ggq\Nt战ZWDoG7WrPaټ!,P..!lI1׊]د/1VroQevfݼnb>r#՚{.4ItzxGEl ⟣](/榺IolZcwi ӫ~Uyxgg#@_:iM#6wUйfZniN3gm|yo7e=Od?bjټyf㢥.+{uCc_WƲz{R?x>z1!Q$ ~)jWetfe?5+d׌ @_+%[4gw/O15C,__Y퇄2'`E~&}cЯ&ֽ,##r*Frrẓ }k窻z1OC̹ zd?Sm`wgN7-\+۲ˤ{y5@IXZKm[ Ɖ/U0$!ݴg* e@6|hb:F:lZfؘgc.\%lt6}۳4L/jLP^P-E8oRQK&ŭ2xEyCGF=v2|DY.\%lY>f/W|lfV̕BEh<'dItXM E\kFqvfd,$H?<=pgw_YK51j[ֆ>nVSPcͅPV01Tn vfb@sA+]~ok{PUph Ϛ3奖ߌ-@HR% Ceycn7t:mZ?H ?^HJږ, *ִٕ+$_6(*."-O}څұ!QHsˡ##7̟[_|@:'ؕ]<ڒ| ^Q)Evjxjw  }v@>8h2_-z4|woѭɻ<<3Po J 0;w0;˫t۔c􉨉Y__޾aAzwˢjUTgH ݋o ])!3(UW5mzkin=,Ύp A%35UA.j-PCYM &xLMIfT0jd}~7ݹ?p^%X_Op 혺GڣShg;zG{ܷO:Vdpқ) &`n@  ɻ˓v.L'tY?0%lNQYxBR}WY_ϥ(+Qo6ڑ fVa":]g jZ, b[ׯDqdQ!dPH߃ڙ7e j[\(_H)ӧL}`nQ㲾2x|P#}p&evp J=  p- ן6F i<ŭ,Aձ46~éH~X⫯ q:q˝nnjٛJaL(gpkfEYUի BO<*b_:T2r|UQMӅzC{308OOq1;2)l%%-)18o jB3[ܨJTJG'>z}*gU4oX$7JVĢWYC;pۼ "Qmdz6-<{laFJx)0xX u۲Ԇ&ǰU+F{`w"BoaIKl=֞} ܨ-R<o~*dWUߢ '}[?XqێTϻ:!ΞXoD@ﯾV& $i>OS7MR!6sFeɵ3p&TKpecm'?u~r,PQ:,6jnle)OD]-yz,k톯gE0R<,vGjPPEV{a!w]8Bwrv!ft.Dsp p^}UqFq*Co7XLv=*cc:-L?tRj8PIcg]utٖ}o%e4EfWb87"qˆgTjrr{d`ܑP%pm_M¬ C65gԷꌀ=TޜGE TFjU!/O.Qb|=»0l捬fmpPVmם8ݑ8흜mru_*yO|],'rxu N+V-h sSҼ}^.nkp7i/k/$ĭ ZU`h(l fƥu)2 )F%bI}tX~^<a絺JTak Ilwޕm*JSo+$E6Jф/ /RT1zIUzWm[X}Kw\̉_a.^kL o>%'~*sK1`;М_.p{sdGzl:q|U-~PIxBXV92c0P`3wFpmn`:4Jw됛xPrzɶ#88!3g̭zz@BsbIZiЉkR. 9'azkɤp/J y""f pPOLdJt p`9;9O:\#j9>zʒZ3kXctΕ 9 OUv'W(B!NvtO;%ݮA&@P=|~܎/IxuGܛ  鹇+|m~b({dR+ߏi3ݪwzf!,@'` bD7;ڠ g7P5, ٭$u O{iʽIPM_/7ި#N<Si\ooO;ߵ;ʍa5=۲~k=UA+&uV]0_:ԍ\f /[bX wHCm>R&ٕtF=ҺKzCCg U;O4?_#1|ve@[}MÉ-C7=-+mtMǞWVFEֻ}/Z i ?J f_l"]i7s-m8ngkUCc ML88`X۪kjȔY2dž񆚆 NP--7uo1 l6X[m'$„`+lo*2\/1t\0@ۉC?tv(wԹp>A%fd]XyKpٯ5{I}O𞿪 @W^^g:K_j3.= i`̉zN?Tߜk_%=#uv _]Qp8#s3\tIoWSs h9EB߻pq_9ށ?7ag})=Рgm FPhP"tHO`Mƾ7]:^:6 _wy0Dk-8_I9K{0zIDAT#>S$Vwʏo>!]=Ǒ/Y`?/,ř 1_80X*_ 蒤a FUn0dd{[Un'8!}@<ߵཿЦ{vA06z߾Ƭ+>T,Ryśö́oVrG=i2<{{֟x]Ng=w؈k6z:k4xjjj]yD 7h8;ᮮ<gsťݻej;'v~ˏDݻuvLo.u)7szVqKWT붳MA`!mec%r"b;\y~c gܳg]l\_QHe:B{ =˹W{P מqqä%ڜh4} ElUVR;"ɍ!!RUVj7|iDFϾ\lh &ݚqЮӢտ\3z•d b0;eI`3vdR_76x>y6_pI{oE`7W9ʌL:0e ;s4"q]QWNkԲΓ$TVV,E%/*%+KH ;lߵ=Dm]1m׵Gy*Tr$3&nJ=IZ.9y<*|JW(pr Cn/SYZaPSI96,og?yGHbNӣwW47m>P9/$>555%7-Ȁg\.̭ ^:iQԽGRу-%'[K"iE6jaK5+7?WV#ۖqu\.m?ٔ_PB|yɕSl׸ߢ_v}G~`v_ͯLM5\}SQ#xyב92wӎ 2kCW{{V=76,͈( ;|5}Լ],,72#+4޲N#/[,3)ڪkCO 7uP3 ڎ¥ɷASOs/+Fz# q^ oͦס>=c!UcW/^<ݭHUTafૡ?YGHrrŊ0[.[5']19wvA-޸:^}!OiiJR]lttK/t=@SWmȤKVT=ݟxv_^^sxjS_X}h5b5}2թuORQ>nq[{4EL9""BubB>p/1{&t?[saʹi}ۗ'a *$> *EJ|Ō*JQ9i.`n$T%Dep%$KspL!wNrO-JD ++ח`rL]j}4;6zeeWrqIW):kWxjՂQ_Mw9B*?.TT@H@HTT@H@Hx ]UVV FR!!RR!!RRR!!R>2G ] >dug!Rބ. x_ێ+ LE#u:*ihhx/e5k?d2$ ͋R[VKKK"rp-auqm o!V.̰qĕ+WM%=zػw/_C~>q,?ր%?`lRqF, 1t޼jү0]nL'J-"ClCα$~]1eUԆ)>SԎ;~7 . 
{|̚5撝;w޺u>9_NL7S;3LX^NDmX(iUB['ˊ#1ӤG~}m>UHo޼ŋK 7n\QQѷlٲBߟ~ORQqµ]o !RJEDq}Y҄ӛQuAm_^[1ρ o}sM8eYl cacvR/s4U-4nx[eY6^LẏX:h$9:&l$#ŁW2Fޣ obk,*RhǾ+t<~/tNBBB֭CBB3m611ɉd6hРcǎٳ $lvVV֩S}||^.yݚ*--}:x{{kiiT۷o9rD.DQF蔔\x1$$D]hԩ"(;;{/KÆ ۴i~zzz}+))Q(=WK#Msn뇎1qǎ_Y3 5|z^u J$%WDLvY$~GzHu8U>v-[*|1[* {56c;uR>}3_?7FFup73.T#|>ܹs=zvZ˖- @ ̌rttLNN./2НKDϟkܸÇ_.氵էLRd…O~ID"ܹsʕ+r<;;:vhjj011?vΝi޼yXXX||<]xqDdnnnffvڲ2"JOO'OReEzZM;:]HdSN/W]bm:,6ɩ!{Tl՞U2/F:ݯ=tetC.T--Ν;7jԨQF+W$rMMM"ҥ˩S<<޸qcqq1)̭ H,--¢[n X/R,,,ӫP(:99ǃdr8]]]LNötwgfvz~уOaٽI*2 .> ~={&&&G1e2:FEEuv׮]/W_TTTkɊol\\܆ J̙3k  ?"bcc?B._ c} 9 UY^Ge%Y|K>s_mƭYq~ޔNq7GnՕ'[vM;5*6#u ^ۼn܏K We^߼TfU]޵se҈;D/?D%&F>Oe8]MDD쥟dK:Kݾ}…'d2>ODJ… %%}W^ZZڵk_S@ ~ mUUU/GguU(8P(p8ڵU`KK˴0TT*󳲲=zԯ_#GjiiT/R~}KTwaݴ /'zq)C-8h=MDtvDD(;79""v+&y Ox!9sTn=r;(ѳ%#F* [>T{1{g~)_y_׽ v,<8z6yԗ^ 6T%ɪ޵kכ7o\YfVVVEEEﯾe֒&Lx6:e˖Z:t׷E T}>//͘1C.[NwϞ=ۡC޻w}"֭[v% 㕖^|_rx#m!5*y^ WrER~kI I ++]+r*y^^^qM++'?%ޛW{dzB!;EԩSÆ Ҷm%FEE sB.PwFRGRA B*B* |(n FR!!RR!!RRR!!RR!!RRR!?J[[_: mmm&Mf۷WR58An+/±_n@@״iENJ':t ~9`qUkii,:_Kf oϕ=4~ԖGi1vO{jWD\4&e:ۘj3~+iii\.8''GCCۛjkkK$ghh666{wC$15kTש7s'N\|iӦ;w+**;wnYYٸq㬬|~~~K.^^k̙敕vR(Dݽ{w}}yIf͚uAGG'...((H"y{{xǏ޽;>>zݸqc.b "7o&'OVBEk׮:th^lق=/!!a2K.Æ :u{ƍOX.VZZ:e"RW\֭[>{S rrrΝ;'LrҥM6qܢ"q;v͛ڵ 9sJ3fLnnٳ555՛.((/AAA>ttt1bDbbbZZmۖwM0a֬Yqqq]tQhbbR]CLLvv_YYY1q1d$$dɉ\?² .@CT$df'}C=#o&"""""ADdgg*˽bѣGCCC7o^B,WTT7n\BBBppd "==zn:uTV %IRRRLL9laaaEEE="6mڄĈӧO׫Wy/:0;w㕗s8CCCT]]R.rRYnnn<\\\!ʊۻ8888???33S*6iҤܹsرc;vINNVQrrƍe2˻#R疈}Xu޿___А㙘TTTl|`# Idq#G;5M+V苴6=xZT2tll7o̙#JbccH$U͵*###.YCDUUU˗/m۶ŋ;V#,###"wuueXDt "244Tgjzzz-Zxp8/7'[,+))i׮]~~~{>rHxx}dJJJAAOVVFbb X[[DnnnͷBٺu/U*FeeeiiWII@ PܢT* D"ѭ[RRRbq\\D"qww,--9~9Te?TU#Sܷۄ+^)H/9} ·޽{͚5Rј&MԩSDTPPQ3ZUzzٳgܹ>/^lkk;v؂k׮,o``PTTDD;w T*6lbuVPP1bĺu"##ϟ^ڤIÇߪ^BǏ>}ヒ ym@u5c ͛7BKKK' be*((044Td2 y^^^744uև=ݯŞ9l}M긵=#0tR Ԕ2pǯf[2GI,++7448h߾= ®]DMzzzV,**jΝÆ 366prrxEEEDKKK]]$9::vϒl6Ū]֤IƍYq-Z|7zzz/DXL0 L}蚾9MOOWT|>?77֯__$U_YkI"y&oL{adddllnffֱcGH$,--(===11qذaBP=8JDG~[\ҤI D޽{+hu===޽;""; IDAT\;}F,f(H uX֬Yrʇ+|mmѣG>|822χR3gڄJI]  /8O ww%#--CVೃ    ] RuA]+ikU'*N6[%MxGpxB \fq>Z;{>}D!m^V~k q>pBR1Lpkpj7?WךRJRi-$/ʓnAn,LN|c's>K3M\(`ij`r:X΋1FIo˷LդȒVӊM+j|.EmJ\JzD$}g>n-sԚhRXy -.1rkԣg*>׽ܕ}BwRU!)@Hsή^JD$/+Kj.aaS;j2DfPqLuꎻ5WԌ98tݶM=Gܓςl|IFF|K 8Eul9f܌S TWhh0qS)O9wKO1[K , nm۾ֽ4j窌9l̵)Vd78|=s3֮zlfyV"Jظ6u L[w|~5mfHMe*-ֳ@L*#k93/;8/;|ÇW uKoh \ |1ܧiړ^7-u'۫W:{7:kdqQ:lrސ~=Gr8eWOպMK DL&~X|%TˬKj 1f/Q^R莵^P aC8qY2$x*aVu Υ"㲄N6qgi0ܽk*g'+!~)W{s4a@KoG`vӿveKr]^Փ:]nlV l~-&o}_ӶNm";׼p9&n@H(qQ^Z~o쭧_]=kQTm/?r>"etݠ_=W-"v?߀toIDDtcڗ[ulz* }yuKm@v^3N3tg;_R&;UZ& \%ON?O?oNemYDTU^R~Wqٽ^UeGG8___9O8"~<}ccM."j}3pXkΨH\il1E6h;gύ5 :t^Xdc摞I'@O՜Bsa?KL321.nj@D.e {SV%^ Ժ3_LeI=3> 7pOXkwyUU|Xyj Z8H(xK8j ,M&*'ft߸@lonJi?͹9I_Uidfn`HEDi.I1L(d/N~=5H_ψMGUDZ9jer*#W[煟_ug?puݣEDM'p7%gч/(^'rgb3Y3МE cw+ ߶)Ȩɏ˚0|\ϖ>pd1X{c30,sQ_G[[S4#+97o륺tJNk /ݿl`7;[mMMg>{;O30~ 0`$rۜ{-d47gm͜-4I^.)ȼoc{߽zwdao=+m""8:(RӺo+`@_jYނ~,t(gfi&WY8!Ʈ[ aHۂ?yk\_yyR/*ǭ`?ȳ6l\k}m,;+?Ycؚ#YO ?M1C|sKrOoYz&9Ǟ8( è_<&3Pk+%mdֲO'p{Xe[ٲtYɰб#0AU'A.B"9Qild:r"###N&JJ,`7$~?oGAUnRThݠauLM}skcfEF\bN9ҫK5+hq@'ֻu68BȺAEO;DU\e mU F$ e)L O(f ,VF,=ss 2X~NEZR.CHNhT l 7u *qO)0rvr QQ2{~wAi؁d{sdDτ% F|kLDzvV̘bE>SG41 "e񣻡 5<;:\ˊcc9LMv6kwu8yQQJfNlj>yFy>TuNyg~ 8ig,\Gֽי~4fދŮZ;p`ciac&Lez[aԯ55]۽iͲqU*IHUFt<^R/W5G)k{%sm-kl:ZcU--~߫{vp]*&C #Άڴ^.>k9~XU%dE"^HZZ2I~vъSxhh!"*8;02_/WlVUѽU>ZƬ'twlҦ.!}Þ;_SnNq"C4w2~"aTSF$k3` DzfomWƍ>0'Wx ^۸Y)%c6>5vҴQΕUHNe?&Lܸ*9zW'X%4Ur"j;^NAD"I@Z#aYYȠF+^^_ӺU?QUTECo0.;FM/X<ٹ0O`Lr(_ӎF٭Ld2bpP;g߁26e8%'yɶc2ԙ(ߑ4( / )|ϱFCup˗-7hf˗~X{: sK*yQqn]},Ve%IEeDKif WNEY׃7Ev}E "]7!t]u̐VG.*$~e^Lh3EUzfVefIףO#k]QQy={[:=LlR53zQG*^,?oΈǷdr~AEyrUǂ~m]uʫcw~jk̺b񉔧e,ee*I3Q^NdP&S͠BJ4߱ 0pAm]"-JLE^шCI_RL,~:ZtnŢ1z Xef/<ؽt\--H.T4yz_&[V%#">GATUt/2S|>ƹr#0boY)A"؛  
91'߯u*ǸWZCR~scaZut#YeĤD\flJv *$ێE($9h߆v.udeI)),1bF.N 䕥ScpVVVlN3&w 6G~Z_SDZbOV߷)sVVֳg|plvh;M#{E%-%y>2ȼ5WdL}mu]LS}-)Dz皭?X340D\.S~la+8>HwbeV%cKֶ0i9: 'sLd!^HSA,fs_јOolY}ikе{f7.ߓ>Աtݤ5@BO #p|XexU;z[_4ZQž؇GRqM=½_*JKTT/ݯiߴVynnAldc]bQ[+V1wWJ}yÂili>IDCt,:t挹A hKҲ;yҴ?v(\P8C=3"y/ޑ475/][ٛPeYV{T(ء իljpp3ğ&zQEҔ<))>ۦi7 փdf^ڱHycjo ]'"I˨K-vq`E'W?(|ü@@MT\.e2JTVVVVVVR?73kBrK*w3RsV'Y9q|&/!â'*3ZiF `D USS- 3L.KDȩ_`HDDtlH[o[5߿/#"ʛ)T)_} 8HQk*IS[v5BOsbu5uslKS1qt>B'!_dg#:_򠣂s GM[S&:no[2I̚Wxoژ8.IQp=T(cdNCvuN?'o.{}I7}n- <[Xۻuu;\-s=ݟם {שܹ&.ךduҏLD$KQ>]07@tm曟⣴[QJ\ny'<9Džgw#BGmՒl۬q;i䞫bi'<Ԟc{NzZ,?dmUXR{jU 4Mcni\'OHqWaUY n6[qzyGl+^jev`#9>SrH2s{-ʗgKT#&Ann.yߒ*lSc~Unv1/^l>QA|'ʉvzkg=b%1#xQΥU'Э?HfsB,9ljԹd}>~lEUqNқ IDAT}hwwyduV\fɥYqlAk\cXyGVV"V?uYo۴!,%tdIOusȉ3k>[*θqd"uߩ!QArG"KX"ÿ+d26h(UUfw;4i߳&3O^Hķ߯C݋GW>-lva׆ߌgmuԭ[.OٲdEQ$}E#d׳S}6[&-μ꣮UGUxx2gvof9p׆A&*\7~~L%W)du?OwnKާUƺK5I1X O*{ұ_?k%5zU#]Ft9ylkLXK߰/6֮[+EUpvbܔު2fLMLb W.HI$;߂Ʈs5p=JNSsa3a LMOSUY~F8#|dHӣKyqeŏ^w}zӺ2D.TiǴ8ɨGao6;+Ki!ѥB+KK5o…K,;Ó;dI:"iiRT~gdiZZTZp,2w27}Q87F['Nc] ҕcN߈w/Œ8eƭF L>}]Uvnzi[ΥA̔pam5=ɌMVϿ r22Xjz~.dJPFL}yjJGD+8>AxS+\ K{s$LǔvO"غ!E"b_}o9Z'/B>'sW !Dwՙge Er5ʉݎ3WYOݯȲɒd1 !TO´DEKDm(gÙJƆEE~ ڻ,Iڳ=o<]涬z^fڨߵcI s_^|%f [ss s닲rf˲o BH 6klhMWUAsVKeY{3&)mt iDM`VnjL279~"7mYNC?(%>&ƒ2zSF95wsr+ 05G_W_|X*^/JԜs]_t>p]W*h^ˣ]N$:G+7nY!GRۇ4y-z@Iۆ}/)A噍붯4<_+~kUSKm|xwa9rϮs-y?,K.ؙ{xn9MPd[khnw:wVV ٛn$v7N.\JLI_5֔W _-}#eS[w<->Mwt !OnHOk:WEo\Oѡu^-Rj765Z?]=uo5([l6I3LI,[`ACCÍ_+B_ydLL7aoj;u!\gg'!IF)nY or,۫Q%|>F%5wswGi/-ߺgiK5u;{`%Sɕ4gUg݁&mؾuWFte,qF[GsdWĕJ 8|bEqϻ쌵%R#ߘҟaBmIمLW댨Aߟh*95Y5c9ŭ{LкmP '}Z֥uZrZ.FP5zFMpxM?E!8us6kC5FOseɿBYNLRxC\CMYSW;L'E pq ƺɎ6]>Tyq֖wBzMWw6#n-0lZr^6ϭwlӅ+5`:lW,U^v1廓CMxLbA#J:_tߐSauAG.fK%&f@GĖԷJj*ǒ9Cg{3+9S]C:0@UUUy. EHtbjU' *SYp[ޛJ0hRc[=ƟaGSWJ% oXڛ΢HsTS16VaLh5k5t|[kj3'OsvCG3qKW;~'ԣ˂^.,3auWdQ7$|<.:+jFYP%8GJ.>Wi=tJ&7nLEo\e([Z{ ):Tރ컪 W*2o,WeRP[TI+}-P +t_ eyYAyؤ戻4)?7'"M<_`ݭЃzY׌=rzu]oVjX`ʶH$j>ɯ-J(Jt 7lhF&t:wf6PU%e=V; ,(ȳ Cdğ F ~Syz-( !̵P "B .Z~>}gdq&ۡ\bME_ON,vPUG>TPP'RUU{;ZPA@j!IR @`RR& F!YjTި C;sȻl{V7LX7Vd 6;dVhIp(А$# ҈}_ݑ<g8Ͻl@MkV_;Mr9}%)8pѨ# `"QoДhz?! ӹs*xnMn=k2ryO]W]kl21kfi)`M膴dI ņ~Vua=? +I”Mk.ʸO` RZ([(v&TVRNtz=alP-HauE$dhjl=-\0UzXZ۬2M=z ?V&2KO6 ]MȦJ-hjc;;u+SVCBjEBZe!ZZR[Úy )Vi:>6lMS;<Y+z3t} ǚ3sG;rŅlaFt6e9NhIR\ N ,޴deۍmʶ6iNSŔ>̅`YP< KM& r(.Fd89?-޾Nפ~ڳtPq87&DJ_wy 2J%?N7p$ O/KбFlf7mfA; }Qӂie̻jw 3vl֩*҅v-4NB=lwN Pgeq߸|4’C[k[17kP=9?]wzZ\Z>ccoWf\FҎ̒Hq"`OfKoZi!@-z keT|R5RXX+c3:9F ,]aKO1lㆲ\fF|5~גfoB!:kt_r,;C( !Bu4zVo|;B!B!E!D33ZKkNjL3126.gQQgh\P+ scF*mb3K+\F`jT8ەFj7[2ڊaVPTc*L$٫Șjd?X`F3622)տxz"} >Bdj*᠔4ZyD&[Xk T5LPuR!\,Sk:}L @h:R:܌&Se ;;*FcjrR%FzjUwtE!zY.>h!B B!&|!Be6B!R0 +Dܮa B!+[B!R'VB7v} [B!^q‡BV8S‡B!Ä!B>B! B!„!Ba‡B!0C!B!B!LB!0C!B!B!LB!&|!B>B! B!„!BeUz6|||B骫R!^ \.+!=B! B!„!Ba‡B!0C!B!B!LBDKH6+YF*=d3|Mb+'$'$}LmcM r PFYuFvsq_@k[>F&&|W5q@&/ 3i',vTˌ/l鹝7׈/ll [KO232 G^ݿf-'؛hzuٕ3}x _7Lvx@Ym8}g~.v 0gp6 |$Ś}8v|(½|Iַ_yS';:xdݟ]p5^Pߔ6eU˨#RJ%9cВ\J褂' %Z5t{WmޛOq~ ~QB/~l^4zmjkCz{SIո2$f&37 Fo^7׮~%ˬ/׻]Y}/qiP{Ru%i{%c' d\ߵ&nS6o~x@&&/1vۙbRO؎;p7gzmgvmNP[:p.ϬOA9-7xD61a2Sw.MSo9e1k3a1&RGf8G֝à~߷dv8}Gds+?4J]udPmXz8OĀE ͦH[2WyS9hjaGϨh5$T氭GS ?E+{]in9`shqn6SȧF{x3ۃ' lCbAџ {OfWS7# sgu^@2nggJ_=s_Z9-pY Sr@ΦdŝvP@\+,K/y2Oj=gӅMjY?^=M|jܕ cˆ!]-6?)M3p\nZaN &0)yyƄ!Vu"ɸO6{׬پ(„ӽǔfu5:E17/!/os4mgz.Z1*gkc\8(+53/=2cNo ͭwϢ }Dl )J\19јαş8Rjspl׷k -_%nǟ<w~[-|z(޳xZJڳ%Mo$eWpA@0j! 
@&~gR~x^yפZ*5YYOȺ/~3K' %2RI3y!Qk2 ى c7_hѫ'5nm9׮9#[ %Szl^p1;$ PhqtH"WM!~f sZChߜXH 7eմVb4}5CfFcFxA$yY|nοCtMzpF$ż7 .@X:=KQ rnrx"|ޖ0 ~'H;R8ŝnȮD7NYAқi*'/qV_>'_x,c߁}ԧ[y)B.`+WJ5NgHn/IAlpv7Yڷqu"!8 '+G/Bř{ODt1n7'2/؁@mу DU?5d#!HB<^6 ?!Pܖ c琺iѵgڹ[&^oSs%"Q+],y'!"Qq_bXQxlyJZ%ɡBacX.9wHPwй<{u( [.- rnQiNگ66s¬RfԊ6.,{d:|@p7q_bI?ZaЁiKEiǿO1O3U@8GNgKjBA[pئ*myPHz&H$I IRMRc 8zpLlجEJh @ K!Is X9)9d>2&: ]zЉP%&쐈w%+Ngc'!T?$HLttttt+wgʝ{6E@d8xn7vIL& R&eVx v~][$P wW) ݼ"'$GtRt=4dLhI.hjqtfk '4t^P IDAT\'ӓ͓oeGnvdSz=!4 G&|犠 M2' ]LyύRxnj2 f{k+.tC9:EedeTn\uuj[?xGQr讍k;~( Le::ڈc,(@ 81DT/<vqS}^.ok^9-}\()jc3wA4?~ESizs573Pzy3RrϻQJKIۅz 8Z3"+{˦䰝=gh6W[Gf[m}jAJZ)}^K- zVJ~=<_{rsDzT!wغFL+B<~:qa0'`ظy5qsdc B[^ԑw'wpvh7FE,F!(!\,{=9 T7LYGׄ#C1]9 { ~^]EDwn O59MNGNuOhΤN2"ZEZC=9lЙ#}UVt( [*#lLf`gG&έW?_LPnhq56kRX760T ZoRrq'ᮊ#I^+/Orm6nҪ!VTR"8+lGwlµb,zi[,c6iY/_MςQ/uCWgm W [jcc`zs L xoyҦ ؾrCár"ov,yǷnO8~N"F;8;bֽ .\tʒ]^̎7ic9{0L``dLcOMgI: feU|vlϕF-tKG~Z8k[Q/n1 [lʐ]y)YqԞ}idKe$G8mndf_ }@夅ȥCf2P/JeI|wC27ۄM U\μ ;HuVk)Ղ]Ehj7(Bquu}Tƀs?2<Bɲ6\Yo8vzm_p>|,Π)OVTM;yXf:f=w䘚߷VwbT_ qfyJ ˸*^kw]]lp|;n.PՔzܵpk\Yf㔥jn9X?-X]?IWXNK t6#LuIO,>>\.?qI/ڟjۡ붍-|ܦ5߼%q;Ԭ9tη8~.+`?h:/*=ҝe׿4q锔Njkm΍.\jZZŽ{LOQ `lll\%)AV%|4nbNSVܬ횹`Nbqѫ݂AD7; ;s[2"|0*T3ʦuLIw)r2f; S'0k;;R YOͅӍX]򦥻Q}Bؘ(̻ra@]i]Gk@MȧadN0} S&snliY􄘫.[~ qhgyfBijd _6D~4%@Mѱ鷤h㗩)vN4l5г8Kv/We@71 LqrG-rW}|>>;? ewp2iQ^d֎ik)#,5">=CCT@nDV_o𝷇ý'ݮsQ.3ƬpG|Ea6GQ&wcI!I=څʶ':9J">R"V78J鎠7sJ:ZO,w,-\L}fv0nRG%(#_t3&K2LQG=qТiz(ꈖlHhN#)Ury:CKLY4y:PBJ%kXosw{S'ϐo4_Hj=ukǝC14e_uub;cA4;>ԫ]4 g\x.ý=gmr{:#ܞNSpov$j}jh 5UZ0Jiu;N3wgˡ~> t߶ߩ8\\}&em9 ٞȕӳEcRC[%'{2NMlS*'qX*Tz{G5~^Sź]`an7 L]k,qJ6%TOfp}ݵ(,̍2]cгRrX n͛8PG;.;ҮKC:}c& PiͿLsN*J `fkzEޥҴZY~UXjψy{\vcVݝpYnGH/N[tٶjIL*ٔU_qLH͝ҕܻ3VͭVW]m[yiEbY#ɯѬ6^&A,8 ,{F빕LJATRGϻt1W~.͊mggE!z>:) ]lVV̻w1TLch6o|pwz+d=dc BH mcʰ2Zt9+OċKzy~x[oS󋤰JGso֓FACf&vf;K3g$+r:Y )3gPSZ> 'XA!,[mX?t6e[u4'©Gk' cmA*rj%z|>94S1afMr,t1چ iz!KepVH,J m#/;xhu>!z>Z<ke)G)(>8+ <;N$㓌LeUB~׻{(rn ,Aft3x S_TJ Q8{\Og4b݄;PO w'm=3|0Yѧ!W~,9ԔGuݧzx>=Uq/sv2#Ӳe^/9 V vߴڂOv7g^0ZRUy=\ [MqUs&F'i,(F\])B zۺg8P'μpgoÐIrB!gߦ Vޕ/j%9Ы1v PgM97|Dww+'3Nz98܁z; #;N|.N6o{01hd`',MfѬT]rX@Vn}r93y05eYwhBmO%7,wDke73 ra H)[2^9<ЅIKvl#$u~F]udP[6tO4ȒmWj/zG;o:kmKW%q~8qڸ]8VTR9.R%tܹ!m0ycdRf̆P5 ʤlCUgM):VFLreK$x!@WLN4f}slg+E~}l6h|]6P쮎_Kj{2%Szl^p1o[NKۣc?*P9I=,Ȝ{N f#S&'Lhθc13GG;y:zk"Nm9,eu+_l~KvDP Ȧ|Qo}sϟ@~E~S0pBV]+jbd c-I]9S.4 =w9 Z_E~S+Nw%jԼ 5νx)MNNHz(7B{7iKI}7y|˫dEsOϮ_~tL̊4H۵^ 2xOB᭧*ZY9Z^9_ D! +RHP[-㿭VAtԟ7rU3(i=5:ٝ_NU(S ܬ2~ٱߧOHSy%랓=wI{feĄ!+vlfiC Y::V`TQyBl sɱLF,@HTѳ& }5D@ II Wfn3._$ mv-e) o41 Xvd`EhJMCRK ˼[0iIӥ4 ȱU HeH,libj# 7&-,ml 뇲,jjKqNR|eP_݂L2?ؐ=2r|$Ix'gtq VAەc?74@'E†}DZ^!%8n`Hk chI߳ۈ?!? |5_E;F1~7Ѐ4Έސ[sZg.z##I;Ν?6%6f}VۣZLBy1\Yv\G׫)Yݹ?}}HeL[gbRnV㋬>RpoIB}ɜCY|wyqWea[wvo=UKϿzæI婫h -\=:A3nW+WA(ˮc1pBU];[',ۄH7,9Lt7_;2[r=yٶmw?7J3$X:pr'4t+z}Xb_V-ىY˅{"nn6 |d@dv1mC\3<>P-Gˆ;\"=IGo z(:|@?eVficC) pH$ M("SMI ,]+/q<ٍ!$%x|iֶ.yV^ :RT-7ݒx 3R o\gB\ms#y|POoy2d Ay et7* xB#Ow&p7uWۙ˿A@D;lgЉ #94')s@{2*RBlOɫ{s}u+/[zv#'7MƽcMޗc?zZ&"ֵkDwAf&e~gαu=5sDBֳǂxt6"@q9l+$j;q!&|PZ,w cCPnCԟ->fu:}t!=H+z{LZCTiٜ.##yɳ.e㑳-]aG 6=BPzx?tЃ[&}V/}ER 2s;|ْ~.4]=W{d4*q{Z,] WyUQǮ(SW;L'E {6VO֔VI"τلk'G};ĤiZj6w?'k;%VwdŭS2wx^PȄE.gowʒkmq2*GۺR^no r3 UT//4Q[z IDATwxSbڸIXQIPPtmž?3D#rΧ5~I])JB64˝߶Q>mOg2^X^祎NuQo۾3/N2>O$%雿Gk8멅ؙWmS,#&l:֝j0#|ӎl4Ty b1kO7~SW^KƸ [9/)d& o_/Qqogw.ykΙʇ֊gdD;tL)"|zrﮈh6!1xYCìړsZ9My>݄eaޓݕTi,N%[tf"(BKŇ/sز!ke1_wܲ~%c ywoCӲAt}gΟ}8O)֒9y3ێPPhl:u83wݩUX:fӲ B=_ZkM˂BFXW&WB>j?͕L1fxxΓv8ѓ̌Bɿp'?=?ط;ߜ/ι[!lЫIG;{ Ba‡Bb?H۰p^DnKD^>!֍ J)Z 1`Q`ðkU^StHP}g \̇g =;jط"8BA׈RIo\M!#'BAg}Ba"Erd{zh9&6$>'! 
KLMߝĊ@@(RzXiBe<mPw҆7e6R*2/2n;|J;'yy @Ha/e(*UhV[gVJVmU-.T ATe ,`rk{?޻{s3FyL']Ǒ}GlQw,-̠+ߘ<0ݜ[]%a 7ӊbd ̇Я?'Vĥ|tT2?{ 3@3G?ʯ?os(- x%y{e,g./#C>Pe~]޲:!{mrwcl>by ^RuMǮ=(ǷWhYw?2ZzW <[NTiɽ>QUdE^c}VȻPnGIQB K@ ^/2k[~z=JUY [>q }Fէr\Z–߫Nq0Qy?K Y1;3nP*$Ti*A}@ +ʋǰ ,l({'k:KPgKnpBatK njEUN;&Ӱ̠Rx@  Ghʚ V4NZx ӳik7,GC ߜhV=LTa$R9x[nxVQq챮m,'\[;$T@ {ÛK=0r0BA sݬ&bֻstK⢍DŽ s@oG #j "uPIf0uU7^hOGP"Sҙz2,7MAQWurгKF7'>nTyǧit @ ϋL"ui&N4adh j0eA K: gcFp'MZ @<go޽z-Q)|¦N[Dņկn Jvlw&|fj >xLnkz\ ym( 0m]4%aT=MA^Bu/%;lߘɂ"<]rqwF-X^߻H> vG^%g>VDIyA| 9|̉t8:NLhsui6ј^~_Ɓ"4M Tvo>K`g͜בO׉ D%>ӿ_ULwps$&dI캭6xL|D]b Qqb3 fm}(}ힱF1+rVMO @Qdte^{R*;YHe:nXNԔ:}OB _굋jk"}Oum0>í j iEϹk'8[hEwrD,H!3HeZ A H銦i͋-g5'ճ釿}Kt5ݥl<ęN>]EZF [n;TPYk*?cOrJXj7iխ=G Ȋ-kF2]R@?`~%w1Ruwv͝8Ӈo7wo7klZCYه5[; TF3 ߿bY}W.?X?z5[G\4,W`KɜLǔV/%|[jm{KO243䋟'~m@ H!@ RA>]<#P%}|D{]973>QM6ِ{|*3 nScx'F4}GXд+ӕMCOrYǷpU)IҶO^@ c.-A.¦" C!Y[V :V5"<ۙ:=W=:6W޻UE/Q J@ ս5SD㩗ݾUIR Wzoq0:]q=ƣ&Hrﴽ̪ z;^c9jڕ=ܜg `ᕃRs'wwh $Uwr{mBjZ0)Mӊ fg{SF{3qSgT}'|4çr昦$~4sքoݕ*~UͿRΆeg%]V;urqmvA (`$Vɮkº))Gmbh@ 82jebacjDrPʆn|<{[`E/ DAN #ȇo*ܗK8ڻV#,6|֨u7)VU"qzQul[ !z .]8~ĆlUo'a TYD;3,m7hElC?λ52 }fo^kƮX4mreK+Ye 8?_/ؼ(|#/|_={ 1뵱~Z9E?VskQ@mv*?7h쌺<.rk g0HlTzUܷn|_yQ`^?.3ʳlsk~ӻ}\a~TWHly(-Q^Ǚ2 A")Ol-m슥Mߟx1 }sѼQ,Q8nXVc^TZq̙? _lݝ#9`lǡLosS^`{{Sx99[ )|s'Z4}w9}/N .>fL}w4Jf罈m;oЦ^M':k_!M=>෦t ޜ1آ8%>wIWb!C=9)?w&1j{xNf.ݏPqz *S+s2`ʼTR9GPeX,mxg'k,eoƭ}-b-*ugv"]q׀Cp οpa˚;̩Y*ܲ6C?GwN%H50ښڮ0}m1PkD%^c_Vp)on6[(H sR!x<#!-eSs&aiwO.]),{zR]G|s{ϝ pKXnx ;#A iͥ770a;fc&ܽQM=@~vGTjE䯶OƛҲ䭑0}Fk'Z&*{CTv᷿=iZ|)z {`ų?926g2a?l`հQM}>1ckw(ٛ|Oh+ֺw§~A%.N?0ήM̜t4iQYNov*MۇMJN?m,jgؘ%WR]fK޾5e("|CArf:r;9 "a^fܥБBnSy70`LQWj 1*- U} ~{ZnuNު(}an^q@/+ƭ0&a؋MODl53c#WgWq{/8枙Tޑ5vEE?2@1v~Q1<1uq㧆;V o4LVY41nsymɥ~7.YW:QWTsU⓴dsA驘2ڨcL ?ި'"oׁ;>p.cP4iwi &h/ݎ }6\xQsj}'eŪR"? 19<|^x6 ?ؘ=p@{c=_̺k%5̣ m^V-k\F3$6&mA:LL2XbЉ*zt\pFw;7g]k{}Njy )xxͽER8w`ktX>ٗF@[g&:P#lѧ/ x폞y`JM^j l(Gn\[F(Uil]`Ue6 $YXBtw&gUeeu*qfMf?|Y^4-Et+~FZWѵ"IaiV\ԨoIT!肏jf!0c8~(AE),!)o J[y蓙P H=gOFi'{NVJqyfJ]qQ_!c^VGxI|ˤL݆9.Eŕ¹AN|6:$-)@=[ﺅqޘoXxShIyOQc#qi][1w֜ IDATyߚFR|n%ɵq@<G8}uJYuw0,wo} ?c6&,Lx@N2vnVVwH .)3jueOI r_;{t/vWfN >xP?A>x-7rH3 % ym( 0m]4}HpF.F4:.-O A>v<3&ݨ RUL0Ay,ܼX psh*K"&$x̻z"D@9{s&Wr_QR4~0,#Ӯ^ͬ%#h@7FI 6q2f\s_o ^Qsn 0]W0'0|]bRQe~85h` ߂QIFB]y>;8)Ͽ~)P) ʿV`߅ˤ(bn9},DS k;PF]T@J"5V2:tJ;] KTU'͌ЈUj@w&*Db}4dɦV#.O]UL}{ZN_UUazԞ:(0yUwdV}lX Wȫ&ޫjLDfU9V@dyVn+++Ki KY\ x3e4kC4; [`v.֗b\0{ϣl`s^,эK'Kzsl zb" ?h ? 5R:_`e䵕[p=`y#az9fh23\Dr3hpt ko{y]0(lڣB.pD6\%=xؠ1 Bק\:r^g3H2쮼6B91J + qp[KtO/54=#lԅzA`}JKHcǧ.-hd: ěR q3(tHwlK`AP&&;rP?g 40{P%u^Op k{v/QnF͸t,Wndp.ZG'.PTT倈zy+6\wi-p"E5UT@ϣ],3zxy4˔n ݝ-x37WȕJIEFzJ`ݜ\^ɺ km.#]Y!Q*y)I҆|ŭKEU2\piۜh 'HR--+LüFܭR^q/1[vj:CS]GQS%6sf@Ow*$Jyuƕ5t'/z%KS j*;GLF 69sf79WOjR&*RDup7,!ʽY ǡBg@uջ^W',PkY"R)Ⱦ]4c?^K¬ )ɾSNZ{fC)ǔ.k2QY&5bcZYeTQJU;t&iԖTȔJ"fv**Ք{:qiZGq3SD9x„Kuڱ<,H|GN&N=C-H!hH1GH0Fȴ`hkZ`beŢL}w@%Ww (V~L^'Geq9Lis?V6*QR"oIRa9t8tLԘը5T . 6VyՅz☝tkeA _&tJfzu.Fi àSoPDgQ<A><%tV(oS^+ SF//xM wVWTt}qVoNHy%<먺DYUH枃bNHp0eGWm]M*c)\HRwcP([ۇjLTF+ @Jj[4݆01:&!ؑhv6 s0UM|R5 A&=AWK``P.܆,N<ed`؆زK<;#}>Z+f:Xaw{{1Nv|A!]030SJR@t@f?,VZ vVQnjI N_am}pozM2Âc҅{3(uu{qLʪǂVuFN^-SԠ* VZVeBÒCpl!hR5 @9Ԥ %0 nCsP0sKA)?VVMrDR +Ne,~$,1)iҍ:aa捫O%SNFD)C #ȫF^n:OBn=IhPW-[{pV.QP86­/s{,!;xűv l8~qs!.-䚒J=֖޹0YCz 8,߀}5%+maӯO^+08@f`o`+.oaC0P)r ӆƇ0^! srZؠ-)׍t0-. 
ՉJ@St)MӐg2v{;]v|հ#I8PbiZ & :ślcu RYt8)WTe\1th0ZSeЉ͉:D7;ɀz(]@H4Akq Edcf$eYFYSW&awV]=]$>bz8 WE οi/]*{ޢ$Tth-Ƽ֭u Y>xA K¦(.ZE7z&$|dEU@ 0޷8eo6;z%@]iB̌7*{EA!]xp۠hn;PK.t1wP0U(@ .@Z @k֭[h T'!555--|xIE@ Т @ $@ H!@ ^Z>p8 >om‰@ /E @@ @  >xSL`N4+ru oZAI羊@axyٙB\wwls&偃,;WGH!*,mZBSfl&ljP}vXia'/FMJb" \^y@_am:~޽{"x ]eD[`@1tn[fP&bZx8wS{G|[0dXw1I{5KCAJ3ˮ%- [24_e{3IN~jz6+mXgM|kpȘR!F'KU&jL޾|J]GqJOׁ񅴩4%!#H!D[P U`_L"dZBdjc_mbVT[6>^[sP4JV7T|22hvn;W[v_l)ĞklI0l$ohi #" ϠJw?2m95IqYb9 Ppp kza?1($ >huYqkF &HM]{vPUMF:woh݂]/,>5 _d5{sIm7.:,Q'4%u-.O5RH%Ӗ i_Z@)j. M5թqH tCDPJ.|WZ=z_:A,9O`:v7ŒT9-4@@ղ߇ty}ك^.:-yN*T] wv3H/IPg;QHAhGd>ժs=ޛmɚV߰"MJ&-4ؙ3FJ\veߦCֳX 5_[Udu/ϟ_zp&E)= |;Ea՟ ^&P(p?y QVC /}U}_1Ӕo°P1kˢfC i*EtSA7*t]Yn[ C/bTgNx2d,uQ wwh8yn˩ܑr-.TA-CuC1] Ҩ£{ue˒+,Ba-gPݩRL ?k,ۭ' )ؠNy0_:JXLD]޶ky-;;?q|FVx͘{/MY4֮ X ޑ$BAs gݱ#9ۤ(3B+̚C G ̃QnSFOnLew6|Йox1q)]ctUI߮hъ9wՂ@w i"?YdQ)m݉GmbɂU.3FLjwBQ㚏J v|ۉ$Ŀ#9#6.hrZueF"Sܾ[vqk xbVJ֓fYFFQm%g'M|23$%MQG7>X#ܩ.WܪMbkreQ ۳iGZRQL*JT9)F\ ҠnpY!NO5Gk{~sxyVKuγl&/ox;.s2;q^M1=YMsѸM&ߩ# I׾Lw9fT߁e1_-Ք&EdYviZ1{FH"1{E'EerPp峬o[\Y[J[z[r p{t.?4TV}JO`$`90uP(oWJq #rӳ `N/H#FL(WɫSⲵ<n+E2aDξRj³kToB@/Zӹmډ "MItf>Nl%ڨLi˜QC&SsM8IB.: :]&VaH|ե1ZEc<dQM[-m8H;UFchPI:`tmӸ8%I֛ \uy7f.pXT#H)kH.hRuFf*@,7YaM#OL[n~7aeFB!N uBqiN7_yq㛊SzdPʊ{Eu_})|RoU7yoaRgnrAWߏs`;3{9N@}hz^"3nNd@ @ U+ڲGMŻX`P+4S}x}iy3wr،4(^ t@V,`JF-JS -/l IVk %>TD0"$ #(XK51YNd U!gDuqTLRy>\s -5!hV̶K ص> ɴ9;T).qw"\IDƾUj[o5w*Wl&'40Mh"\Pf$|'tF<' Vl]~7+rfb;tzSϫgYWk-om; "N"y.(d}PecPvQTS;틱l`h3t5IT[+t+v3=hː^a* cAZIRFR1]0SztYlPp1 6սO]٦YgcpGؔHJxؾ.WLL)#QineET?a;ԿU90F|c؛|kia5IMQHuڭ֠)#^=O+yk8&\\m6hfx/')qUBLav2lQ >Z`79"Ӓ+]"{ 8+9qw1q}mP8G{Й8pH#8Hq9O 0ފ҃ZC|w6D Ç@ ^EIńzɇdlM2^<^"NY̸kRUNY_Zk h~|eIX[,;%6Ř1)bAP OF4u_qBGKΝ|؞.g2kԌ. DcHMG]ml.\]j8^8e2#K;M%H ڛ $`X0Ɠ꒞B漊?x,]\DZKm#VbW)pJ%f~3?Z?F(.ʟ3s&PwϿ5b~w 2M 8Pw>z蓣b8ɛ=eW2A)L+c7=9t*_IF±8)7MdRtjyyЧ%w+.< ӟ#-8ײ)/'ϘoŮ\SP%7#˄a_4׊ l 1&9Cv?$8"'''T 9н{w 0X &ug1n{ogx]BS;iWmGJB*ʜف!ϭD e(Q#I@<'6%ɜ%,@/E5@7As`yDjOd*@ @@ @ eC ;;[T ג_<TC``L&k0kvHmIDAT寿TR;wJd$ѥdL +D1h7xΜ:lhVW~]*eIW+~?-~hC'N7gUtT0 L`[vY~ګz\ƀo҉#^_ɝ{8SWC%xm!Yuy޽7Vk6߭5E]JZ,Fyf3:dw|Y)y61pSވӵ܁r?:JJfР}:|Nl*ao(+?k6:~$FqĿQ*;?xA|t\۹2FU|t$Vgu$`wyXܳ:0^8rg VK>> OogFCV-ST!V{ {oW,uQƻQ .hQURqZh;)㍢F%n۔lkjK?! Lox0܍m0[-uCQ,]ٸw@WOIs`ϥ7>O4|}i Ԝeㅎ+qإK| .wd˶㸚Lz2HRP,Ih.*lݕ1çPz>уǧ/+ .=X3-PSSqBE#6!ɦ 3 Ew+7es 7_.Wdr6qes Ok&wo5\ÍC#|mV/joy H͋-{ʒqy(Ҵ;֝нS__ۃȿx#vPҝ+`Եc&B}Uz/kb/ ?gd;n+ P&59bk:k5\ :ebeKްNaD#3׮uydG10ݍsm*a8rӛ{^}X:y؆O^_@סk K=vAiU7mUp ]'q/+,祝feDrk?bX=2ƧR Dkj6mQB>p:vݻ[: {}MޯwZIuwSo?akrm2+Hee[GiNϳq)\[M8u_~?pIٍ,GYG\ҲUKS.5d -ƶWo;KnvΜ-.MY'e]!ݎ󵷶al޻%Q r­kڌf.e1`$ QFݢ NMcr\n0e2\׻ǯ5+5 ̰{7^i= v4ܮg/DNWF}!}#xM/&v>\R*q֘%ڋk%=h|_㩘NviH9<%.ՊZɞ?ϾbѮ>UW5b Ƭ]-cC7>KS6ty'OVmy^\0)NH.ps뒾&u:hwQޑ7D%)lHXYn*兄mUZ4 8,Hŋ/6飭o5 >(=3Zwo,5+2~қY*[&[;sͽjnl}ӞXe7n3`% PŞ#=A>"aNJܿW7ڊN.}nV_V<2BSzP%dA,IwvgV?L)@u^Z1i^={C<;mW.CM<>4aPsТϹmQ#:i6S[({M%eŵꮙaR!#ŏy/enk=ܫG/+ڝ5o֨(E*O;3! 
({t?~KqMQݲƅb)~w;*I\dVԎ(^JP|0N;%$T:z]kmj9P[QЮ%6܃芿|g*ZFHe+ۼ( H!$S?9!gkpr})O^B420ƣG熷8'|궤S`cpx豲7;!&$TJ}YGKe1 +ō!>Mq / ^P,L)iȠS=݁c٥3P]Q*[)\.qM,IX)xBÒvl}SSkjI#ԓ`Flg.v ۝ຆFo:@/ano6/ ͜z> ~V"R3`9ʢ*\ e5}zϧ/R[ $mz21vf<>ܾr;'֧`فd wdԁ : G=A0jYܤWSj _VyգϞ hgLUeỤЛ_]?=$I+ B2me1;vmZrOr/ԡϦFp H;W:iq G|wγYj=ռ'hOo_GG?zg鲜2x8d51*.<4 tW{_'_qk4%[ޖ\u% TT_Fϰw'3w\IɼћqAzkiv>]Ot'{ xŝ9?=Զ,DDdwH<&;6+tS_d =0r}/i(]f#kɈ7^wkʼLꃥѯ-]`0}ӧdu^B9۲3G)D zݟ<6o}ڲ;*s ^5VVcҜ:uMӒ yvs]vOnIIDʔuL4"g]鋇*:fQ[y,:wkZ|q);mޱ.EWQ qtu[l˒lQ%XsbtKiYr|Nnnnk.> _ˁuiމg\\+wrfz;4D"玗<𑼋efC+ ʦ]YP/X<@>@>@@>@>@>@@>@>@>@>>@>@'?{0IENDB`pydantic-1.10.14/docs/img/vs_code_07.png000066400000000000000000001230461455251250200176420ustar00rootroot00000000000000PNG  IHDR?s2 *sBITOtEXtSoftwareShutterc IDATxy@u333 (xfbՖvСJS6ZMVKq.;CKMRIc`yfQ"0=q~&!|HopYs6xrN&/~1aGrG,v6ZA0*{PtH6g.[Lp׀j*(`eQ5{i\ͭ&WfEC5;ZZri qq\s+ ^0|Xw;M ,56gn%fr)1pڜ|e6] =HD~)wO|t3RDžmO}yVr?r3IS|st}ȹc `qȦ.f 8Kc52]iz܍n;םbWU_;ÐEޜ兛~>T9%}=ƨ88\՝SV=aw7V_zU+i!"umơŗJq3\m Oxt6_w[r/DHCozح_/\"xC>@%f`H[鍽5nȜ<Ȗ7mD$w|JvF#4,_e_cfH-5;߽$=~uvuxb[n9k,xuǵ6VyC φ)w$l~7V.YZWEe&تsN8ciaI*ۢ7m-[M&^=ז$FeD o*9r};kiDV<] ̿ԋ셟XsfrΜ14eyJ=CA[mTzo $+a 4<> M{i응Իꃗv`&WL}ҤJ^sO=xϽC%|kP^ ˘$]1CD|Msm~fѰFD܁m^vi5;Dr=RzTsfGe46ltkuIt[ 9m8ޙcu!`Č~).":_ _M؉2*o7jǧX)mDd,/kDgmUIUcls|-tR}}`:v;|6gco~AL!'Cao͢k7ԋ⧍o'x׭čS{y)#c ++a&}}oN?iȒ=Wt ۏD-N TdQpP2i,Ppoi;YkEo}ś!qPZ%/Xf})C5̩~9ȈsJxuɕ+ZZ(z蘁{x),d~y[0--tJ8[5gNyTCs[is=7#fdrSE{OA >x~$">WKv˗6&Gh͜#Sf XGq3d=6\Waoj- Af%L訥&#+q,69qCF\n7Xscv<"q&6ah)}wX!krH^ojo{ޛXb.Z"r4&᫏4ىL ^>a=;]DTVSY:/״ 1]^屝X%9K IUE?kFjڬg(D@ncùm<# s6TU7.M~=P>ǨCt]i{uq D"B=,-k-OLmz?kok,:VwQ-S8cv}Xa;W*;19? 9!uW7D6Gdm)ʚy|o_|z. 7543Vik98fdC~۩њSM=iK 1y6]P2Db利xir?%\4/9Kw5dݫI ic{]_0exDCDa1ôӈ'BMrr{gNKX%rF}(,cKB:9oQ,9mY*^ZKYk~%w[ZupvmyvR p5Y%%D$ Ć7PۛY_&xs=yq6Dn{9ޔms[mF8Nփ3pvƺ9ݻ`0(\Gf)\%)\%LJ4ڄhYH/OT5F 7dydXkyum&*a<9"DǞطli~3?uHx8juo= ykz3qX&LzqHo0?Ejn6ջ6D'xFj16|WMq)\%yW}\_V2,>js:^6>.CD#?8y7nwJAxxE"{<8 " +ns+qAp0ߧ{lW݄ͤ@v5gNK֦h&c '`_֚7coЃ9_kM      @O" ł,9g~YQ{L{U^U\p[۸}tv; ޞ5 ^v¹\r%INV59YŵI5-D|q=!Ryi @=pMA~9kV˳\hQRp)muP\МxgST)7;bm@Ԭ mfk۬%vҰOD>lrj\`ο͗ *ΝP쐵~wM ޣg;J^]C?s8sz׮ZՍFDmu[/o:NUʝ{=bj'&/!%;_HiC$\wqxKL.!Q gh|Kn_ٖՖ.rd }AXʾ,>?4p~|z\)Y-e=]hČXQnFYqՕy?k03Vcߋ/'Qæe7va, O lUa3O>T>t$ duv|tb1i6FD.΁@"J-MrGKgtN_?ψ8rDig^("MWXqcMQU1^[Kxʏ[;F|53c'&T,g\AHD^T _[TFW%R&ϛ&VKΒUU)g͊(VS3<8wbp|Ckw;lҝ)%DG\hq\&t"wé TPWM"VXkw.o641ޜaH4%q 󶗎0(bZ)s*'D"w%NL9DLN շ~k93'S/s^Ͻ)f[ŤJ^99T[[|tb֟ӍQR'l=kYRFK=-9ٻr8s* *G_>u(R';~h1RҔdp{mPTbB'{X2%XH "MFVDGY-:. mr₇˛Ȓs0*+_)U7TgN7U)CGĬ?_:RH~W$nJNy_J>,#KNkKIYւ<ޣߑsSvuVJoAfȘ=olD u%ZnꎏF]2i{ X)eǜ6lF{%UHC-M ;f X cRx$G鞺+54BӦ;oaC>Td&j؜28+tʊ{M]T<6J'&Hj:,n&nx+#yǦSklِ6rB֧ðR%Xpnj%jmpDu݆[ȹKn!"Xҵ~BiWCllDM5<@g{K|XW#UfЬپHW$"2p$upps75GH|r NoD۰+.{L|(CD,ǽGĐGpwZK1v[j e<5<츥d1w$w%dtpwgs[[Ú`ٮ#Rl]ܑtF;x+N|l7:@J|gg>{mUkgM뻠`IO$Ѧ&fKgl y֘9K[t:SNUg#":W%f:fgVxw7IDg錰N]WG.XUJ=_Bu[ q̙Fz8H5ͳӿpۯ& 4;"!3D퓼DzV7$l<^"*!Qf󉣹e]z[⤸Ug䌳sݥ>mrå3yH_b Ҩ.<;}t_ fEò'Yzi־A)4ic#'" Ēҳڨ$u;mL|^t ?޿$"VW6o+ 8.Kr*ߪ 'K\$l}z!/$TtU\)ౝ4hcf!nTKvoyF+{L \#nƒ: ;o5r|M<_ַ=ӦZQ8.gz#Om° h{Hzzܫ+D"""-QE*HG) 7~R/\486z[y㵙y"0-!EN %*F+v]S{kk22 ER9Ftx.<2sJ,O#QcU jk9zSP74#Tv4|礤GO;22鹠7J#C<{ݹco؅o~qJ෸OǶ軔0~֠a,mN*;}0?Z?=Ó߅ݛ;' K? 
'#Gɚϖ54 D$G;9Ru[,'>,cϺ |[^̈,8};iw}bhGy"t*yƒk[xwuCU͌N.ڹO-dybHO>lR_4wP{qS] *)^;i7pVcÖmv9$}Uc> IDATu93l>3fYrUWXi 2,_8oGRxv=֜g%uT*c- HӀ1 uZ ~(YnG7|=HWt]ՃM{@zk6Idw> SD7ܳ]/*Hl]@z\|K/'~_mvty_W xh{3<}}ǚTCRXP[p9Nz_؋MpE7>9meDUZ3+E ±ێt⼩cDDs%"X]EMuIQj+0N?W"W)d1ZnYLL7]&"TU"CQ) 0DD&"^<diSٟ'ćrտ[ @Ko};e=*3}n/Yݛ޸iDv/C+ysn>M S~mqA#Wʺ@Ko)G}M$Ȳ86_uՕ"T k&@Dq'>@z;iH\,K%rCoMwVhМ-+LUmzsֽU+sI^`y;0:8%>8eH7~|<}~+!^E_%=I~)$qnĒ3'kiѿDvZQ.mnرhր>g.(pX[p57Ao$&/bn%sCbFJNojg{zGE*Tjl[SEc/]ocӞx0B؅o~q;R1kZhVV'ӥr?%˰bapi])^/_g:"8ۻnnAH:nQT}iKe8KrCjܿȧy{}3 ,޲[|ת9~b(.j7`$̇4r/j)Z'DU>_0exD'\Lttt]ՃMxK=  [$O{UZHoWT^[}b7@/jt ]C!|~-CB>\/&O Ɠ_~r UKD~QSo]8@Rk7fJ)'w;.!"r3׶~ 8ߤV}>>"L;>:( }Njր1a^#"~8Mp-7"oPwjxIKy~œrD_:3să-9ȷҲZW}G[zϢןOEI\9|3#yݩ Lǭ7w_ʌǧK=-9ٻrH1!sCsnVM ՋPST% `"&\ߡY Q5;"5TјKũش' 8Jv7^8]uGy+°b UgNm6n3ߤpkftԪnV޲p}ѧ_ss."-[?^(%wG-8/|UeHi^#Mˎoz1{xܧ2xbnMKItVk^cS7nofk}ᖈ:ɡjV;`\т6l3]oLx춬(3Ѹvw~Nl$Nx @O"_x7f=VO+ -#+'ldo>DZ<: #ު?7ŽAV+9#􏪀Hw[1/>_/s^w\)gNy[h9 c'??qVzhi8r(ov@ecd-MiH˾nųG˭$;|ae 9ܦrRN;gEya";LUkj(r1 CD4{ql"u+.sH=՘,}rH|+Ww|뗴W̢IucKW/(kV۾ivQꄌ|v'"b IwHdx ˛~TT=-_n!$!,c慠f=:dգ5DN~~ʢ՜֝Ŷ."J߇ޘ|}ҙ' A-ZJ}2C#,1Tҙ`ر=Zr;,<+μO<\x&%P \Y^ 9jF6u7Rst9#܏\M-$'G}eVq7fe {pE]/'-oѶV myGqnuNKsơpզ7B5YTlv"}GDD}o-v=5 YV%远_HD{FߏnhPƎ$Ҧ]=ND-;df]u%-&3u֚YM<]\k楓,{ַ&$ߠo8m_E CUR<dWMĔ'G%Eam{0w؅o~q;qV1kZhVV'iuY~Ja/:M%稨0.ĚAwi4*͝cM5OԪfͽe2ZwKu×׏KyLdo7F wsE9woh M_s ?6Uf1vCeΥk|tOJ+}G*^oKӟ8YX~̓7^:F|O#r%kU>_0exD'Ѽ5cƮVm>r p /6&x vT>6n!.@g =|œv7. %M_Ԅ;L~?Lttt]ՃMx @O] p             @O"ł,2omh 5 s*7/9r0C 8?8"?9&<0B=#=!Ryi LG3^r6o8 0r7gy_Xus -fK#91%R{;@?}K=l ik;_aH_sZp)S.9J؈͹J6M P|c>+q=GV~&V87/m[oR= !I^?4p~|z\)Y-e}H$0eF%flƊm/UvQ#ʣw7[*X2 bʭ8?r+X9*m%K"bSN c՜tnb9GH2|XUi"ًf[tyީ,715۶s*,vMd w|$ac%EdnޘlSPzG 穰VѬT{S4nf|@F?:>AMMw*FfYM/וz6:{Xӗڛ3֎%M`჏K"RczdƧs wNz󍝾wv,]@z 4C)xcG j- uVw|l6r^1XN[ĊHh-; d3O!T!q45D Y,yl 8$9COoo_\vW춖[ g27w(QDTpv1SFoBDD|SeDK*VwtӴ WRfs@tWm 3Gh7$)6np{֓n"ORK+-/~qwk6ldʷ DNӮ꜡1;EOc;%JCJoD۰+m L|(CD,ǽGĐGpwZK1v[j e<5<츥dW^KuᰡІFč8%r9/7fURXCIwwo4`\ܹ5MgִYƢ :"EX5G{!C!:_竰ͺ;SGt""}g1n+4"*lFP߾Z!Q ~1g$NRXBt)Rg{M|~5I7~\CD*:k{[Ϊ]YMvNN|Z|Ij>Y+RGȔ&D+=<T ZQ On啭^sdmo|Lvz6#F|[ܱQ]ˑ&EJ<<)̅JDr*;0i|#"ϐZqS Ayǜ~JԦeqaTnBRQ& hr -?$ȧ6aX?TJc4==PUMU"ZǨ"D$ w:EgyDHhLF1 иD_/mXWs^scV>3"Mjڞ[Kk}S ê"2vswNJ@moxtHnleAFK%Бq/* ,Þ5N)X$5m)vD%jdUI;/tMlxB#jOEmqT31W51-ș}EDlxjxZHݼO|[4yx,OxƬYTQ_$;+G iD_֯_67er3۞RYׇ++;}*i컸)Ѳd韶8.ϴތi=a_ '#Gɚϖ54 D$G;9Ru[,'>,c׽yG_˧3Dָe1)s]')ɧ),X~BĔ'0h(􎙱>do/<]j ϞȜqƴ9J3Bꨴ壈m3[ MԴcC]B栕/{9t!_. µw@,gҭZ/rW-Y7g8ZT+kEtϜ6.v3.βM9HȲCOM?x7n$y Xu([{Y;yצX6O+xuu{t G҉hvmK[:-Y"P/D>CCG7NQ`/ޒVCqWk.#Qf>집{VK֒8!)ó'2g<1`7-%%N]Xln[ ~?ʝ_C1y(;3+b{-+s3 &w\ .[ZWN_n.Ӥz׆י{/ɼ>;RXPW"N[pN{_S3y/87jC!?yT |=ܽ7 PpD ?{'$70 a)d(*8ps/hVmj|UlVm+jڊMUeaI \B~!y#sss>sLƅE< † !I/_trRY&~.fn.<\e0u-~z dն,at.AR+B`Lli i֦CB-'ޑW!s;NϺ @7ko"@"?ڥ=C&r̟G.Oз_ K0Z= Fs.@|l~૾uǺ('E~Y&;x]k{l_;m휆6g+)s۩7_ݘԿد7:YpYT֤eCU׮v^󓉡w,"4CQi |X=Fr @ y_bov[~XւRw6BpwX0Cdߎ_g[gߜSP6ё嶤\@ q,--M)o(oRUKEuvM!*|IX_F~@ }}yT)?W\Ӗ!jA x՛sw~sE9rn˿6V:nجqHF700b"@ zk[q!lٓ55ťNjg:[O5ۋR] d{DL=o#؛|@ Roϣ_6MY=o ba5­^Ŕo1Z;%H߾\١7!@ >|CWqݾGS44e;@XZZC .^ث ޺_vx{>k7 O|qWȉSƒYwPW.ێ^jjߟNElSF;s>_筸5P@C<6"V gNQb4cۑSڪF tʭ딙^AAK; s-ydK@wRv#[oٶhB+.NޱK.2daa#+OC wc2\\&N6P_s) &SCZc-c3w1MOM<|F s}`/QYf'D :$t6NjcHC(A+gԔ|2 wN;jtDcW><[dG-2܇[vԒ1ɪã+ANwloM'?ok-7kE__mˏJlvnN@q X6ɈNVTje˵X^ >=ČFʹ ՙ; w=*DzutyXT̮'6!U 6.,>X#Y"SH5ed]zOV8[૕JN.|goWl1-fkA>uPB#qg_#цgJ֯OȻ3%x*ۣJo} F1n󻊽hߪy,pfذȧ{eEJyfܺ%h+&?% @wsl!_~2^Ȏ{b% ?~(Z \:1O47BaVsBl$cĭēfm^ sU?wxoٕ,n @ݫ[\17:7ד|Gߴ*jb36hp,qqymEuٛIco^  oV梵bY!A3Iû+DW-L=pFz05ų.;[ȭ.\\Yq;j7.]h=?6, ZuxbmZPe|5zIKʼn=V14\-7O[UgFI]c.EWr[RsC+w+g t嗊Ѝ885ࡒsMM)=: 2A/a@QNzE47k:ΎL$u GN0X7+ JQO?J ¤Ԋ%OgқEe8#yo3!c!b03kV. 
6c7v#X =RhGQ2^IX:`]eJQ|iB./'~(=jAC&X4 *"*ؔ~xl{еv0{ϦsG9L+eSNL.&3P =@DZt@8[0ɣ^dqNSas^h,nGùl>nu*qpNH0b3V}K%ABJ!Զ8h5X =Ee 5"ԐX,{sŦVjY·FL(kJ! ZΫ,YFBs Lj.RJ@ʩk*vY)FR2}˶9lNVz\|j9ޭڂšN0v߂=}r 8ɷcꭥ3p5r7 {Zr9/9ZX]F;Ki7:5(Unf zԂ:*c>,g/}ǠvNbL\Ц9Pk9])Uԧ=KEb5͖X:b> bwZee\GORoo]/P)-]vO( ,><ВeʒX6 \ioB\=l3N]#r5 Z˂oT NW*^+ ^y$aݛIm FOP-l.uA\F}>X ~gN{z/NbiؔVCoU0}F[X.mU#-޿}MG՘ue!`T 4K+o6Mw o:õDBNvߩe2tX硫F.N33 C%(u^Ǝ}XVwoo<ΦڹR;J*+iF.nvqW@`37z=!;B+OMry\.Z vDN5ĥT _<ɁŴ>iWHh^:$'.{gYXqìYLK? pzB)EZa(q꾁s[0Mm.֔*WUNhgC8+N=->A\ 61Wr-(dɲ3cE-I*gGMFMsPX#= aǷV,q6f0L=g 鑅 ?lּv4#˫\AX ?fܹ3ht}>O^M3tQcu`yLc\_eB#`&^sm4zD獮.A 4ښtY8Q(恬G-2vc`ь|L@XlC,-غ74]qOW73O[:oƴU#V x#fKGRЮωǜr 2Z|wȰ(AxӒi]M4 2r׎YWo"~^$(ow_Kk S&2{D]"-U:m5JY ꨌJk;Mo!M+ vD̶U?1ɢ<e- 2>X4FJikۙZ"A'㲠k3? PPS anAsO7"@g"^q֎׆^A7ROdeƽ{ߚhxнB~ R|OV† _fqo?ޯ%15ៈ9$dG%pLׇv_xϚd͜{D ӤoO-Kڤ9G^iWEW>/^&9~$ 0+Q M2U}D<=.ĜfN=˔Yҵx1^lBq\;t$yn[s |? oϑS=g}C0Z]hH%+jJu" J͜*otOa5ڶ@3 {C  NN@ ^@ Ro@ @ H!5+hƶ#atsvxrb}hDĩ?MU[QW.9Nذ珈S"6xcj﭅Ro)o2 #[Q7^6HNS7LeTIRb[3זs>'(+ƞQ &4c-n1g-o?>@Yr~: W/m8*44& l_]BPˌӃlWtdϦРU Fώ~]'ٮ2<: BO"`- R9V>Ef6*u*UJMmva58̕Vq!\}~zI6fW-oؗZK} X]O 4`֌ZD"\SXVƒvkgb[֐|ꠐ-> nAWIFI]c.EWOoRG^=ĠO=JEVf;7[dž\;~){w[U;y;]K(ioWu7\D, IDATWs_8bx'# w=(#@Ͻ>Mٷ.f2qۑ̖?b@PqJɦ;ܨAaRj78}D e޾ײq2hq#7!''#pr3* @%w=T (ʸz&w^{ϦsG9LvY2)r>dwnf:ۧm*?sen''bZO̩(g?3ZHfs2.n6ѨN3Y??,;\飱\Co?qLwύ,hj3Wֿseb9>nq6s C7Qk[KÛNn\w8l0!3%ǥ*ZS⇉,3ױ)<.7ڡk;y1gΦ旳.GY؛gQr^Pҝ{@G;Rs Nqݳto>T-*W)RY*t,bja"xmAsljTAGah[P[c\8'`$[nO| 5FtM{#?=(ӰR+8 v#7\ :QkW(ٙ 1:WhBk.ĥR){ٽ o$ټN _2iPכYQunޖ:{?:"\2~df zqJQv f-0/U)ĂږqMR26gy-*E j"Z>ˤ ibUoE6|Sw`.ejK8m@ a6`-&6tTF\x2vlB7=w!AXϩm~p$ѶDF#.UYTҬÒ|B:+s7T …$9я=D>x6@zmy{jF?&J$I?,Z+)ikfN{Xf-1cSf[ Nvߩe2tX4YYhFؘՠ+ :ZJyM*\Ƒ,Iy;v>/h#gi,sz˿:,}r&T8F^* X-azj5_PAYljE_4:ȘluLY49L:Gi`ѫ5&K8|x,CsJ*iFn\F08 km4:]q&V_{K5D HG!Z-wa1}fBAlޅFLht/ * C7#}5d cqǙf1Ml\,,i-1 kףV,qf1-='SzT˜R{{JaL~rp3)C((*w Xbײ&5&u bPc &Oh!f;;F7g ^:ֵac5AۺҴ?[cУ =!M]e`XoڽT  YȯS[o:GBrl&9uN8JLv`2s{#x;@ >i/0%^x{{eρʢf]ҚԪ]d\5{I <%:Ģs.7uַ~D\ޏ?-_ϑW,(hHLzcnͼ"θi45V ȋ{7ƾr"&hݯCش .R}eԕKi%:p;$ #+%%Lظw1F8.ٽ R2,pfO=9hOzXݻ ^/Hg r]"//fN^v4:~´?6i3=QvYfh&q9R񚠙S'= @\~3D}ŞUnxe҉=wMxi־sRɊgwz3{" 4st:s@ 4s@3@ H!@ zC @ @ !Ah oC6Zo.PS7pwww=qvQ5 瀩M& X#Ln c O ی?_ڂ .@ӅB!`cQw~5k*\(5Z|S<×3.{aܷT~]m0˨s\"(ϾqaK< n؁'Dj 3K?S9SE=(~a;VK*9u.3}'LmD[Ћmf l]Ủ6s\H)f MU \PԷ9pHմM #yRMn/M~N¿b>F~VTwZ@w5ԠbQg٣u;kM2.ȉJ.mPwK&]d|N{mdQBnzT~jJ@+j$@ یi;#`Lpl,`JYEz\\ViZJP~0y K rY} ~tBYkUvȐVZX/zx+\W$tL ouԎtfMLP{'ݚҍ%Bwgj4@ z{_hhrY-_wJ<9l\"uH边K ho~N^y!ۣdDžDž̩ 2WbHY&E V{HldcJM+| UMyKI>޾eW3Z? S[jdyL=d@)NR@43ڙȾ}WG؛jcPM]E 2mqU+[*d7 ^&$hvs9SDe7”˜ZI+ sf=5lQyJc?o+yf?a՟{*dKIΔבh,I)y|gN `hfL /YgeĴML.L<'r.H+ d3 C4ތ/N^C|G<_j{B%iR^]f =t0Kr1#2/@Z Wn7u M@lVjї*uR˖ۡ4a軜:13B9@KM&OD@:x|7"K;nyc FEUxeBBZ Uwd8סwot =wG.+U*77@d9* ;TJnJl:Z,+2,T4( z .5c bDXn 12F'W$b~ds[#INu)D5,ΉM-mP(A1G@gVUV++2Sv暭?<ʭSJMhZ֚^B(We<&;> $bCaR"VTJkJJ$@ ZNXxɒStUF"# Ud }*:EUb_~9_U>55)xE ]Uʫ9U.sZ2UI5#v+/L"**k{yEUj9"D,`?ȭƷc QhR,eݗ$F"@(ĢB `&F}|@2[s^tfS"giIS[qzNÑJhXMq^P6HP2!6ry4H! iwC}r'C$ WMUgb9: Io]kiI"V\RL֦ 鐛E|IkUb]O ZgUM !\S)F ,cQT J%D>L7ı5dq |,JyE:s \&2KW~ C|2Վ_-Դ_S5q&Z(:-F'nⳋ6SZ񪪪+ ϾR*[B H! ެlJU6)dX%+X[쌔Nӹ:|ݵw^)U(l=S-/hP4/[e/25z؈ mJQ* :t FoУSچe%WҚ#M-SQ+Ny^܈L\P#H B입1"Ɇ t X F!\i~Ew|#TE kYaA~u4;S%'YeVԘvLm B& @[ *%/;Vm1MKng@?;G;D_KK۲RnȈY(|47؆AӢ3Y6C}],iFtGOn4ș! 
T ((8zеhC nG"Q`&'HY!bϜ44l첌/"*Ȭ!hLS}t:R畢*q!V=t }#F3rԗ)(Svau-]=&SJ6jrLjEbXQm0hŰdEFB9ʈ@d:[m؉IFNvzZ ֊}|45 *h^C_aCwX_$cu#]e9w R_߿} %QMVQؘ]2m5q9 ԛ%z`[']WI`TPpk@v_2D6Q* =p\OZSmusSos-h:V`zt@SîK^cDv`mIl?POyrҌGޓNakf& 7F&.)(C J (Go:4ػl)^I#DO!wzNUy-lkrax[3뼧ȃqkg|t2\E5 YvhFooWYYD{"ј}Fu|]=z}ssQCE⎅a1L-R9xQ7'.+ڞM¢^w2!_2rӃj=n@ 9TY_t;=]0# rFY 7xsT V<݊Z.~io@>N6:W{4s$ tIm S@!S4p/4m鴽RȕNj}BY=Ak 8eOKKt*p} >VԬ-٣S[ 4 "fgSdR(qPZS&H䖬%*]'⥪So@mK&NxccěWo]6}A[ %#B{Gb)8ߟ$& go K6ihtt;I14/}YceEyPۢx\=9'$f,ԧlr@u_ rh'mRцI %7 Uz(Tx9C#; =H:o& S}=b#:QXP5 mY;v>Fyrk#Rr|Tz38,7gn i34O33xYťT[$χe\+r+k􇍶1hY}ܓ얉IY!OB!V⻱5&~NmQVSǙ q S$F! ?N oW y25pߌ.ati =8@ Cw(+2Z @ RoKl[@ zSHO6246Mn@Ԑ @ > :g]RRs )Q5 @t{C @ A&P;D -z;zzMއX 10gm:V[kZ}hKA YM/0tYmх&?EвhwbkM G IDAT=7j F83'D$~zDhid}T-W e VgvcǎM獓 N?ͷ ^ BgL^f$P׃9Цȃ/1xy~|Lis/a@ '|7rA %:ŸQĶ!ml<}w{f8&Zwpy]CoePղTeX3j xj Әa IiEPv?<@;ήvjY꓿Kj1g=ߓd4K=xb0wc5BmkA凜ֽ書G_p]]z?>TNJߋϨzG#;|);[ߓ̨@W<);>Z&,\|;qkK j U'65Tƈې}д$>׷Zz275+(/q>hv[7Z>7+Dhؚ`YIWވ9$ `JSX>8XM,\WЪJNy% 1srxէ7(=M5iʒ;T Yd.A@Pw?v/l/\ (?~} =;x r0l~5-~:K@ dCk7 nn&a+W\T~J$c*2)QJV 5R^yD2N67@;!OiNsERsgR@Ι@VVFThTPg~1PAsSM͙jf߬,&~ېNSęmtbi,{>D&y[,IʻyЛY]lTM2SԲSK%*%HK2u, Jn0lfgDʥb nޗl8WsNzX]AçpU;AXwK ou6 hM7˔an+~YgKl٬ud6`ɂ.&03x륨c eWa2p@M:"]ӷ|]ڼ'tC9R9ASB}c>#0YS{R( W|Gk΍9,x,~+Iڬ}spφ 'W2$.lqYƆ|7AW {~=fɷ{kWX{![osרOݐ7]7":kхWw渢;夎s~Nhr,l7ЋX|e TJ_'vO556UظȫSȚ6.PDu5wꎘZJPJ²#ԈRڑq!3gԥxD~$x=\ 5z#<w)^? "#x&Y=9:ɏ 2DlƒJO QFniJV5IԺ?s*h>oj|XsV4%T[T* ]3^@Ȇv =Row}4"j6f]rq,zYvۗÖg<>bĐ5f.$x5e/ՠ!k5p8ѥ /9hCgN./S},^o#Z[0lj|W}H$!F Y>oBKR/gkJRfJnr,jo؍炯MI~Œ.)bf>nEȜ)'r/ztOxTВsK7nwaB>ʕ@ >v0Xf&?(up؊?ּoEs/)m4~X*I2KH%},BgtIͤ$IfBb&{[ ,\Q_BےARëϨ6Y=5zsP\[GR,wPZjN̜D> M|ycUq$G$&]W-S֗(9 y7u/"jV@4 [V_FoJ7v"OK--ҍzVFGM y*%Wv5TT_g0BhJx o e}=IWΠu<?MxY-lQDj?O*á˴~.Y<@:pjBG~S2Y/ݴhg˛ E-"b GranJeml HrAy%@Lg #"n &\kʤ 'wS= ԼD+BnұTg$;9hPRfcOe@ z{%֦Ts5jCfvvf>ٝ~>3!"2a;mcGg֜8߭󸝪:U?z)yA>ʭ5.n1+F̭ڴFd>Ru淛TRtZN,ZJU/I DđJ%TTbXiT*Z~vFڥW݈zg%bM޺*E>S&.xlپ _mʬ#F>dB1<"(H/qL51Nd&!0|a=hD'gL~X*ߙo@zwW3߭zN[b>hR%" E\%g3߿tz}aqNvSy2[$+z&Y :1N9ʉYj2CD7۞aU3V1GȐ;/zdH 8B(,.g]VI]7J+dYVlrʟ> V6ӬIscG\ʪ:glđPMDjjN|`$gww4>f&] ȯ^ĦJ]MR-1rD'?021&tm`^5M_m /̙LSV"?n+R`DDCg"={v-CbvZRȋ4=OX^~u!^Rx;lVm% !f'’HtIUP?Q٭zNo#%N[lL;}mdPpސMНfxD%VRyA[]m 0Sjk/b$ V \bM[tv+qxn\ᆺ$.ne@Cf-\2 il>57rq"Qɱ:/۩ֲXt<>~Svs%"M_ZtUΪ=ָ%ɈNǏ4D+rLFpZ(c7' 6"~Ë|cfF'5wD* #e\k!,w<v+bC ӊL[j0m9Z$Vt/z梕HkM_ږ/~fݼ ᗵuw甈/b=^o!-U;C^qnUGO۩lJd ;ﲶ&p%^zF;PĵOLYtzDF/εH=٩^vbK>{wXhCFYUC=>#vZut>, _›Zmqx"2֜PYiuf*ۿ1mDDHỶ >ί>͚M{u)'{J\|][3DDGyfV~UK^\ u3[?Y{O܎E-cqTs1lZdlcwNtǿX#1yޤP5T|Dǖٺ?dₕcD kЖՙ,'gFg6lp>I͆]M+>@jԤ?4g|sV}LkAT'Mg!#Ԁĩ3nn;OޓYPsS⣤#{1Wm^}֊|p;i7 v߸o.rO'dXo3 q$]mEnokvrm):D$vV<}9®-d]ҳD~Xm{ 2x?9￶WIv[[/moVgv:h uo߯lL&L> #_nmgf+O8mBա4YQb]lNWԝCDuAo.7?ysaI=Ҟ4̕z Bo6-@i{&|; sg:Ot)wS^i zFUfZEDC֪eeZm\&<} ww LvN^Զ2NS[g6@wgmR}%5D$*ASwp}V?ќ 'ɠ?>h q<68V:\.YiYk.7^a#<$ۿTZ+3ziD=fTp˲hgͽ/Gmn c;s>&yheo :;17t'\]~;H^xHm/f{>b8qAGƎzkۘW)79u/s) v^:;9|rZTzC =Fe̵y"neܤh):sj\rj/m``k7`ٟMַPYT5X,\+>6F(tLv#<.Z[k=w^gܧk,2j㥧dPdk}N~^nHd~B((+OmԖu8H(۲\Ʉ ,XK/6NnȦ |AaDl݉b/h&UO=ڽߛ:8/x'!ސ>KjƊϵ~/ Qؚ~f 8K$rW +эcD57GC<)B;+O~ D9{7>I_r&WGTp\k Ϯ>]e&"}xԗLKϼKYXpՔӟҜR-KWױjJ/حi?-E \ٵ )TQ`':WP*djl ^ck/ xGH36H{nW!Mju oRρ^MW _ds,eުJtgRXsV{!=+T."U-OOfڝR0ڂME^N#FԤo66D^^5թ|iGIqb~т,魭9?.ZȞN%^C& K"&o"U7t k&UU47k7NfPLIEt7Ivs(E2_o4 ^FՒG Sps)wr4^J!Addn>"+y"&#<G,syUkX. +z!JH},^sqڶuGĄ t6~".$N"JDv)";Yv- = .g4`ݒ"Iæ'l>ғs3`nBA+ >m}.޵M{N3#?9Mkᑣm5ꃧGG/;;oEq>SZʉw+E_rog1q;Δm݈k.y{ZB'j7!灳[!e .dRTUieL1*+݂&g/= \Ͽ#:y; :ӳʭ#"IHggA&-{Nuc5OϒhIF\؊}jvܦ 4~2i'^!4Z]iB1<>!>WQCi!Lx#oV~**0Qdo0RY,6>QL:¥J ɽrlņ; %(/JUu4q q358r7SH(u ,-k! 
xy+0ܬ&K|tD7xS/~{w*ok9ΆY,'4O3<-x\S1\jeuP._755 W]Am33V_e\]H|nKn~CůX(ud9Uf~%Z"vqun5@rvc}Аx_S}<يTL&ёHg\e=9 qIDATlIђNXCz'c{2bH~=24 'bto< >@DG3e4ȕ}% v62iijtȢ";aoNSU]76_*ⓩ?iZ[cF$\ursu>}R4֢Q)?|7:{4C>+xftv^C:p2kt˪.n\Gg7EL;pSq]g("T+rh~T|ӂevV}QcO3]M `Y,U%8Kqi|آ ƫC-{t:>jٔgrcF $VM"m-W-w6ܼ^Y_s.rsb·- ij?+.x;c_ǁo(hު]s>-+[ϫޞA~:/yҳSxPO/ 'uqD$%X(ݣu*~_Z~5#P]$ v:g]t\|\ㅂsSlR e!w}(z{u6,tכ$+{(Ƈ~&,;wSz` sy{aلOF:ֵix隢ee}5&T*mnF[P_7UGoM'K>P=hX?vpʅʄ@wy~gfθ(+`Jǚ2'{oG˔Ցbћiq/VVK@<AkmQlW罰+3_S0Sm'O @7k (cSXZmފ0e9q@Hodo:<9o 0t{!)# 72sTz&pu !қىw¢zc\,bAa!@GHo=z\# 7#\4Gi4%mݏ9T*+|ItZR(8(7@z@zC  PHo"@z77  d?ؾ[)[3ئvyS SPjm+Jg^NF-YS]+)(8677~DD ږ`@!4]  QA @G+ѫOIENDB`pydantic-1.10.14/docs/img/vs_code_08.png000066400000000000000000001011611455251250200176350ustar00rootroot00000000000000PNG  IHDR LsBITOtEXtSoftwareShutterc IDATxw@g_@#a po뮣-n[Zu׉kjݢ*8@3I $>Ar㹻'|y B p BB!0 Ba@!B@5777]mEObch^_Ty/FY{wĻAa$n0yvXN}iALaYzО,D 6'|pMґj;(ݲ*rC٢[T,.z+H{e$>6:zg6fbZPϞȋ,|6l;~Ό,v-F:Knz3tG1֦:YzZ=T,-JSr+'RQh֡}¦h+w RRaڜ]lZB!S=7L:0u"y B=Tx66sd>C;hCYNn^LWWP7HdsKΥ1Hu]Rvaٵ$ rو:0YjǺ`YϜt >g Aր N-~vF /y"|"<ͻ=ޜn9TS?s?rH ẙ7IcEo{ ]" lIkjoo> (`{ +;;v.}ۥ`ԭ>p/ݜŤ*{RPF8Fdv/ *#N>-EbQI߹W tajA~r(5B-{*=MVU7K ZNVDs II/ %ZRQ}?_Ƶ{V:7Mp C6b[iiP-._v2t$,yR~82զg*Qd~mBs1B,*omCA)҄CO+"A)N؂ *"?yVv#!QF'x}ZYirU+JJ ݛRA@ju!IR @r}T&mB (Ξ1pomGʶ%h2|Y]]ݙ6L}L" t:Ш )4#*AMn80y2;'H- @Ukt@Z U:ZZ^H:7+[ Y2<- 4Rx7R6rtoCAe{X Pmp'iO(2]C&4;uDU2#dfR^5S#62FR4BJ#~PWg,ܰܰĦQGO9IjERFB,)zߵwkԮRqu_]MtБuecbe ^%aG.P5Dnn$ET@F4GEn2C3W'75ν,/+.#ڜyT})zߕ8tHR5Ptϼ^!BzS:nV7?,6oLfX?.0֟Y|~ZqIOaP* N%Ei\tj0پC´7߯qhL+Sj`JKtPR\F[ؙ+ fm7Iq@aY4:*Ә=SJ$11;$7ra0(::Sԗ/<.83Fba6lZT_bhcCTnaĪ &M|θ{!iR}Ȩ}_*|a@suܚɱ:]-LEwbBK/\1 ~ۣO2Ɲި*טEP^=X7]FؖΜ&8Yu»cH`?ȸ]@>M~j{wS;QK&6beB]\i4ڨbȲ40ζ71(K4-p`NmRJ,8{ӷ &ǒޔ[׆OK/#MC\l(ֱ8A,0 nbP[% u7b[xL`_ʌ\p u$XTO!A:ۤ 60@V7"Tխ7 RTxb~\y[e1{54{}UHJS\~[T+qbkv֟qtN(5R6``艁?OHB!{pC4-+!Bk!B|!B?B!!BB!0 Ba@!B!!B!!B?B!!BB!0 Ba@!Ba@!B!!B?B!!BB!0 BB!Ba@!B!!B?B!!BB!0 BB!Ba@!B!!B?B!!B?B!0 BB!?,6tYww5)m_|G84fR^jF:T:f?Ԓv \{WWMmOlkxYKx)k͍i][=!0ltXir'DZsΨ"t$[y1WJJP2o_q)&Yo.yo:)01(JIw¯ȥwOޭ6t׉0 |'0'm,}w+Aɳ)wZy̎Qs;͓jeg*xC^~ ؼHʭoT3BMgFY !cd2ws DyE2&b3-w^G4gI_̑?\yk^ކfPM]_vlB\҄-rMvwS:5)-+#㷊pp,w BAX6šh@y2/8!2lp1-A+d˸pROOXdѪRI汔sW-4ңd!E,νZۨsyȨan91b}XcAFIǎ:[ת(:>1v!tXN˿Xh7)bAhVsDjhT1с!1Xz.x RW!uhF@4/Lͩ@yIYKxKbxżJQ&%m9^^ݓadk/@ZRj^cӆ+ϰh/t>ǎ i;n*EpFpIrqosr/+w; ]{BBN7@1vF\z)'!mԹyԵ^#Bm„GFaZ7`!1k @ka{ JaB쎥W3;ukbI/?r|ɺBۨSEZDtyQU]-fsʌ8x+ V3[@MGa Br{'Ve.G*%hU$IB*UTf; $UQɲI7@Cfh"-A&,|,.fϳ_ B^VXތ߻R`:6₏Aýšښ)߫p;MFqܷ$N?D"OY<',|/~vPiT[nAJ3k:ncX~l fx7.:- ί.[zaɩ,7+oJm DMOj&H=;*'up J )Z‚@6<{Ud*͂xΙH. 7ÂLYt pEMn"xd5գVp6[D<1f4 lýiULؑa ﴶi9:io=tn7+Mܛ*2Bv#Jgy;žSCq-߈26lʛ'e8mf]Kn h<HnD:cYUa攝k&uMO4F.,;e4fJVi~ɏvijUp ^]6ns,Bo0r'2H%H7vV)|ѿ%8O+:PEunB>*+)d9)(%IϽUqG/G.CrKM6YIqr)@QYaVuQrޑ+u 5MQi],T_g`Vʜ[b/W,$?"ҮMw3Nrxٿݝ~c$ˋ ;f<7rVjṌ$(KnE5X[ϟb[6HbJ0rk:ڷ7Z}*Juir)84ISQuBXiÇvhm()*I.*x(4aZpN_NIа{ l[(Wdم'"uJ-nF O ׫Q9iH8)_IиcYS TRI C3xA liJZ. P i/hviyֶT?>^^Guv]U}UE 9@|i @ͻu!ِ%D79QkE$iʨX1I ^}) 3'5P/itFF `Ē睍R@r[{%/UK-<eecArVWf>eb~s /55R%0: VXXeKgV^+r$b/"-=Dsjƀ;J eY95?7cN0D5RRyA$k?#݄,Q3[zY=| --70,mL ?Ey=-M8еj-拉lۺi*YU&>4 k ],Lf՝ʭ.mڦ-io߁XthiĴ0Қk.. ]|k9_ѳUdZQ¶`kw+#nKhM,)!<9`d\lLsl̼[@%i<ȑ`prX .Zz,S6,W/pBcFlKo_IxRWt;ktrStLEwlդkn\+#uhfu9@,-۴7,=LEŇ/ }?OB{PGk[/ ? 
^T"ǒ*T´f/*\Zۺn7@hICUBN{^NA)F%FMK!&5Ht(2h ˭0n_Zrk)-uCOdzB;OG~GP${uWcZjGrI_29#W/uEyrBR}V7WB,]TyʜC:hk'I[k̀i$~ش(JaJi7@hldˏyV1.c6^h- 2VgJ)^\3"IuYAeF,>F9q IDAT#cPI $4KW ݊(Js~caLzp{kNvs9[}y]畻f7Fu8ϰBdA}>ߟ#!\:C[)9ZĶ'V|+>>BϱީyXGKoT7^#B?B!wB!c<*R!aB!0 B6~& mG\s#k͍o:D!04oٹp% ңn>boSk?utH;yZR6[~o !j$ÏLZ2(<6!wƦѹBO 꼣[.+ Lo|9A1:`7Ҕ5"c=nHç7)gƴ`1%TAl;*(+Ĺ{%([K v#m1+S>:4gƼrs!+}atMΩ̮NghaNϭ7*vdφy%Ն^_o^Jths¤Y+F-,ܾG 2rU s;zxOt!JfSeaU䆤EF#ΐ +B!;o jx1JQTŒ؄JV+łFFSqB+!BnpwcTKU4BA4vi«~ƴZ)NJㄍZ)c%C!> _vweȃINVV :ٞ{uVlB==[VV 5j iRg:9^=Ƃh5⼢ד7lȼ"5oZF( +B!J!_AlN%F*I~Ѵ ??yG9;:XyO.tbcWo.`V7yZ4&Y؏hĥL->*HSrTgk"(!V)B>X{>|fI/ I?wsB(@e5/ob`gA+~\2hxrbx\<>#lLr~ϛZA]N1aV%>7RqtN(E!Pj߿E!!B?B!!BB!0 Ba@!B!̠o:>_ЎB!)*NV}8q7BJԼ; BGװ{X۱ [/PoNS(N?7cp〞sg7vAq~ӛw=ibD$!Q!T*uñ{j@z]Ώ#8!S%GꡧZIa"3v5VBSt#2IWn^tD vǛ'D'Q^_o^Jths¤Y+F-,ܾ6e䢫+W#r̠o0/z{7.:-  &!&SM:(o B!^ ݼ%.ɺp#c`{B{_Vxnijfq7Qg\~ߍ O*NdkR9VB0MB~= @6T㒥%Nn]D1M( ,ϻ(YBV`$ B0c~ғo{SPY옵s㗯wXG !}fS/K;/*rBW3 ^+͂mcc~;+H~_B!Ie^m L1{@MH|(8W V5_ޓ Xfہ &-_י6m0GM-ͼϒ%e_!J~j OOO<!^N@RoxxB=rq'W@!Ba@!z>?9m@ m!C1!';=Nn7=C!0 B5B kl_g'5??6ʣ}jkG&< _W(% lEhKg^y,-cظs 77>n@Fqݳ.:zZNåK^5+|NOY|Iy6<{`i[ xgK600{xBMsbDЊn8eGG6^Fe_ypLIjnKM 0-~nkNtNi#t9+?HY>k0&@;rBd&S//B߳yҡM&u6wy9AG ):5oA-ً9]}:1bV eҔV06-ƌ,?5cm_se6g Aܞ^o4'=}|%T8Jz*lihgJ 0pxC$ pFl:qp^J?)} }W++F/B7l̃υ*}Nt۬$)_}S&jSĘ 4`*.9KB}AP7yUu9Y}N,.JIؿqn}[7t NM%߉ZȤuKn"˥YRRl?HX^CAԾ>ȹXͿҭYȔD!|hfD):W.3*KIտP`Bx/SfjaJv4f7k.(˄)Y@㠶Yu?=cuѮpdKW3i[rh90lXf6TRmRUXoqlczШ^4_k|o^4'!;LӁ̈́[ÏD^,Ҝ:ҧ"{/۷7uXGcf?g_Yd@jV(GޒJus>q64(3h$}'k̚ rNqWQs4ۯ<8&AM<쨤ֶEb%}dAQ,}#_.;_H8f>83D7VMP#[ ӧE Zyu# *>C-;waΛ{:a0SW#$7Č %ŷbG䔽;8d$È Y}0[Gj^k7xVRgUG?^ΗuNAq^vqT_3"*fZs.ʼn5Ls^qIVt@sV+3*H<0wϮɤ\ ?ƓOZ4h (4ж_.`N?~{6SVL~ǪʙM{`V"^n9ݝM崟懈.[1po36A)$uf-'~?5(}ƭO1[htEvʿ6p]dniKgfyf'rIF5u֊ oycd }n2il&wK l^շs٭O@p< LoJI5֒ JI phe~\ ^ζr.\BfȨ_N&uR}It}ץINŹJ[wiCHcl5Ӛe%f!񟫂Ta/|VK:3fLSCE'&r?`5RK j0l DL#Zz?2h$;n 恽gE??5lL߫Gw敒2qL0JKq9b1?E5o*q 'KxTu^DCΩG?E$E1ǎ3l>Q7OD26&ؖ2"^_ۏ>!hgv~o׶{:?}[Ggm?l5nM|\%IW~Ѣ_&IG7M_ͱ `_Y%^#S[SY{\lakj"H僎BgDNia<9"@w7>#ADQa܅;(KNQZ~=,VX.V)TZK?ߢBKSQcKߦxUdGUcGݠxi9%ҿev^`*9??6<@郟"6-=vǼ0Rny\~j4]y5Uʾi+BcBf>dQ⭮ڐɠ<2:KFmKfR--ߵ?>(VXw7znFvS uR po-6DwK/{€bR!U5Z1YT,p#Ļ-hǦ= x57^hW^;,|Gϲ5ե*Y}(f^/{_qL2ޖ ?)|ڋI0MqK|&ǖ|zbR4xÁ~ 4ibwm^)M 110A]"դeXiS2Wd›|qcۛC<~U+oyg^Ej  l9bYAGٴJA˜npX'jL8cY@mhӈxBWV $ۗUԼ_SKGhe*zS;j)^N՘^?v˵, s3/fA((|m/I6n߁7KSk?wsq$ac[r/VKG?3`pԶ-V6go 87ȫ`~Zڭmwo{ oZoMS ػqܙ=@ G)dcdOG)|GƆk#I}ɥuJ-B;Y,sty;*cN6~x|k]W|FOţârqE)ADArR:gۻ|5e& ,UUS=ueXoCTͼ[751Bt!Aj4ыĔlߤGhuۮ(0-yXY3fC聾M0hBUB:צ@ˎ"ЩK$/g˲J8JJxB%_wo՚}3φ7+ۅ/_2Į6SoJ(ҍn}_p*g{VW,hŕ{\ppJ̿vkݥynd1tV'fEFaVR@IOzhr/jۚIuV8peuiYɄ_? %9P^-Fuu/-I);hʈ>w3RqꞿMm:þlcR%ğ5tW+D?fݮɠ8yc? ٹ*GzmZAWuB)(*<_gB[y(5S-IR̛Rrz֜_l!t~{ 7a_n%e+w#q"5>kTa^XZ'899*Oj*~%}C_hzhќ޺a4; *?{3qiii+` ^"7 Ӗ΂{dq pn#bӸ&f2`0Lڷt />zԊ:5>(zsи>j-4*|Bw`jy wB IدOp? >]8B!!B!!B?B!^9xBJ&G‚; BB!0 Ba@!B!!>TFEoŁ_9ٵDE8v,býM;ۚAXDkҘ#6^T!oع%v=pBӳ r`y?:8LkBm}< ͻ:y}DŽUb5S7>P_Z=uKϕa~+,gN~9EVهjNpl_qe~ƒ/C$ S`>՟mj8-hyBlĜ|@vf{քWT)%dJ n  1cHewy B?wrn[K7Fd:/Q%}7]lI0)Gsv7uB͗ê'Fl[N=SW]52ھ-GsF7GvKSVQuWm3m&pANGg[=Wɣ{5ZT)GǶx,zvJ8m'Sd@%Zpd7Pd9]1턙ÃL9ѵL~~kfm|!r+r*c22Q5'S`i5jVMA:QSh\ȹ|$_k]K=VVMXھ)LRɗWĂ Lwvm;pZ{xwjJ_^+i3dv+=Iמos̿o'%5Um>Ɯs 7Jػz}I^V)`Tk?rjb@9Uu|A.׾_rQ xZ<۠4gIݢm[]p7A\U=[zdefA&u"K 1=mjh9Pu­2m1F \wz Ӷ=&Y`0|. ksE"(~IL<; 'ݔ8_f];[AUR+*ɐupv"]kws⃧ejV7-x5GֱuΆIjmfN%$fKbAj5PIiq^ҫVa)^tuD 3 3=_ 5')"s]M]*J>'-jӒ%$=M!>KY J@9jR CBGt5]=N~n?kL;/fyM)GU@{dQ_W[|{vq=|9OW}|Q%^P`u%ߢڿ_)YxϾc7`T&"&!uJt4yzȕ"of]/UĞBTJz7Guf¤6 !V6d;YIyReuF>RT Oah*X&q9޲k?y ;W_A69k8e@HFQDAu:HmZ;öVھ-v诶VkEAl !$d\.(XyrZBxŞ/!G}w§F\@ թ߼ܒQCloŏ2:@iѪTt!]! 
oؤOQx`^q"U:ftQҿZ[2Acmy˸THsOO/O\0`1^ο􄺜qøuI 1= v."=G-fغl?(,&-$0w橳/!;2;H6U^:oV|r+f,/pk>-"E Ե-.Ns"[,x-nxY)vb - - ױKg 9tzo:b1wͻ[gBML^Yuʷe g"72Su`vVNiwr ,;-5uomMacFXpTsd $?:}Fܴ){_ؼ7xѳ|{Cn{gmVd-q9סkƬƨ>Ot6 pFEuS$?.op6b W݇xQ ,nktD|9$ya^_BWz|;ZiPyp si1 "4OOVTTb߳_t t%O*⭟l.vW_'^M5:m[ErmEE!D vc3񃝮g\ma2c,X}%Q1<&8vJ?Z< >{%>i9fW=AwN0g͊LYk;X1LN9߁9^3/!$Aw-@!!!BB']ìR`鳌Qﮟ W'"Y,:ƪ?ڟI\ݙկ˴`бSc\.:31%Fb& gHfYHyg`?`_/|^T}{_P^c1Xho^]͍ѼUkNLT|pNS2_ؙCPZRzVcڨ#[>o%3ۧue);Sd ==έczʎH4< .\9OQJ?{ǿ!z"Afh'qZmƖ̑<$u G nZ2zE8qWJSsp;!Ld$i'2n'cF^Rܾ}`g{EgfKnBypr+obs,=[f'‹Ϧi9G]n`Ez YU;'7ZcReuL7eeT䧝TzLB4``nRʲ/7Wv}'M=W  8Xpj_aг4OT٩)OT9k|w,:͎MԲg[E~&dɄa^64m]ny[D.`-`{1A/ٞnR\8tXC]ܦ,U =l~BݮHu+ } Jjj~:V!2(R.S oh6~c]m(N^kiϔ؁\:7 o׼?h?=e6(&\^p*]y[%0_/N5N@ꔲ&=IZHL7cW}yC{8 Y7*~<{Fl'jƬjS-ʫq=\4<ʼn6+Q#wo^L0^^{$Ϯv]/&kޙuGzU4b^bxֵeY =CzY`糏#uskNצNۺ՜:(ܛB K8aŗ5_xcO4ny#q{n񯽝(Z 4p ,WDLxi޻%?"| ߺלKQjPEA)-EuSj%9WmF5I8 1e]\7wu cGĄd(`ŎId>Ug œm Z3ea  nJɊ=p!G B˨uU70xa\{b ݹFcXKҪ LaHL'JZ]rA1CU)[XnFƉ2N'fn@+^@_ς;kLLԝ?vB]TUIu5:!?w0̴e4:ӣ?4LJZ.@ ""Z.sLe⺸(KrW)tTM/U֖Ψgx{N"x"4Pה^:?֦1_dKt$TyRi S4(xfJYuiڦ9Ѽk$I$I7#l뺿*I`_2 jee)?)`lۺYw`w@yl#;Bzۚ5 Xp4 [ffK*Z!WKj^'5zkcOZee,4j$75[JYϟ&ihkruNgs }m*Լzdlhz婩*RvV~̶Q҂+5Zj5 <ik*2;D vx 5Jk J5KzyMϤQx]}VʥFUs-Pwl/ڊUeRɑ~Uu\BGQFezLK }ŕf ,ڜo$˶C0(kkjF!--nk+ ]S_]k2Iղ{ߺ;;uu\רˮ]h:8<|#lx1ns!s}iۮ7FEШ3 [ n W ;-!-` ͿGU`J^_/))-8s\u_S5%u 3H$C\+F`3lr6~r-Lm}qoxVF#m}LM֏d)U H>dڞF xlbd2֜( Lhh̞CUZ#XNOj]7-&&tޯ(Mq>8 6O)ը)׶5r2hJ@[ǁŤ;yӪL.703"ѩ,]P)D P{wyyDŽy{e,MS'rww Vxp:ciݷN/m9-#1eaˆ<{wSY  }2oq[ )}o]VzKpiK=Oo)eD[jL/q؟o֚Z15;5WVjl}}rʠ+ Uu{mA74ASQ!n :t1]TMerz-R,"W[7Uvצ)-(EsuÄ=,4΁c(UZĶ絟Veh&\@iX%n0<<9~)E3eʽ8w1S("wܾHym_AϞS{Hp{c07[G4/_3ΞNLPuۅl6EN?Td; lx&lTc݉p 4aG){ľkZzohׄ ﴇEV+#}"E\0V  y<1nXU0֞(a N-=?9T)~jQOю-d4F0|/-xF Y uEpgeEi-SCȱaJptZ/@p'qWٕ2KTB򝃣{4cٕbʥh?c/fª+A&VցGO>rc +ǐ" Jʵ@BF 8ωRI޷i>9<: WwMr1OoѼK7cs׼;. Xiǿ['Z8密$9+E<1 /3}-m/7}czEGrCƾ>3ڤ94t? jǰO9w8%:߸p64TS~cWdsn~}bl~ 4oڂCJSQ5rOͭ:;0rE~hVݝ4K/d;nV$}g9ЯЧ@,7Z> ctTI\5I= IDATEQ)hz(WwK,L:i "aQc'm]3KϠg :.cI0?E79)箷bEÂYТ.T[Cy.EP.{]e? iѩϖtl`T}WA 4OO[{v: Æٷ>%w7yG!A6k?moU|{4/jݱű3{HV jxMsJjVci;`:LJ)+k- gޙڴeƦm59>qos@rMZu\ɚ%ۮa}|Cwb"G;=ZJ>vzj)tc/<㯐6n`a@2!h8 \!Bڿoy<fݼyӪ~^ K^>h~z?"1g8dy/m;otq6}G=t_Wo)Ϧ2_4l_rL0L{nt"Iu^ Xoz^w8#U5gv sҢWA K=zIDNm}u0ƨT$Sz`c\xLjc1,+LjM1+0GhxO[T!r3j"R\XUȮErk>PS<33x|kE}Ίx s b3d}˯-f_j2sgK__tC9/ṱdU^NkL01T\" U']+-+u25ޮ놽ۿ൅˒OfN jvR]v+Z:{:!TSk= 3E4}QFJu~^NKԗ0cD3N^4'54?ݑXXEdS1fhJ]iWZLz鏺^$7T:8$;>jd3&ťm{?1ZE8&-l{x<.(R.r Ry!&4^@(O;#ye#{W5ёmM? zn>]sj;/߈-}vd3F S!2fNfD i/Nq |mi  wc$XmhQb{8u'B㏇D1H^edZ*62ѬS5tCl}-ڍ [Vuʿ SnLtjk[@` Ƭ7Bv:pKm54cŢb>f9 e4`ZXZl1J)`)fC-_kHxlT[@)q L͡Q-w X9kШ<-pkB>ìN !'ZwqY~v>O92VlSSΌQGnYcK;ȗ-xvΪ(`z513y2} 4<)L)sb-9VNOZ5kqI=Sv|gptu:B[hWӹ7*}~ޭaύieJpC .L--0n"pəUJ茍o0pAvݑ2fd(=X(#"l] Os8͒dI3#\ڍQzeFpC,&u%&CL#\mCF[ZLGG>ReɣYic ωRI޷iZ<: WwMr.ۘfy3A!/:{tJ4x\:j@gg>o_70lyYχY׾ģ f8s3)3ЀҼ>OsQ^ qbVwUz"kaNm˿}je3_?_wzO`8\ůB?ha:Er]U㙱&Okؼ ⡟owhrSkUʪZ{?"wMX/V jlTBWA|zt_3yV+ݴ#Gߐ<4e8yEo9YpG7fy29){{4z/y~%?ڕ6q66:U]`.4 >)=ډqз~b0(cI( Eʚ7`xfvU)Tk&%iY{$ʜ{(:Vlמ/OYp\RvEE񇕝9)5.!0e:y?'S.;1XR(O 4 X8AIQ}4iq@ ʣ&:'()2)z}wzHԷ;s wڹ_䳓F<3jG➮m4+H @b֖1:Ŷ>n?߅d5;Y'#Pq} F$m\馲dPw^a BC=ӵ&F\uQ7Hh.:Ce/o~bcD nӉ2@Tn`Bܮu 1}ZXFQVcj 4& l)*/r(1yG|k,_!Uw ++#Ƹ92h kM0]T_4Ѐwb5uYJ`~]],Fu]Q靥l,/o|`jλ*QOh[|EǼrRח{o4+y?ϱk*X)Ul] Z9EP^뵮| !]OIgz!@"^Kg5hNmuUH]M`azd~_"-?0>]݋}[rg*?}u `j-Yh<]`Į.k$ۆBF̎+9—p\\GOgOK Z8 >~띇eK˵]m`4йJSRuǔ4`љ;qR>}ƺJCdK/2u4:U?g]>_nCu"Oix";2e /GEy5Q`2k0AtsBC 4)e+ ,h*P؄mUu_}O*p)B/]uU- !`PU=c,ѐCmz>. d! 
pydantic-1.10.14/docs/index.md000066400000000000000000000165101455251250200160560ustar00rootroot00000000000000[![CI](https://github.com/pydantic/pydantic/workflows/CI/badge.svg?event=push)](https://github.com/pydantic/pydantic/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) [![Coverage](https://coverage-badge.samuelcolvin.workers.dev/pydantic/pydantic.svg?branch=1.10.X-fixes)](https://coverage-badge.samuelcolvin.workers.dev/redirect/pydantic/pydantic?branch=1.10.X-fixes) [![pypi](https://img.shields.io/pypi/v/pydantic.svg)](https://pypi.python.org/pypi/pydantic) [![CondaForge](https://img.shields.io/conda/v/conda-forge/pydantic.svg)](https://anaconda.org/conda-forge/pydantic) [![downloads](https://pepy.tech/badge/pydantic/month)](https://pepy.tech/project/pydantic) [![license](https://img.shields.io/github/license/pydantic/pydantic.svg)](https://github.com/pydantic/pydantic/blob/main/LICENSE) {!.version.md!} Data validation and settings management using Python type annotations. *pydantic* enforces type hints at runtime, and provides user-friendly errors when data is invalid. Define how data should be in pure, canonical Python; validate it with *pydantic*. ## Example {!.tmp_examples/index_main.md!} What's going on here: * `id` is of type int; the annotation-only declaration tells *pydantic* that this field is required. Strings, bytes or floats will be coerced to ints if possible; otherwise an exception will be raised. * `name` is inferred as a string from the provided default; because it has a default, it is not required. * `signup_ts` is a datetime field which is not required (and takes the value ``None`` if it's not supplied). *pydantic* will process either a unix timestamp int (e.g. `1496498400`) or a string representing the date & time. * `friends` uses Python's typing system, and requires a list of integers. As with `id`, integer-like objects will be converted to integers. If validation fails, pydantic will raise an error with a breakdown of what was wrong: {!.tmp_examples/index_error.md!} ## Rationale So *pydantic* uses some cool new language features, but why should I actually go and use it? **plays nicely with your IDE/linter/brain** : There's no new schema definition micro-language to learn. If you know how to use Python type hints, you know how to use *pydantic*. Data structures are just instances of classes you define with type annotations, so auto-completion, linting, [mypy](usage/mypy.md), IDEs (especially [PyCharm](pycharm_plugin.md)), and your intuition should all work properly with your validated data. **dual use** : *pydantic's* [BaseSettings](usage/settings.md) class allows *pydantic* to be used in both a "validate this request data" context and in a "load my system settings" context. The main differences are that system settings can be read from environment variables, and more complex objects like DSNs and Python objects are often required. **fast** : *pydantic* has always taken performance seriously; most of the library is compiled with Cython, giving a ~50% speedup, and it's generally as fast or faster than most similar libraries.
**validate complex structures** : use of [recursive *pydantic* models](usage/models.md#recursive-models), `typing`'s [standard types](usage/types.md#standard-library-types) (e.g. `List`, `Tuple`, `Dict` etc.) and [validators](usage/validators.md) allow complex data schemas to be clearly and easily defined, validated, and parsed. **extensible** : *pydantic* allows [custom data types](usage/types.md#custom-data-types) to be defined or you can extend validation with methods on a model decorated with the [`validator`](usage/validators.md) decorator. **dataclasses integration** : As well as `BaseModel`, *pydantic* provides a [`dataclass`](usage/dataclasses.md) decorator which creates (almost) vanilla Python dataclasses with input data parsing and validation. ## Using Pydantic Hundreds of organisations and packages are using *pydantic*, including: [FastAPI](https://fastapi.tiangolo.com/) : a high performance API framework, easy to learn, fast to code and ready for production, based on *pydantic* and Starlette. [Project Jupyter](https://jupyter.org/) : developers of the Jupyter notebook are using *pydantic* [for subprojects](https://github.com/pydantic/pydantic/issues/773), through the FastAPI-based Jupyter server [Jupyverse](https://github.com/jupyter-server/jupyverse), and for [FPS](https://github.com/jupyter-server/fps)'s configuration management. **Microsoft** : are using *pydantic* (via FastAPI) for [numerous services](https://github.com/tiangolo/fastapi/pull/26#issuecomment-463768795), some of which are "getting integrated into the core Windows product and some Office products." **Amazon Web Services** : are using *pydantic* in [gluon-ts](https://github.com/awslabs/gluon-ts), an open-source probabilistic time series modeling library. **The NSA** : are using *pydantic* in [WALKOFF](https://github.com/nsacyber/WALKOFF), an open-source automation framework. **Uber** : are using *pydantic* in [Ludwig](https://github.com/uber/ludwig), an open-source TensorFlow wrapper. **Cuenca** : are a Mexican neobank that uses *pydantic* for several internal tools (including API validation) and for open source projects like [stpmex](https://github.com/cuenca-mx/stpmex-python), which is used to process real-time, 24/7, inter-bank transfers in Mexico. [The Molecular Sciences Software Institute](https://molssi.org) : are using *pydantic* in [QCFractal](https://github.com/MolSSI/QCFractal), a massively distributed compute framework for quantum chemistry. [Reach](https://www.reach.vote) : trusts *pydantic* (via FastAPI) and [*arq*](https://github.com/samuelcolvin/arq) (Samuel's excellent asynchronous task queue) to reliably power multiple mission-critical microservices. [Robusta.dev](https://robusta.dev/) : are using *pydantic* to automate Kubernetes troubleshooting and maintenance. For example, their open source [tools to debug and profile Python applications on Kubernetes](https://home.robusta.dev/python/) use *pydantic* models. For a more comprehensive list of open-source projects using *pydantic* see the [list of dependents on github](https://github.com/pydantic/pydantic/network/dependents). ## Discussion of Pydantic Podcasts and videos discussing pydantic. [Talk Python To Me](https://talkpython.fm/episodes/show/313/automate-your-data-exchange-with-pydantic){target=_blank} : Michael Kennedy and Samuel Colvin, the creator of *pydantic*, dive into the history of pydantic and its many uses and benefits. 
[Podcast.\_\_init\_\_](https://www.pythonpodcast.com/pydantic-data-validation-episode-263/){target=_blank} : Discussion about where *pydantic* came from and ideas for where it might go next with Samuel Colvin, the creator of pydantic. [Python Bytes Podcast](https://pythonbytes.fm/episodes/show/157/oh-hai-pandas-hold-my-hand){target=_blank} : "*This is a sweet simple framework that solves some really nice problems... Data validations and settings management using Python type annotations, and it's the Python type annotations that makes me really extra happy... It works automatically with all the IDE's you already have.*" --Michael Kennedy [Python pydantic Introduction – Give your data classes super powers](https://www.youtube.com/watch?v=WJmqgJn9TXg){target=_blank} : a talk by Alexander Hultnér, originally for the Python Pizza Conference, introducing new users to pydantic and walking through the core features of pydantic. pydantic-1.10.14/docs/install.md000066400000000000000000000060411455251250200164130ustar00rootroot00000000000000Installation is as simple as:

```bash
pip install 'pydantic<2'
```

*pydantic* has no required dependencies except Python 3.7, 3.8, 3.9, 3.10 or 3.11 and [`typing-extensions`](https://pypi.org/project/typing-extensions/). If you've got Python 3.7+ and `pip` installed, you're good to go. Pydantic is also available on [conda](https://www.anaconda.com) under the [conda-forge](https://conda-forge.org) channel:

```bash
conda install 'pydantic<2' -c conda-forge
```

## Compiled with Cython *pydantic* can optionally be compiled with [cython](https://cython.org/), which should give a 30-50% performance improvement. By default `pip install` provides optimized binaries via [PyPI](https://pypi.org/project/pydantic/#files) for Linux, MacOS and 64bit Windows. If you're installing manually, install `cython<3` (Pydantic 1.x is incompatible with Cython v3 and above) before installing *pydantic*, and compilation should happen automatically. To test if *pydantic* is compiled run:

```py
import pydantic

print('compiled:', pydantic.compiled)
```

### Performance vs package size trade-off Compiled binaries can increase the size of your Python environment. If for some reason you want to reduce the size of your *pydantic* installation, you can avoid installing any binaries using the [`pip --no-binary`](https://pip.pypa.io/en/stable/cli/pip_install/#install-no-binary) option. Make sure `Cython` is not in your environment, or that you have the `SKIP_CYTHON` environment variable set to avoid re-compiling *pydantic* libraries:

```bash
SKIP_CYTHON=1 pip install --no-binary pydantic 'pydantic<2'
```

!!! note `pydantic` is repeated here intentionally: `--no-binary pydantic` tells `pip` you want no binaries for pydantic, and the next `'pydantic<2'` tells `pip` which package to install.
Alternatively, you can re-compile *pydantic* with custom [build options](https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html); this requires the [`Cython`](https://pypi.org/project/Cython/) package to be installed before re-compiling *pydantic*:

```bash
CFLAGS="-Os -g0 -s" pip install \
  --no-binary pydantic \
  --global-option=build_ext \
  'pydantic<2'
```

## Optional dependencies *pydantic* has two optional dependencies: * If you require email validation you can add [email-validator](https://github.com/JoshData/python-email-validator) * [dotenv file support](usage/settings.md#dotenv-env-support) with `Settings` requires [python-dotenv](https://pypi.org/project/python-dotenv) To install these along with *pydantic*:

```bash
pip install 'pydantic[email]<2'
# or
pip install 'pydantic[dotenv]<2'
# or just
pip install 'pydantic[email,dotenv]<2'
```

Of course, you can also install these requirements manually with `pip install email-validator` and/or `pip install python-dotenv`. ## Install from repository And if you prefer to install *pydantic* directly from the repository:

```bash
pip install git+https://github.com/pydantic/pydantic@1.10.X-fixes#egg=pydantic
# or with extras
pip install git+https://github.com/pydantic/pydantic@1.10.X-fixes#egg=pydantic[email,dotenv]
```

pydantic-1.10.14/docs/logo-white.svg000066400000000000000000000015551455251250200172270ustar00rootroot00000000000000 pydantic-1.10.14/docs/mypy_plugin.md000066400000000000000000000160671455251250200173300ustar00rootroot00000000000000Pydantic works well with [mypy](http://mypy-lang.org/) right [out of the box](usage/mypy.md). However, Pydantic also ships with a mypy plugin that adds a number of important pydantic-specific features to mypy that improve its ability to type-check your code. For example, consider the following script: {!.tmp_examples/mypy_main.md!} Without any special configuration, mypy catches one of the errors (see [here](usage/mypy.md) for usage instructions):

```
13: error: "Model" has no attribute "middle_name"
```

But [with the plugin enabled](#enabling-the-plugin), it catches both:

```
13: error: "Model" has no attribute "middle_name"
16: error: Missing named argument "age" for "Model"
16: error: Missing named argument "list_of_ints" for "Model"
```

With the pydantic mypy plugin, you can fearlessly refactor your models knowing mypy will catch any mistakes if your field names or types change. There are other benefits too! See below for more details. ### Plugin Capabilities #### Generate a signature for `Model.__init__` * Any required fields that don't have dynamically-determined aliases will be included as required keyword arguments. * If `Config.allow_population_by_field_name=True`, the generated signature will use the field names, rather than aliases. * For subclasses of [`BaseSettings`](usage/settings.md), all fields are treated as optional since they may be read from the environment. * If `Config.extra="forbid"` and you don't make use of dynamically-determined aliases, the generated signature will not allow unexpected inputs. * **Optional:** If the [`init_forbid_extra` **plugin setting**](#plugin-settings) is set to `True`, unexpected inputs to `__init__` will raise errors even if `Config.extra` is not `"forbid"`. * **Optional:** If the [`init_typed` **plugin setting**](#plugin-settings) is set to `True`, the generated signature will use the types of the model fields (otherwise they will be annotated as `Any` to allow parsing). A brief example of these checks in action follows below.
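This sketch is invented for illustration (the model and its fields are not from the pydantic docs); with `init_typed = True`, mypy also flags the type mismatch in the last line:

```py
from pydantic import BaseModel


class Model(BaseModel):
    age: int
    name: str = 'Jane Doe'


m1 = Model(age=42)    # OK
m2 = Model()          # plugin error: Missing named argument "age" for "Model"
m3 = Model(age='42')  # flagged only with init_typed=True; at runtime pydantic would coerce '42' to 42
```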
#### Generate a typed signature for `Model.construct` * The [`construct`](usage/models.md#creating-models-without-validation) method is a faster alternative to `__init__` when input data is known to be valid and does not need to be parsed. But because this method performs no runtime validation, static checking is important to detect errors. #### Respect `Config.allow_mutation` * If `Config.allow_mutation` is `False`, you'll get a mypy error if you try to change the value of a model field; cf. [faux immutability](usage/models.md#faux-immutability). #### Respect `Config.orm_mode` * If `Config.orm_mode` is `False`, you'll get a mypy error if you try to call `.from_orm()`; cf. [ORM mode](usage/models.md#orm-mode-aka-arbitrary-class-instances) #### Generate a signature for `dataclasses` * classes decorated with [`@pydantic.dataclasses.dataclass`](usage/dataclasses.md) are type checked the same as standard Python dataclasses * The `@pydantic.dataclasses.dataclass` decorator accepts a `config` keyword argument which has the same meaning as [the `Config` sub-class](usage/model_config.md). #### Respect the type of the `Field`'s `default` and `default_factory` * A field with both a `default` and a `default_factory` will result in an error during static checking. * The type of the `default` and `default_factory` value must be compatible with that of the field. ### Optional Capabilities #### Prevent the use of required dynamic aliases * If the [`warn_required_dynamic_aliases` **plugin setting**](#plugin-settings) is set to `True`, you'll get a mypy error any time you use a dynamically-determined alias or alias generator on a model with `Config.allow_population_by_field_name=False`. * This is important because if such aliases are present, mypy cannot properly type check calls to `__init__`. In this case, it will default to treating all arguments as optional. #### Prevent the use of untyped fields * If the [`warn_untyped_fields` **plugin setting**](#plugin-settings) is set to `True`, you'll get a mypy error any time you create a field on a model without annotating its type. * This is important because non-annotated fields may result in [**validators being applied in a surprising order**](usage/models.md#field-ordering). * In addition, mypy may not be able to correctly infer the type of the field, and may miss checks or raise spurious errors. ### Enabling the Plugin To enable the plugin, just add `pydantic.mypy` to the list of plugins in your [mypy config file](https://mypy.readthedocs.io/en/latest/config_file.html) (this could be `mypy.ini` or `setup.cfg`). To get started, all you need to do is create a `mypy.ini` file with the following contents:

```ini
[mypy]
plugins = pydantic.mypy
```

The plugin is compatible with mypy versions `>=0.910`. See the [mypy usage](usage/mypy.md) and [plugin configuration](#configuring-the-plugin) docs for more details. ### Plugin Settings The plugin offers a few optional strictness flags if you want even stronger checks: * `init_forbid_extra` If enabled, disallow extra arguments to the `__init__` call even when `Config.extra` is not `"forbid"`. * `init_typed` If enabled, include the field types as type hints in the generated signature for the `__init__` method. This means that you'll get mypy errors if you pass an argument that is not already the right type to `__init__`, even if parsing could safely convert the type.
* `warn_required_dynamic_aliases` If enabled, raise a mypy error whenever a model is created for which calls to its `__init__` or `construct` methods require the use of aliases that cannot be statically determined. This is the case, for example, if `allow_population_by_field_name=False` and the model uses an alias generator. * `warn_untyped_fields` If enabled, raise a mypy error whenever a field is declared on a model without explicitly specifying its type. #### Configuring the Plugin To change the values of the plugin settings, create a section in your mypy config file called `[pydantic-mypy]`, and add any key-value pairs for settings you want to override. A `mypy.ini` file with all plugin strictness flags enabled (and some other mypy strictness flags, too) might look like: ```ini [mypy] plugins = pydantic.mypy follow_imports = silent warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True [pydantic-mypy] init_forbid_extra = True init_typed = True warn_required_dynamic_aliases = True warn_untyped_fields = True ``` As of `mypy>=0.900`, mypy config may also be included in the `pyproject.toml` file rather than `mypy.ini`. The same configuration as above would be: ```toml [tool.mypy] plugins = [ "pydantic.mypy" ] follow_imports = "silent" warn_redundant_casts = true warn_unused_ignores = true disallow_any_generics = true check_untyped_defs = true no_implicit_reexport = true # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = true [tool.pydantic-mypy] init_forbid_extra = true init_typed = true warn_required_dynamic_aliases = true warn_untyped_fields = true ``` pydantic-1.10.14/docs/pycharm_plugin.md000066400000000000000000000016551455251250200177740ustar00rootroot00000000000000While pydantic will work well with any IDE out of the box, a [PyCharm plugin](https://plugins.jetbrains.com/plugin/12861-pydantic) offering improved pydantic integration is available on the JetBrains Plugins Repository for PyCharm. You can install the plugin for free from the plugin marketplace (PyCharm's Preferences -> Plugin -> Marketplace -> search "pydantic"). The plugin currently supports the following features: * For `pydantic.BaseModel.__init__`: * Inspection * Autocompletion * Type-checking * For fields of `pydantic.BaseModel`: * Refactor-renaming fields updates `__init__` calls, and affects sub- and super-classes * Refactor-renaming `__init__` keyword arguments updates field names, and affects sub- and super-classes More information can be found on the [official plugin page](https://plugins.jetbrains.com/plugin/12861-pydantic) and [Github repository](https://github.com/koxudaxi/pydantic-pycharm-plugin). 
pydantic-1.10.14/docs/requirements.txt000066400000000000000000000005241455251250200177070ustar00rootroot00000000000000autoflake==1.5.3
ansi2html==1.8.0
flake8==5.0.4
flake8-quotes==3.3.1
hypothesis==6.54.4
markdown-include==0.8.0
mdx-truly-sane-lists==1.3
mkdocs==1.5.3
mkdocs-exclude==1.0.2
mkdocs-material==9.5.1
pyupgrade==2.37.3
sqlalchemy
orjson
ujson
# waiting for https://github.com/jimporter/mike/issues/154
git+https://github.com/jimporter/mike.git
pydantic-1.10.14/docs/theme/000077500000000000000000000000001455251250200155245ustar00rootroot00000000000000pydantic-1.10.14/docs/theme/announce.html000066400000000000000000000004351455251250200202220ustar00rootroot00000000000000 Pydantic's commercial roadmap is out 🚀, and we'd love to hear your feedback 📢! pydantic-1.10.14/docs/theme/main.html000066400000000000000000000004151455251250200173360ustar00rootroot00000000000000{% extends "base.html" %} {% block announce %} {% include 'announce.html' ignore missing %} {% endblock %} {% block content %} {{ super() }} {% endblock %} pydantic-1.10.14/docs/usage/000077500000000000000000000000001455251250200155265ustar00rootroot00000000000000pydantic-1.10.14/docs/usage/dataclasses.md000066400000000000000000000126171455251250200203460ustar00rootroot00000000000000If you don't want to use _pydantic_'s `BaseModel` you can instead get the same data validation on standard [dataclasses](https://docs.python.org/3/library/dataclasses.html) (introduced in Python 3.7). {!.tmp_examples/dataclasses_main.md!} !!! note Keep in mind that `pydantic.dataclasses.dataclass` is a drop-in replacement for `dataclasses.dataclass` with validation, **not** a replacement for `pydantic.BaseModel` (with a small difference in how [initialization hooks](#initialize-hooks) work). There are cases where subclassing `pydantic.BaseModel` is the better choice. For more information and discussion see [pydantic/pydantic#710](https://github.com/pydantic/pydantic/issues/710). You can use all the standard _pydantic_ field types, and the resulting dataclass will be identical to the one created by the standard library `dataclass` decorator. The underlying model and its schema can be accessed through `__pydantic_model__`. Also, fields that require a `default_factory` can be specified by either a `pydantic.Field` or a `dataclasses.field`. {!.tmp_examples/dataclasses_default_schema.md!} `pydantic.dataclasses.dataclass`'s arguments are the same as the standard decorator, except for one extra keyword argument `config`, which has the same meaning as [Config](model_config.md). !!! warning After v1.2, [The Mypy plugin](../mypy_plugin.md) must be installed to type check _pydantic_ dataclasses. For more information about combining validators with dataclasses, see [dataclass validators](validators.md#dataclass-validators). ## Dataclass Config If you want to modify the `Config` like you would with a `BaseModel`, you have three options: {!.tmp_examples/dataclasses_config.md!} !!! warning After v1.10, _pydantic_ dataclasses support `Config.extra` but some default behaviour of stdlib dataclasses may prevail. For example, when `print`ing a _pydantic_ dataclass with allowed extra fields, it will still use the `__str__` method of the stdlib dataclass and show only the required fields. This may be improved further in the future. ## Nested dataclasses Nested dataclasses are supported both in dataclasses and normal models. {!.tmp_examples/dataclasses_nested.md!} Dataclasses attributes can be populated by tuples, dictionaries or instances of the dataclass itself.
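Before moving on to stdlib interoperability, here is a rough, self-contained sketch of the behaviour described above (the dataclasses and values are invented for illustration):

```py
from datetime import datetime
from typing import Optional

from pydantic.dataclasses import dataclass


@dataclass
class Meta:
    author: str
    created: Optional[datetime] = None


@dataclass
class Article:
    id: int
    meta: Meta


# nested dataclass fields can be populated from plain dicts (or tuples)
article = Article(id='42', meta={'author': 'jane', 'created': '2032-04-23T10:20:30'})
assert article.id == 42                   # '42' was coerced to an int
assert isinstance(article.meta, Meta)     # the dict was converted to a Meta instance
assert article.meta.created.year == 2032  # the string was parsed to a datetime
```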
## Stdlib dataclasses and _pydantic_ dataclasses ### Convert stdlib dataclasses into _pydantic_ dataclasses Stdlib dataclasses (nested or not) can be easily converted into _pydantic_ dataclasses by just decorating them with `pydantic.dataclasses.dataclass`. _Pydantic_ will enhance the given stdlib dataclass but won't alter its default behaviour (i.e. it still runs without validation). It will instead create a wrapper around it to trigger validation that will act like a plain proxy. The stdlib dataclass can still be accessed via the `__dataclass__` attribute (see example below). {!.tmp_examples/dataclasses_stdlib_to_pydantic.md!} ### Choose when to trigger validation As soon as your stdlib dataclass has been decorated with the _pydantic_ dataclass decorator, magic methods are added to validate input data. If you want, you can still keep using your dataclass and choose when to trigger validation. {!.tmp_examples/dataclasses_stdlib_run_validation.md!} ### Inherit from stdlib dataclasses Stdlib dataclasses (nested or not) can also be inherited and _pydantic_ will automatically validate all the inherited fields. {!.tmp_examples/dataclasses_stdlib_inheritance.md!} ### Use of stdlib dataclasses with `BaseModel` Bear in mind that stdlib dataclasses (nested or not) are **automatically converted** into _pydantic_ dataclasses when mixed with `BaseModel`! Furthermore, the generated _pydantic_ dataclass will have the **exact same configuration** (`order`, `frozen`, ...) as the original one. {!.tmp_examples/dataclasses_stdlib_with_basemodel.md!} ### Use custom types Since stdlib dataclasses are automatically converted to add validation, using custom types may cause some unexpected behaviour. In this case you can simply add `arbitrary_types_allowed` in the config! {!.tmp_examples/dataclasses_arbitrary_types_allowed.md!} ## Initialize hooks When you initialize a dataclass, it is possible to execute code *after* validation with the help of `__post_init_post_parse__`. This is not the same as `__post_init__`, which executes code *before* validation. !!! tip If you use a stdlib `dataclass`, you may only have `__post_init__` available and wish the validation to be done before. In this case you can set `Config.post_init_call = 'after_validation'` {!.tmp_examples/dataclasses_post_init_post_parse.md!} Since version **v1.0**, any fields annotated with `dataclasses.InitVar` are passed to both `__post_init__` *and* `__post_init_post_parse__`. {!.tmp_examples/dataclasses_initvars.md!} ### Difference with stdlib dataclasses Note that the `dataclasses.dataclass` from Python stdlib implements only the `__post_init__` method, since it doesn't run a validation step. When substituting usage of `dataclasses.dataclass` with `pydantic.dataclasses.dataclass`, it is recommended to move the code executed in the `__post_init__` method to the `__post_init_post_parse__` method, and only leave behind the part of the code which needs to be executed before validation. ## JSON Dumping _Pydantic_ dataclasses do not feature a `.json()` function. To dump them as JSON, you will need to make use of the `pydantic_encoder` as follows: {!.tmp_examples/dataclasses_json_dumps.md!} pydantic-1.10.14/docs/usage/devtools.md000066400000000000000000000013621455251250200177110ustar00rootroot00000000000000!!! note **Admission:** I (the primary developer of *pydantic*) also develop python-devtools.
[python-devtools](https://python-devtools.helpmanual.io/) (`pip install devtools`) provides a number of tools which are useful during Python development, including `debug()`, an alternative to `print()` which formats output in a way that should be easier to read than `print`, as well as giving information about which file/line the print statement is on and what value was printed. *pydantic* integrates with *devtools* by implementing the `__pretty__` method on most public classes. In particular `debug()` is useful when inspecting models: {!.tmp_examples/devtools_main.md!} Will output in your terminal: {!.tmp_examples/devtools_main.html!} pydantic-1.10.14/docs/usage/exporting_models.md000066400000000000000000000176551455251250200214430ustar00rootroot00000000000000As well as accessing model attributes directly via their names (e.g. `model.foobar`), models can be converted and exported in a number of ways: ## `model.dict(...)` This is the primary way of converting a model to a dictionary. Sub-models will be recursively converted to dictionaries. Arguments: * `include`: fields to include in the returned dictionary; see [below](#advanced-include-and-exclude) * `exclude`: fields to exclude from the returned dictionary; see [below](#advanced-include-and-exclude) * `by_alias`: whether field aliases should be used as keys in the returned dictionary; default `False` * `exclude_unset`: whether fields which were not explicitly set when creating the model should be excluded from the returned dictionary; default `False`. Prior to **v1.0**, `exclude_unset` was known as `skip_defaults`; use of `skip_defaults` is now deprecated * `exclude_defaults`: whether fields which are equal to their default values (whether set or otherwise) should be excluded from the returned dictionary; default `False` * `exclude_none`: whether fields which are equal to `None` should be excluded from the returned dictionary; default `False` Example: {!.tmp_examples/exporting_models_dict.md!} ## `dict(model)` and iteration *pydantic* models can also be converted to dictionaries using `dict(model)`, and you can also iterate over a model's fields using `for field_name, value in model:`. With this approach the raw field values are returned, so sub-models will not be converted to dictionaries. Example: {!.tmp_examples/exporting_models_iterate.md!} ## `model.copy(...)` `copy()` allows models to be duplicated, which is particularly useful for immutable models. Arguments: * `include`: fields to include in the returned dictionary; see [below](#advanced-include-and-exclude) * `exclude`: fields to exclude from the returned dictionary; see [below](#advanced-include-and-exclude) * `update`: a dictionary of values to change when creating the copied model * `deep`: whether to make a deep copy of the new model; default `False` Example: {!.tmp_examples/exporting_models_copy.md!} ## `model.json(...)` The `.json()` method will serialise a model to JSON.
(For models with a [custom root type](models.md#custom-root-types), only the value for the `__root__` key is serialised) Arguments: * `include`: fields to include in the returned dictionary; see [below](#advanced-include-and-exclude) * `exclude`: fields to exclude from the returned dictionary; see [below](#advanced-include-and-exclude) * `by_alias`: whether field aliases should be used as keys in the returned dictionary; default `False` * `exclude_unset`: whether fields which were not set when creating the model and have their default values should be excluded from the returned dictionary; default `False`. Prior to **v1.0**, `exclude_unset` was known as `skip_defaults`; use of `skip_defaults` is now deprecated * `exclude_defaults`: whether fields which are equal to their default values (whether set or otherwise) should be excluded from the returned dictionary; default `False` * `exclude_none`: whether fields which are equal to `None` should be excluded from the returned dictionary; default `False` * `encoder`: a custom encoder function passed to the `default` argument of `json.dumps()`; defaults to a custom encoder designed to take care of all common types * `**dumps_kwargs`: any other keyword arguments are passed to `json.dumps()`, e.g. `indent`. *pydantic* can serialise many commonly used types to JSON (e.g. `datetime`, `date` or `UUID`) which would normally fail with a simple `json.dumps(foobar)`. {!.tmp_examples/exporting_models_json.md!} ### `json_encoders` Serialisation can be customised on a model using the `json_encoders` config property; the keys should be types (or names of types for forward references), and the values should be functions which serialise that type (see the example below): {!.tmp_examples/exporting_models_json_encoders.md!} By default, `timedelta` is encoded as a simple float of total seconds. The `timedelta_isoformat` is provided as an optional alternative which implements [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time diff encoding. The `json_encoders` are also merged during model inheritance, with the child encoders taking precedence over the parent's. {!.tmp_examples/exporting_models_json_encoders_merge.md!} ### Serialising self-reference or other models By default, models are serialised as dictionaries. If you want to serialise them differently, you can add `models_as_dict=False` when calling the `json()` method and add the classes of the model in `json_encoders`. In case of forward references, you can use a string with the class name instead of the class itself. {!.tmp_examples/exporting_models_json_forward_ref.md!} ### Serialising subclasses !!! note New in version **v1.5**. Subclasses of common types were not automatically serialised to JSON before **v1.5**. Subclasses of common types are automatically encoded like their super-classes: {!.tmp_examples/exporting_models_json_subclass.md!} ### Custom JSON (de)serialisation To improve the performance of encoding and decoding JSON, alternative JSON implementations (e.g. [ujson](https://pypi.python.org/pypi/ujson)) can be used via the `json_loads` and `json_dumps` properties of `Config`. {!.tmp_examples/exporting_models_ujson.md!} `ujson` generally cannot be used to dump JSON since it doesn't support encoding of objects like datetimes and does not accept a `default` fallback function argument. To do this, you may use another library like [orjson](https://github.com/ijl/orjson).
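For example, a minimal sketch of wiring `orjson` in (this assumes `orjson` is installed; the model is invented):

```py
import orjson

from pydantic import BaseModel


def orjson_dumps(v, *, default):
    # orjson.dumps returns bytes, to match standard json.dumps we need to decode
    return orjson.dumps(v, default=default).decode()


class User(BaseModel):
    id: int

    class Config:
        json_loads = orjson.loads
        json_dumps = orjson_dumps


print(User(id='1').json())
#> {"id": 1}
```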
{!.tmp_examples/exporting_models_orjson.md!} Note that `orjson` takes care of `datetime` encoding natively, making it faster than `json.dumps` but meaning you cannot always customise the encoding using `Config.json_encoders`. ## `pickle.dumps(model)` Using the same plumbing as `copy()`, *pydantic* models support efficient pickling and unpickling. {!.tmp_examples/exporting_models_pickle.md!} ## Advanced include and exclude The `dict`, `json`, and `copy` methods support `include` and `exclude` arguments which can either be sets or dictionaries. This allows nested selection of which fields to export: {!.tmp_examples/exporting_models_exclude1.md!} The `True` indicates that we want to exclude or include an entire key, just as if we included it in a set. Of course, the same can be done at any depth level. Special care must be taken when including or excluding fields from a list or tuple of submodels or dictionaries. In this scenario, `dict` and related methods expect integer keys for element-wise inclusion or exclusion. To exclude a field from **every** member of a list or tuple, the dictionary key `'__all__'` can be used as follows: {!.tmp_examples/exporting_models_exclude2.md!} The same holds for the `json` and `copy` methods. ### Model and field level include and exclude In addition to the explicit arguments `exclude` and `include` passed to `dict`, `json` and `copy` methods, we can also pass the `include`/`exclude` arguments directly to the `Field` constructor or the equivalent `field` entry in the model's `Config` class: {!.tmp_examples/exporting_models_exclude3.md!} In the case where multiple strategies are used, `exclude`/`include` fields are merged according to the following rules: * First, model config level settings (via the `"fields"` entry) are merged per field with the field constructor settings (i.e. `Field(..., exclude=True)`), with the field constructor taking priority. * The resulting settings are merged per class with the explicit settings on `dict`, `json`, `copy` calls, with the explicit settings taking priority. Note that while merging settings, `exclude` entries are merged by computing the "union" of keys, while `include` entries are merged by computing the "intersection" of keys. The resulting merged exclude settings: {!.tmp_examples/exporting_models_exclude4.md!} are the same as using merged include settings as follows: {!.tmp_examples/exporting_models_exclude5.md!} pydantic-1.10.14/docs/usage/model_config.md000066400000000000000000000207641455251250200205060ustar00rootroot00000000000000Behaviour of _pydantic_ can be controlled via the `Config` class on a model or a _pydantic_ dataclass.
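For instance, a minimal sketch of the pattern (the model is invented; `max_anystr_length` is one of the options listed below):

```py
from pydantic import BaseModel, ValidationError


class Model(BaseModel):
    v: str

    class Config:
        max_anystr_length = 10


try:
    Model(v='this value is far too long')
except ValidationError as e:
    print(e)  # reports that v must have at most 10 characters
```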
{!.tmp_examples/model_config_main.md!} Also, you can specify config options as model class kwargs: {!.tmp_examples/model_config_class_kwargs.md!} Similarly, if using the `@dataclass` decorator: {!.tmp_examples/model_config_dataclass.md!} ## Options **`title`** : the title for the generated JSON Schema **`anystr_strip_whitespace`** : whether to strip leading and trailing whitespace for str & byte types (default: `False`) **`anystr_upper`** : whether to make all characters uppercase for str & byte types (default: `False`) **`anystr_lower`** : whether to make all characters lowercase for str & byte types (default: `False`) **`min_anystr_length`** : the min length for str & byte types (default: `0`) **`max_anystr_length`** : the max length for str & byte types (default: `None`) **`validate_all`** : whether to validate field defaults (default: `False`) **`extra`** : whether to ignore, allow, or forbid extra attributes during model initialization. Accepts the string values of `'ignore'`, `'allow'`, or `'forbid'`, or values of the `Extra` enum (default: `Extra.ignore`). `'forbid'` will cause validation to fail if extra attributes are included, `'ignore'` will silently ignore any extra attributes, and `'allow'` will assign the attributes to the model. **`allow_mutation`** : whether or not models are faux-immutable, i.e. whether `__setattr__` is allowed (default: `True`) **`frozen`** !!! warning This parameter is in beta : setting `frozen=True` does everything that `allow_mutation=False` does, and also generates a `__hash__()` method for the model. This makes instances of the model potentially hashable if all the attributes are hashable. (default: `False`) **`use_enum_values`** : whether to populate models with the `value` property of enums, rather than the raw enum. This may be useful if you want to serialise `model.dict()` later (default: `False`) **`fields`** : a `dict` containing schema information for each field; this is equivalent to using [the `Field` class](schema.md), except when a field is already defined through an annotation or the `Field` class, in which case only `alias`, `include`, `exclude`, `min_length`, `max_length`, `regex`, `gt`, `lt`, `ge`, `le`, `multiple_of`, `max_digits`, `decimal_places`, `min_items`, `max_items`, `unique_items` and `allow_mutation` can be set (for example you cannot set `default` or `default_factory`) (default: `None`) **`validate_assignment`** : whether to perform validation on *assignment* to attributes (default: `False`) **`allow_population_by_field_name`** : whether an aliased field may be populated by its name as given by the model attribute, as well as the alias (default: `False`) !!! note The name of this configuration setting was changed in **v1.0** from `allow_population_by_alias` to `allow_population_by_field_name`. **`error_msg_templates`** : a `dict` used to override the default error message templates. Pass in a dictionary with keys matching the error messages you want to override (default: `{}`) **`arbitrary_types_allowed`** : whether to allow arbitrary user types for fields (they are validated simply by checking if the value is an instance of the type). If `False`, `RuntimeError` will be raised on model declaration (default: `False`). See an example in [Field Types](types.md#arbitrary-types-allowed).
**`orm_mode`** : whether to allow usage of [ORM mode](models.md#orm-mode-aka-arbitrary-class-instances) **`getter_dict`** : a custom class (which should inherit from `GetterDict`) to use when decomposing arbitrary classes for validation, for use with `orm_mode`; see [Data binding](models.md#data-binding). **`alias_generator`** : a callable that takes a field name and returns an alias for it; see [the dedicated section](#alias-generator) **`keep_untouched`** : a tuple of types (e.g. descriptors) for a model's default values that should not be changed during model creation and will not be included in the model schemas. **Note**: this means that attributes on the model with *defaults of this type*, not *annotations of this type*, will be left alone. **`schema_extra`** : a `dict` used to extend/update the generated JSON Schema, or a callable to post-process it; see [schema customization](schema.md#schema-customization) **`json_loads`** : a custom function for decoding JSON; see [custom JSON (de)serialisation](exporting_models.md#custom-json-deserialisation) **`json_dumps`** : a custom function for encoding JSON; see [custom JSON (de)serialisation](exporting_models.md#custom-json-deserialisation) **`json_encoders`** : a `dict` used to customise the way types are encoded to JSON; see [JSON Serialisation](exporting_models.md#modeljson) **`underscore_attrs_are_private`** : whether to treat any underscore non-class var attrs as private, or leave them as is; see [Private model attributes](models.md#private-model-attributes) **`copy_on_model_validation`** : string literal to control how model instances are processed during validation, with the following meanings (see [#4093](https://github.com/pydantic/pydantic/pull/4093) for a full discussion of the changes to this field): * `'none'` - models are not copied on validation, they're simply kept "untouched" * `'shallow'` - models are shallow copied (this is the default) * `'deep'` - models are deep copied **`smart_union`** : whether _pydantic_ should try to check all types inside `Union` to prevent undesired coercion; see [the dedicated section](#smart-union) **`post_init_call`** : whether stdlib dataclasses' `__post_init__` should be run before (default behaviour with value `'before_validation'`) or after (value `'after_validation'`) parsing and validation when they are [converted](dataclasses.md#stdlib-dataclasses-and-_pydantic_-dataclasses). **`allow_inf_nan`** : whether to allow infinity (`+inf` and `-inf`) and NaN values for float fields; defaults to `True`, set to `False` for compatibility with `JSON`, see [#3994](https://github.com/pydantic/pydantic/pull/3994) for more details, added in **V1.10** ## Change behaviour globally If you wish to change the behaviour of _pydantic_ globally, you can create your own custom `BaseModel` with a custom `Config`, since the config is inherited: {!.tmp_examples/model_config_change_globally_custom.md!} ## Alias Generator If data source field names do not match your code style (e.g. CamelCase fields), you can automatically generate aliases using `alias_generator`: {!.tmp_examples/model_config_alias_generator.md!} Here camel case refers to ["upper camel case"](https://en.wikipedia.org/wiki/Camel_case) aka pascal case, e.g. `CamelCase`. If you'd instead like to use lower camel case, e.g. `camelCase`, use the `to_lower_camel` function. ## Alias Precedence !!! warning Alias priority logic changed in **v1.4** to resolve buggy and unexpected behaviour in previous versions.
In some circumstances this may represent a **breaking change**; see [#1178](https://github.com/pydantic/pydantic/issues/1178) and the precedence order below for details. In the case where a field's alias may be defined in multiple places, the selected value is determined as follows (in descending order of priority): 1. Set via `Field(..., alias=<alias>)`, directly on the model 2. Defined in `Config.fields`, directly on the model 3. Set via `Field(..., alias=<alias>)`, on a parent model 4. Defined in `Config.fields`, on a parent model 5. Generated by `alias_generator`, regardless of whether it's on the model or a parent !!! note This means an `alias_generator` defined on a child model **does not** take priority over an alias defined on a field in a parent model. For example: {!.tmp_examples/model_config_alias_precedence.md!} ## Smart Union By default, as explained [here](types.md#unions), _pydantic_ tries to validate (and coerce if it can) in the order of the `Union`. So sometimes you may have unexpected coerced data. {!.tmp_examples/model_config_smart_union_off.md!} To prevent this, you can enable `Config.smart_union`. _Pydantic_ will then check all allowed types before even trying to coerce. Be aware that this is of course slower, especially if your `Union` is quite big. {!.tmp_examples/model_config_smart_union_on.md!} !!! warning Note that this option **does not support compound types yet** (e.g. it does not differentiate `List[int]` from `List[str]`). This option will be improved further once a strict mode is added in _pydantic_ and will probably be the default behaviour in v2! {!.tmp_examples/model_config_smart_union_on_edge_case.md!} pydantic-1.10.14/docs/usage/models.md000066400000000000000000000561351455251250200173450ustar00rootroot00000000000000The primary means of defining objects in *pydantic* is via models (models are simply classes which inherit from `BaseModel`). You can think of models as similar to types in strictly typed languages, or as the requirements of a single endpoint in an API. Untrusted data can be passed to a model, and after parsing and validation *pydantic* guarantees that the fields of the resultant model instance will conform to the field types defined on the model. !!! note *pydantic* is primarily a parsing library, **not a validation library**. Validation is a means to an end: building a model which conforms to the types and constraints provided. In other words, *pydantic* guarantees the types and constraints of the output model, not the input data. This might sound like an esoteric distinction, but it is not. If you're unsure what this means or how it might affect your usage you should read the section about [Data Conversion](#data-conversion) below. Although validation is not the main purpose of *pydantic*, you **can** use this library for custom [validation](validators.md). ## Basic model usage

```py
from pydantic import BaseModel


class User(BaseModel):
    id: int
    name = 'Jane Doe'
```

`User` here is a model with two fields: `id`, which is an integer and is required, and `name`, which is a string and is not required (it has a default value). The type of `name` is inferred from the default value, and so a type annotation is not required (however note [this](#field-ordering) warning about field order when some fields do not have type annotations).

```py
user = User(id='123')
user_x = User(id='123.45')
```

`user` here is an instance of `User`. Initialisation of the object will perform all parsing and validation; if no `ValidationError` is raised, you know the resulting model instance is valid.
```py
assert user.id == 123
assert user_x.id == 123
assert isinstance(user_x.id, int)  # Note that 123.45 was cast to an int and its value is 123
```

More details on the casting in the case of `user_x` can be found in [Data Conversion](#data-conversion). Fields of a model can be accessed as normal attributes of the user object. The string '123' has been cast to an int as per the field type.

```py
assert user.name == 'Jane Doe'
```

`name` wasn't set when user was initialised, so it has the default value.

```py
assert user.__fields_set__ == {'id'}
```

The fields which were supplied when user was initialised.

```py
assert user.dict() == dict(user) == {'id': 123, 'name': 'Jane Doe'}
```

Either `.dict()` or `dict(user)` will provide a dict of fields, but `.dict()` can take numerous other arguments.

```py
user.id = 321
assert user.id == 321
```

This model is mutable so field values can be changed. ### Model properties The example above only shows the tip of the iceberg of what models can do. Models possess the following methods and attributes: `dict()` : returns a dictionary of the model's fields and values; cf. [exporting models](exporting_models.md#modeldict) `json()` : returns a JSON string representation of `dict()`; cf. [exporting models](exporting_models.md#modeljson) `copy()` : returns a copy (by default, shallow copy) of the model; cf. [exporting models](exporting_models.md#modelcopy) `parse_obj()` : a utility for loading any object into a model with error handling if the object is not a dictionary; cf. [helper functions](#helper-functions) `parse_raw()` : a utility for loading strings of numerous formats; cf. [helper functions](#helper-functions) `parse_file()` : like `parse_raw()` but for file paths; cf. [helper functions](#helper-functions) `from_orm()` : loads data into a model from an arbitrary class; cf. [ORM mode](#orm-mode-aka-arbitrary-class-instances) `schema()` : returns a dictionary representing the model as JSON Schema; cf. [schema](schema.md) `schema_json()` : returns a JSON string representation of `schema()`; cf. [schema](schema.md) `construct()` : a class method for creating models without running validation; cf. [Creating models without validation](#creating-models-without-validation) `__fields_set__` : Set of names of fields which were set when the model instance was initialised `__fields__` : a dictionary of the model's fields `__config__` : the configuration class for the model, cf. [model config](model_config.md) ## Recursive Models More complex hierarchical data structures can be defined using models themselves as types in annotations. {!.tmp_examples/models_recursive.md!} For self-referencing models, see [postponed annotations](postponed_annotations.md#self-referencing-models). ## ORM Mode (aka Arbitrary Class Instances) Pydantic models can be created from arbitrary class instances to support models that map to ORM objects. To do this: 1. The [Config](model_config.md) property `orm_mode` must be set to `True`. 2. The special constructor `from_orm` must be used to create the model instance. The example here uses SQLAlchemy, but the same approach should work for any ORM. {!.tmp_examples/models_orm_mode.md!} ### Reserved names You may want to name a Column after a reserved SQLAlchemy field. In that case, Field aliases will be convenient: {!.tmp_examples/models_orm_mode_reserved_name.md!} !!! note The example above works because aliases have priority over field names for field population. Accessing `SQLModel`'s `metadata` attribute would lead to a `ValidationError`.
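To give a flavour of ORM mode before the recursive case below, here is a minimal sketch with a plain class standing in for an ORM instance (names invented):

```py
from pydantic import BaseModel


class PetOrm:
    # stands in for an ORM row object
    def __init__(self, *, name: str, species: str):
        self.name = name
        self.species = species


class PetModel(BaseModel):
    name: str
    species: str

    class Config:
        orm_mode = True


pet = PetModel.from_orm(PetOrm(name='Hedwig', species='Owl'))
print(pet)
#> name='Hedwig' species='Owl'
```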
### Recursive ORM models ORM instances will be parsed with `from_orm` recursively as well as at the top level. Here a vanilla class is used to demonstrate the principle, but any ORM class could be used instead. {!.tmp_examples/models_orm_mode_recursive.md!} ### Data binding Arbitrary classes are processed by *pydantic* using the `GetterDict` class (see [utils.py](https://github.com/pydantic/pydantic/blob/1.10.X-fixes/pydantic/utils.py)), which attempts to provide a dictionary-like interface to any class. You can customise how this works by setting your own sub-class of `GetterDict` as the value of `Config.getter_dict` (see [config](model_config.md)). You can also customise class validation using [root_validators](validators.md#root-validators) with `pre=True`. In this case your validator function will be passed a `GetterDict` instance which you may copy and modify. The `GetterDict` instance will be called for each field with a sentinel as a fallback (if no other default value is set). Returning this sentinel means that the field is missing. Any other value will be interpreted as the value of the field. {!.tmp_examples/models_orm_mode_data_binding.md!} ## Error Handling *pydantic* will raise `ValidationError` whenever it finds an error in the data it's validating. !!! note Validation code should not raise `ValidationError` itself, but rather raise `ValueError`, `TypeError` or `AssertionError` (or subclasses of `ValueError` or `TypeError`) which will be caught and used to populate `ValidationError`. One exception will be raised regardless of the number of errors found; that `ValidationError` will contain information about all the errors and how they happened. You can access these errors in several ways: `e.errors()` : method will return a list of errors found in the input data. `e.json()` : method will return a JSON representation of `errors`. `str(e)` : method will return a human-readable representation of the errors. Each error object contains: `loc` : the error's location as a list. The first item in the list will be the field where the error occurred, and if the field is a [sub-model](models.md#recursive-models), subsequent items will be present to indicate the nested location of the error. `type` : a computer-readable identifier of the error type. `msg` : a human-readable explanation of the error. `ctx` : an optional object which contains values required to render the error message. As a demonstration: {!.tmp_examples/models_errors1.md!} ### Custom Errors In your custom data types or validators you should use `ValueError`, `TypeError` or `AssertionError` to raise errors. See [validators](validators.md) for more details on use of the `@validator` decorator. {!.tmp_examples/models_errors2.md!} You can also define your own error classes, which can specify a custom error code, message template, and context: {!.tmp_examples/models_errors3.md!} ## Helper Functions *Pydantic* provides three `classmethod` helper functions on models for parsing data: * **`parse_obj`**: this is very similar to the `__init__` method of the model, except it takes a dict rather than keyword arguments. If the object passed is not a dict a `ValidationError` will be raised. * **`parse_raw`**: this takes a *str* or *bytes* and parses it as *json*, then passes the result to `parse_obj`. Parsing *pickle* data is also supported by setting the `content_type` argument appropriately. * **`parse_file`**: this takes in a file path, reads the file and passes the contents to `parse_raw`.
If `content_type` is omitted, it is inferred from the file's extension. {!.tmp_examples/models_parse.md!} !!! warning To quote the [official `pickle` docs](https://docs.python.org/3/library/pickle.html), "The pickle module is not secure against erroneous or maliciously constructed data. Never unpickle data received from an untrusted or unauthenticated source." !!! info Because it can result in arbitrary code execution, as a security measure, you need to explicitly pass `allow_pickle` to the parsing function in order to load `pickle` data. ### Creating models without validation *pydantic* also provides the `construct()` method, which allows models to be created **without validation**; this can be useful when data has already been validated or comes from a trusted source and you want to create a model as efficiently as possible (`construct()` is generally around 30x faster than creating a model with full validation). !!! warning `construct()` does not do any validation, meaning it can create models which are invalid. **You should only ever use the `construct()` method with data which has already been validated, or which you trust.** {!.tmp_examples/models_construct.md!} The `_fields_set` keyword argument to `construct()` is optional, but allows you to be more precise about which fields were originally set and which weren't. If it's omitted, `__fields_set__` will just be the keys of the data provided. For example, in the example above, if `_fields_set` was not provided, `new_user.__fields_set__` would be `{'id', 'age', 'name'}`. ## Generic Models Pydantic supports the creation of generic models to make it easier to reuse a common model structure. In order to declare a generic model, you perform the following steps: * Declare one or more `typing.TypeVar` instances to use to parameterize your model. * Declare a pydantic model that inherits from `pydantic.generics.GenericModel` and `typing.Generic`, where you pass the `TypeVar` instances as parameters to `typing.Generic`. * Use the `TypeVar` instances as annotations where you will want to replace them with other types or pydantic models. Here is an example using `GenericModel` to create an easily-reused HTTP response payload wrapper: {!.tmp_examples/models_generics.md!} If you set `Config` or make use of `validator` in your generic model definition, it is applied to concrete subclasses in the same way as when inheriting from `BaseModel`. Any methods defined on your generic class will also be inherited. Pydantic's generics also integrate properly with mypy, so you get all the type checking you would expect mypy to provide if you were to declare the type without using `GenericModel`. !!! note Internally, pydantic uses `create_model` to generate a (cached) concrete `BaseModel` at runtime, so there is essentially zero overhead introduced by making use of `GenericModel`. To inherit from a `GenericModel` without replacing the `TypeVar` instance, a class must also inherit from `typing.Generic`: {!.tmp_examples/models_generics_inheritance.md!} You can also create a generic subclass of a `GenericModel` that partially or fully replaces the type parameters in the superclass.
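As a quick sketch of partial and full replacement (class names are invented):

```py
from typing import Generic, TypeVar

from pydantic.generics import GenericModel

DataT = TypeVar('DataT')


class Response(GenericModel, Generic[DataT]):
    data: DataT


class IntResponse(Response[int]):
    # fully replaces DataT with int
    pass


print(Response[str](data='hello'))
#> data='hello'
print(IntResponse(data='1'))  # '1' is coerced to an int
#> data=1
```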
{!.tmp_examples/models_generics_inheritance_extend.md!} If the name of the concrete subclasses is important, you can also override the default behaviour: {!.tmp_examples/models_generics_naming.md!} Using the same `TypeVar` in nested models allows you to enforce typing relationships at different points in your model: {!.tmp_examples/models_generics_nested.md!} Pydantic also treats `GenericModel` similarly to how it treats built-in generic types like `List` and `Dict` when it comes to leaving them unparameterized, or using bounded `TypeVar` instances: * If you don't specify parameters before instantiating the generic model, they will be treated as `Any` * You can parametrize models with one or more *bounded* parameters to add subclass checks Also, like `List` and `Dict`, any parameters specified using a `TypeVar` can later be substituted with concrete types. {!.tmp_examples/models_generics_typevars.md!} ## Dynamic model creation There are some occasions where the shape of a model is not known until runtime. For this *pydantic* provides the `create_model` method to allow models to be created on the fly. {!.tmp_examples/models_dynamic_creation.md!} Here `StaticFoobarModel` and `DynamicFoobarModel` are identical. !!! warning See the note in [Required Optional Fields](#required-optional-fields) for the distinction between an ellipsis as a field default and annotation-only fields. See [pydantic/pydantic#1047](https://github.com/pydantic/pydantic/issues/1047) for more details. Fields are defined by either a tuple of the form `(<type>, <default value>)` or just a default value. The special keyword arguments `__config__` and `__base__` can be used to customise the new model. This includes extending a base model with extra fields. {!.tmp_examples/models_dynamic_inheritance.md!} You can also add validators by passing a dict to the `__validators__` argument. {!.tmp_examples/models_dynamic_validators.md!} ## Model creation from `NamedTuple` or `TypedDict` Sometimes you already use classes in your application that inherit from `NamedTuple` or `TypedDict` and you don't want to duplicate all your information to have a `BaseModel`. For this _pydantic_ provides the `create_model_from_namedtuple` and `create_model_from_typeddict` methods. Those methods have the exact same keyword arguments as `create_model`. {!.tmp_examples/models_from_typeddict.md!} ## Custom Root Types Pydantic models can be defined with a custom root type by declaring the `__root__` field. The root type can be any type supported by pydantic, and is specified by the type hint on the `__root__` field. The root value can be passed to the model `__init__` via the `__root__` keyword argument, or as the first and only argument to `parse_obj`. {!.tmp_examples/models_custom_root_field.md!} If you call the `parse_obj` method for a model with a custom root type with a *dict* as the first argument, the following logic is used: * If the custom root type is a mapping type (e.g. `Dict` or `Mapping`), the argument itself is always validated against the custom root type. * For other custom root types, if the dict has precisely one key with the value `__root__`, the corresponding value will be validated against the custom root type. * Otherwise, the dict itself is validated against the custom root type. This is demonstrated in the following example: {!.tmp_examples/models_custom_root_field_parse_obj.md!} !!!
!!! warning
    Calling the `parse_obj` method on a dict with the single key `"__root__"` for non-mapping custom root types
    is currently supported for backwards compatibility, but is not recommended and may be dropped in a future version.

If you want to access items in the `__root__` field directly or to iterate over the items, you can implement
custom `__iter__` and `__getitem__` functions, as shown in the following example.

{!.tmp_examples/models_custom_root_access.md!}

## Faux Immutability

Models can be configured to be immutable via `allow_mutation = False`. When this is set, attempting to change the
values of instance attributes will raise errors. See [model config](model_config.md) for more details on `Config`.

!!! warning
    Immutability in Python is never strict. If developers are determined/stupid they can always
    modify a so-called "immutable" object.

{!.tmp_examples/models_mutation.md!}

Trying to change `a` caused an error, and `a` remains unchanged. However, the dict `b` is mutable, and the
immutability of `foobar` doesn't stop `b` from being changed.

## Abstract Base Classes

Pydantic models can be used alongside Python's
[Abstract Base Classes](https://docs.python.org/3/library/abc.html) (ABCs).

{!.tmp_examples/models_abc.md!}

## Field Ordering

Field order is important in models for the following reasons:

* validation is performed in the order fields are defined; [fields validators](validators.md)
  can access the values of earlier fields, but not later ones
* field order is preserved in the model [schema](schema.md)
* field order is preserved in [validation errors](#error-handling)
* field order is preserved by [`.dict()` and `.json()` etc.](exporting_models.md#modeldict)

As of **v1.0** all fields with annotations (whether annotation-only or with a default value) will precede
all fields without an annotation. Within their respective groups, fields remain in the order they were defined.

{!.tmp_examples/models_field_order.md!}

!!! warning
    As demonstrated by the example above, combining the use of annotated and non-annotated fields
    in the same model can result in surprising field orderings (this is due to limitations of Python).

    Therefore, **we recommend adding type annotations to all fields**, even when a default value
    would determine the type by itself, to guarantee that field order is preserved.

## Required fields

To declare a field as required, you may declare it using just an annotation, or you may use an ellipsis (`...`)
as the value:

{!.tmp_examples/models_required_fields.md!}

Where `Field` refers to the [field function](schema.md#field-customization).

Here `a`, `b` and `c` are all required. However, use of the ellipsis in `b` will not work well
with [mypy](mypy.md), and as of **v1.0** should be avoided in most cases.

### Required Optional fields

!!! warning
    Since version **v1.2**, annotation-only nullable fields (`Optional[...]`, `Union[None, ...]` and `Any`) and
    nullable fields with an ellipsis (`...`) as the default value no longer mean the same thing.

    In some situations this may cause **v1.2** to not be entirely backwards compatible with earlier **v1.*** releases.

If you want to specify a field that can take a `None` value while still being required,
you can use `Optional` with `...`:

{!.tmp_examples/models_required_field_optional.md!}

In this model, `a`, `b`, and `c` can take `None` as a value. But `a` is optional, while `b` and `c` are required.
`b` and `c` require a value, even if the value is `None`.
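A minimal sketch of these semantics, mirroring the `a`, `b`, `c` fields described above:

```py
from typing import Optional

from pydantic import BaseModel, Field, ValidationError


class Model(BaseModel):
    a: Optional[int]  # not required, defaults to None
    b: Optional[int] = ...  # required, but may be given None
    c: Optional[int] = Field(...)  # required, but may be given None


print(Model(b=1, c=None))
#> a=None b=1 c=None

try:
    Model(a=1)
except ValidationError as e:
    # `b` and `c` are reported as missing
    print(e.errors()[0]['loc'], e.errors()[1]['loc'])
    #> ('b',) ('c',)
```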
## Field with dynamic default value

When declaring a field with a default value, you may want it to be dynamic (i.e. different for each model).
To do this, you may want to use a `default_factory`.

!!! info "In Beta"
    The `default_factory` argument is in **beta**; it was added to *pydantic* in **v1.5** on a
    **provisional basis**. It may change significantly in future releases and its signature or behaviour will not
    be concrete until **v2**. Feedback from the community while it's still provisional would be extremely useful;
    either comment on [#866](https://github.com/pydantic/pydantic/issues/866) or create a new issue.

Example of usage:

{!.tmp_examples/models_default_factory.md!}

Where `Field` refers to the [field function](schema.md#field-customization).

!!! warning
    The `default_factory` expects the field type to be set.

## Automatically excluded attributes

Class variables which begin with an underscore and attributes annotated with `typing.ClassVar` will be
automatically excluded from the model.

## Private model attributes

If you need to vary or manipulate internal attributes on instances of the model, you can declare them
using `PrivateAttr`:

{!.tmp_examples/private_attributes.md!}

Private attribute names must start with an underscore to prevent conflicts with model fields:
both `_attr` and `__attr__` are supported.

If `Config.underscore_attrs_are_private` is `True`, any non-ClassVar underscore attribute will be treated as private:

{!.tmp_examples/private_attributes_underscore_attrs_are_private.md!}

Upon class creation, pydantic constructs `__slots__` filled with private attributes.

## Parsing data into a specified type

Pydantic includes a standalone utility function `parse_obj_as` that can be used to apply the parsing
logic used to populate pydantic models in a more ad-hoc way. This function behaves similarly to
`BaseModel.parse_obj`, but works with arbitrary pydantic-compatible types.

This is especially useful when you want to parse results into a type that is not a direct subclass of `BaseModel`.
For example:

{!.tmp_examples/parse_obj_as.md!}

This function is capable of parsing data into any of the types pydantic can handle as fields of a `BaseModel`.

Pydantic also includes two similar standalone functions called `parse_file_as` and `parse_raw_as`,
which are analogous to `BaseModel.parse_file` and `BaseModel.parse_raw`.

## Data Conversion

*pydantic* may cast input data to force it to conform to model field types,
and in some cases this may result in a loss of information.
For example:

{!.tmp_examples/models_data_conversion.md!}

This is a deliberate decision of *pydantic*, and in general it's the most useful approach; see
[here](https://github.com/pydantic/pydantic/issues/578) for a longer discussion on the subject.

Nevertheless, [strict type checking](types.md#strict-types) is partially supported.

## Model signature

All *pydantic* models will have their signature generated based on their fields:

{!.tmp_examples/models_signature.md!}

An accurate signature is useful for introspection purposes and libraries like `FastAPI` or `hypothesis`.

The generated signature will also respect custom `__init__` functions:

{!.tmp_examples/models_signature_custom_init.md!}

To be included in the signature, a field's alias or name must be a valid Python identifier.
*pydantic* prefers aliases over names, but may use field names if the alias is not a valid Python identifier.

If a field's alias and name are both invalid identifiers, a `**data` argument will be added.
In addition, the `**data` argument will always be present in the signature if `Config.extra` is `Extra.allow`.
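For example, a minimal sketch of inspecting a generated signature (the model is illustrative):

```py
import inspect

from pydantic import BaseModel


class FooModel(BaseModel):
    id: int
    name: str = 'Foo'


# all fields become keyword-only parameters in the generated signature
print(inspect.signature(FooModel))
#> (*, id: int, name: str = 'Foo') -> None
```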
!!! note
    Types in the model signature are the same as declared in model annotations,
    not necessarily all the types that can actually be provided to that field.
    This may be fixed one day once [#1055](https://github.com/pydantic/pydantic/issues/1055) is solved.

## Structural pattern matching

*pydantic* supports structural pattern matching for models, as introduced by
[PEP 636](https://peps.python.org/pep-0636/) in Python 3.10.

{!.tmp_examples/models_structural_pattern_matching.md!}

!!! note
    A match-case statement may seem as if it creates a new model, but don't be fooled;
    it is just syntactic sugar for getting an attribute and either comparing it or declaring and initializing it.

pydantic-1.10.14/docs/usage/mypy.md000066400000000000000000000025221455251250200170470ustar00rootroot00000000000000*pydantic* models work with [mypy](http://mypy-lang.org/) provided you use the annotation-only version of required fields:

{!.tmp_examples/mypy_main.md!}

You can run your code through mypy with:

```bash
mypy \
  --ignore-missing-imports \
  --follow-imports=skip \
  --strict-optional \
  pydantic_mypy_test.py
```

If you call mypy on the example code above, you should see mypy detect the attribute access error:

```
13: error: "Model" has no attribute "middle_name"
```

## Strict Optional

For your code to pass with `--strict-optional`, you need to use `Optional[<type>]` or an alias of `Optional[<type>]`
for all fields with `None` as the default. (This is standard with mypy.)

Pydantic provides a few useful optional or union types:

* `NoneStr` aka. `Optional[str]`
* `NoneBytes` aka. `Optional[bytes]`
* `StrBytes` aka. `Union[str, bytes]`
* `NoneStrBytes` aka. `Optional[StrBytes]`

If these aren't sufficient you can of course define your own.

## Mypy Plugin

Pydantic ships with a mypy plugin that adds a number of important pydantic-specific features to mypy
that improve its ability to type-check your code.

See the [pydantic mypy plugin docs](../mypy_plugin.md) for more details.

## Other pydantic interfaces

Pydantic [dataclasses](dataclasses.md) and the [`validate_arguments` decorator](validation_decorator.md)
should also work well with mypy.

pydantic-1.10.14/docs/usage/postponed_annotations.md000066400000000000000000000040371455251250200225040ustar00rootroot00000000000000!!! note
    Both postponed annotations via the future import and `ForwardRef` require Python 3.7+.

Postponed annotations (as described in [PEP 563](https://www.python.org/dev/peps/pep-0563/)) "just work".

{!.tmp_examples/postponed_annotations_main.md!}

Internally, *pydantic* will call a method similar to `typing.get_type_hints` to resolve annotations.

In cases where the referenced type is not yet defined, `ForwardRef` can be used (although referencing the
type directly or by its string is a simpler solution in the case of
[self-referencing models](#self-referencing-models)).

In some cases, a `ForwardRef` won't be able to be resolved during model creation.
For example, this happens whenever a model references itself as a field type.
When this happens, you'll need to call `update_forward_refs` after the model has been created before it can be used:

{!.tmp_examples/postponed_annotations_forward_ref.md!}

!!! warning
    To resolve strings (type names) into annotations (types), *pydantic* needs a namespace dict in which to
    perform the lookup. For this it uses `module.__dict__`, just like `get_type_hints`.
    This means *pydantic* may not play well with types not defined in the global scope of a module.

For example, this works fine:

{!.tmp_examples/postponed_annotations_works.md!}

While this will break:

{!.tmp_examples/postponed_annotations_broken.md!}

Resolving this is beyond the call for *pydantic*: either remove the future import or declare the types globally.

## Self-referencing Models

Data structures with self-referencing models are also supported. Self-referencing fields will be automatically
resolved after model creation.

Within the model, you can refer to the not-yet-constructed model using a string:

{!.tmp_examples/postponed_annotations_self_referencing_string.md!}

Since Python 3.7, you can also refer to it by its type, provided you import `annotations` (see
[above](postponed_annotations.md) for support depending on Python and *pydantic* versions).

{!.tmp_examples/postponed_annotations_self_referencing_annotations.md!}

pydantic-1.10.14/docs/usage/rich.md000066400000000000000000000005431455251250200167770ustar00rootroot00000000000000Pydantic models may be printed with the [Rich](https://github.com/willmcgugan/rich) library, which will add
additional formatting and color to the output. Here's an example:

![Printing Pydantic models with Rich](./rich_pydantic.png)

See the Rich documentation on [pretty printing](https://rich.readthedocs.io/en/latest/pretty.html) for more information.

pydantic-1.10.14/docs/usage/rich_pydantic.png000066400000000000000000000713701455251250200210640ustar00rootroot00000000000000
pydantic-1.10.14/docs/usage/schema.md000066400000000000000000000232511455251250200173130ustar00rootroot00000000000000*Pydantic* allows auto creation of JSON Schemas from models:

{!.tmp_examples/schema_main.md!}

The generated schemas are compliant with the specifications:
[JSON Schema Core](https://json-schema.org/latest/json-schema-core.html),
[JSON Schema Validation](https://json-schema.org/latest/json-schema-validation.html) and
[OpenAPI](https://github.com/OAI/OpenAPI-Specification).

`BaseModel.schema` will return a dict of the schema, while `BaseModel.schema_json` will return a JSON string
representation of that dict.

Sub-models used are added to the `definitions` JSON attribute and referenced, as per the spec.

All sub-models' (and their sub-models') schemas are put directly in a top-level `definitions` JSON key for
easy re-use and reference.

"Sub-models" with modifications (via the `Field` class) like a custom title, description or default value,
are recursively included instead of referenced.

The `description` for models is taken from either the docstring of the class or the argument `description` to
the `Field` class.

The schema is generated by default using aliases as keys, but it can be generated using model
property names instead by calling `MainModel.schema/schema_json(by_alias=False)`.

The format of `$ref`s (`"#/definitions/FooBar"` above) can be altered by calling `schema()` or `schema_json()`
with the `ref_template` keyword argument, e.g. `ApplePie.schema(ref_template='/schemas/{model}.json#/')`;
here `{model}` will be replaced with the model name using `str.format()`.

## Getting schema of a specified type

*Pydantic* includes two standalone utility functions `schema_of` and `schema_json_of` that can be used to
apply the schema generation logic used for *pydantic* models in a more ad-hoc way.
These functions behave similarly to `BaseModel.schema` and `BaseModel.schema_json`,
but work with arbitrary pydantic-compatible types.

{!.tmp_examples/schema_ad_hoc.md!}

## Field customization

Optionally, the `Field` function can be used to provide extra information about the field and validations.
It has the following arguments:

* `default`: (a positional argument) the default value of the field.
  Since the `Field` replaces the field's default, this first argument can be used to set the default.
  Use ellipsis (`...`) to indicate the field is required.
* `default_factory`: a zero-argument callable that will be called when a default value is needed for this field.
  Among other purposes, this can be used to set dynamic default values.
  It is forbidden to set both `default` and `default_factory`.
* `alias`: the public name of the field
* `title`: if omitted, `field_name.title()` is used
* `description`: if omitted and the annotation is a sub-model, the docstring of the sub-model will be used
* `exclude`: exclude this field when dumping (`.dict` and `.json`) the instance.
  The exact syntax and configuration options are described in detail in the
  [exporting models section](exporting_models.md#advanced-include-and-exclude).
* `include`: include (only) this field when dumping (`.dict` and `.json`) the instance.
  The exact syntax and configuration options are described in detail in the
  [exporting models section](exporting_models.md#advanced-include-and-exclude).
* `const`: this argument *must* be the same as the field's default value if present.
* `gt`: for numeric values (`int`, `float`, `Decimal`), adds a validation of "greater than" and an annotation of `exclusiveMinimum` to the JSON Schema
* `ge`: for numeric values, this adds a validation of "greater than or equal" and an annotation of `minimum` to the JSON Schema
* `lt`: for numeric values, this adds a validation of "less than" and an annotation of `exclusiveMaximum` to the JSON Schema
* `le`: for numeric values, this adds a validation of "less than or equal" and an annotation of `maximum` to the JSON Schema
* `multiple_of`: for numeric values, this adds a validation of "a multiple of" and an annotation of `multipleOf` to the JSON Schema
* `max_digits`: for `Decimal` values, this adds a validation to have a maximum number of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes.
* `decimal_places`: for `Decimal` values, this adds a validation to have at most a number of decimal places allowed. It does not include trailing decimal zeroes.
* `min_items`: for list values, this adds a corresponding validation and an annotation of `minItems` to the JSON Schema
* `max_items`: for list values, this adds a corresponding validation and an annotation of `maxItems` to the JSON Schema
* `unique_items`: for list values, this adds a corresponding validation and an annotation of `uniqueItems` to the JSON Schema
* `min_length`: for string values, this adds a corresponding validation and an annotation of `minLength` to the JSON Schema
* `max_length`: for string values, this adds a corresponding validation and an annotation of `maxLength` to the JSON Schema
* `allow_mutation`: a boolean which defaults to `True`. When False, the field raises a `TypeError` if the field is assigned on an instance. The model config must set `validate_assignment` to `True` for this check to be performed.
* `regex`: for string values, this adds a Regular Expression validation generated from the passed string and an annotation of `pattern` to the JSON Schema

    !!! note
        *pydantic* validates strings using `re.match`, which treats regular expressions as implicitly anchored at the beginning. On the contrary, JSON Schema validators treat the `pattern` keyword as implicitly unanchored, more like what `re.search` does.

        For interoperability, depending on your desired behavior, either explicitly anchor your regular expressions with `^` (e.g. `^foo` to match any string starting with `foo`), or explicitly allow an arbitrary prefix with `.*?` (e.g. `.*?foo` to match any string containing the substring `foo`).

        See [#1631](https://github.com/pydantic/pydantic/issues/1631) for a discussion of possible changes to *pydantic* behavior in **v2**.

* `repr`: a boolean which defaults to `True`. When False, the field shall be hidden from the object representation.
* `**` any other keyword arguments (e.g. `examples`) will be added verbatim to the field's schema

Instead of using `Field`, the `fields` property of [the Config class](model_config.md) can be used to set all of the arguments above except `default`.

### Unenforced Field constraints

If *pydantic* finds constraints which are not being enforced, an error will be raised.
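For example, in this minimal sketch the `str` constraint `max_length` is applied to an `int` field, so the model is rejected at class-creation time:

```py
from pydantic import BaseModel, Field

try:
    class Model(BaseModel):
        foo: int = Field(..., max_length=10)  # max_length is not enforced for int
except ValueError as e:
    # the error lists the constraints that are set but not enforced
    print(e)
```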
If you want to force the constraint to appear in the schema, even though it's not being checked upon parsing,
you can use variadic arguments to `Field()` with the raw schema attribute name:

{!.tmp_examples/schema_unenforced_constraints.md!}

### typing.Annotated Fields

Rather than assigning a `Field` value, it can be specified in the type hint with `typing.Annotated`:

{!.tmp_examples/schema_annotated.md!}

`Field` can only be supplied once per field - an error will be raised if used in `Annotated` and as the
assigned value. Defaults can be set outside `Annotated` as the assigned value or with `Field.default_factory`
inside `Annotated` - the `Field.default` argument is not supported inside `Annotated`.

For versions of Python prior to 3.9, `typing_extensions.Annotated` can be used.

## Modifying schema in custom fields

Custom field types can customise the schema generated for them using the `__modify_schema__` class method;
see [Custom Data Types](types.md#custom-data-types) for more details.

`__modify_schema__` can also take a `field` argument which will have type `Optional[ModelField]`.
*pydantic* will inspect the signature of `__modify_schema__` to determine whether the `field` argument should be included.

{!.tmp_examples/schema_with_field.md!}

## JSON Schema Types

Types, custom field types, and constraints (like `max_length`) are mapped to the corresponding spec formats in the
following priority order (when there is an equivalent available):

1. [JSON Schema Core](http://json-schema.org/latest/json-schema-core.html#rfc.section.4.3.1)
2. [JSON Schema Validation](http://json-schema.org/latest/json-schema-validation.html)
3. [OpenAPI Data Types](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#data-types)
4. The standard `format` JSON field is used to define *pydantic* extensions for more complex `string` sub-types.

The field schema mapping from Python / *pydantic* to JSON Schema is done as follows:

{!.tmp_schema_mappings.html!}

## Top-level schema generation

You can also generate a top-level JSON Schema that only includes a list of models and related
sub-models in its `definitions`:

{!.tmp_examples/schema_top_level.md!}

## Schema customization

You can customize the generated `$ref` JSON location: the definitions are always stored under the key
`definitions`, but a specified prefix can be used for the references.

This is useful if you need to extend or modify the JSON Schema default definitions location, e.g. with OpenAPI:

{!.tmp_examples/schema_custom.md!}

It's also possible to extend/override the generated JSON schema in a model.

To do it, use the `Config` sub-class attribute `schema_extra`.
For example, you could add `examples` to the JSON Schema:

{!.tmp_examples/schema_with_example.md!}

For more fine-grained control, you can alternatively set `schema_extra` to a callable and post-process the
generated schema. The callable can have one or two positional arguments. The first will be the schema dictionary.
The second, if accepted, will be the model class. The callable is expected to mutate the schema dictionary
*in-place*; the return value is not used.

For example, the `title` key can be removed from the model's `properties`:

{!.tmp_examples/schema_extra_callable.md!}
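A minimal sketch of that pattern (the model and the keys removed are illustrative):

```py
from typing import Any, Dict, Type

from pydantic import BaseModel


class Person(BaseModel):
    name: str

    class Config:
        @staticmethod
        def schema_extra(schema: Dict[str, Any], model: Type['Person']) -> None:
            # mutate the generated schema in-place; the return value is ignored
            for prop in schema.get('properties', {}).values():
                prop.pop('title', None)


print(Person.schema())
#> {'title': 'Person', 'type': 'object', 'properties': {'name': {'type': 'string'}}, 'required': ['name']}
```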
pydantic-1.10.14/docs/usage/settings.md000066400000000000000000000273651455251250200177250ustar00rootroot00000000000000One of pydantic's most useful applications is settings management.

If you create a model that inherits from `BaseSettings`, the model initialiser will attempt to determine the
values of any fields not passed as keyword arguments by reading from the environment.
(Default values will still be used if the matching environment variable is not set.)

This makes it easy to:

* Create a clearly-defined, type-hinted application configuration class
* Automatically read modifications to the configuration from environment variables
* Manually override specific settings in the initialiser where desired (e.g. in unit tests)

For example:

{!.tmp_examples/settings_main.md!}

## Environment variable names

The following rules are used to determine which environment variable(s) are read for a given field:

* By default, the environment variable name is built by concatenating the prefix and field name.
    * For example, to override `special_function` above, you could use:

            export my_prefix_special_function='foo.bar'

    * Note 1: The default prefix is an empty string.
    * Note 2: Field aliases are ignored when building the environment variable name.
* Custom environment variable names can be set in two ways:
    * `Config.fields['field_name']['env']` (see `auth_key` and `redis_dsn` above)
    * `Field(..., env=...)` (see `api_key` above)
* When specifying custom environment variable names, either a string or a list of strings may be provided.
    * When specifying a list of strings, order matters: the first detected value is used.
    * For example, for `redis_dsn` above, `service_redis_dsn` would take precedence over `redis_url`.

!!! warning
    Since **v1.0** *pydantic* does not consider field aliases when finding environment variables to populate
    settings models; use `env` instead as described above.

    To aid the transition from aliases to `env`, a warning will be raised when aliases are used on settings models
    without a custom env var name. If you really mean to use aliases, either ignore the warning or set `env`
    to suppress it.

Case-sensitivity can be turned on through the `Config`:

{!.tmp_examples/settings_case_sensitive.md!}

When `case_sensitive` is `True`, the environment variable names must match field names (optionally with a prefix),
so in this example `redis_host` could only be modified via `export redis_host`. If you want to name environment
variables all upper-case, you should name the attributes all upper-case too. You can still name environment
variables anything you like through `Field(..., env=...)`.

In Pydantic **v1** `case_sensitive` is `False` by default and all variable names are converted to lower-case
internally. If you want to define upper-case variable names on nested models like `SubModel` you have to
set `case_sensitive=True` to disable this behaviour.

!!! note
    On Windows, Python's `os` module always treats environment variables as case-insensitive, so the
    `case_sensitive` config setting will have no effect - settings will always be updated ignoring case.

## Parsing environment variable values

For most simple field types (such as `int`, `float`, `str`, etc.), the environment variable value is parsed
the same way it would be if passed directly to the initialiser (as a string).

Complex types like `list`, `set`, `dict`, and sub-models are populated from the environment by treating the
environment variable's value as a JSON-encoded string.

Another way to populate nested complex variables is to configure your model with the `env_nested_delimiter`
config setting, then use an env variable with a name pointing to the nested model fields.
What it does is simply explode your variable into nested models or dicts.
So if you define a variable `FOO__BAR__BAZ=123` it will convert it into `FOO={'BAR': {'BAZ': 123}}`.
If you have multiple variables with the same structure they will be merged.

With the following environment variables:

```bash
# your environment
export V0=0
export SUB_MODEL='{"v1": "json-1", "v2": "json-2"}'
export SUB_MODEL__V2=nested-2
export SUB_MODEL__V3=3
export SUB_MODEL__DEEP__V4=v4
```

You could load a settings module thus:

{!.tmp_examples/settings_nested_env.md!}

`env_nested_delimiter` can be configured via the `Config` class as shown above,
or via the `_env_nested_delimiter` keyword argument on instantiation.

JSON is only parsed in top-level fields; if you need to parse JSON in sub-models,
you will need to implement validators on those models.

Nested environment variables take precedence over the top-level environment variable JSON
(e.g. in the example above, `SUB_MODEL__V2` trumps `SUB_MODEL`).

You may also populate a complex type by providing your own parsing function to
the `parse_env_var` classmethod in the Config object.

{!.tmp_examples/settings_with_custom_parsing.md!}

## Dotenv (.env) support

!!! note
    dotenv file parsing requires [python-dotenv](https://pypi.org/project/python-dotenv/) to be installed.
    This can be done with either `pip install python-dotenv` or `pip install pydantic[dotenv]`.

Dotenv files (generally named `.env`) are a common pattern that make it easy to use environment variables in a
platform-independent manner.

A dotenv file follows the same general principles of all environment variables, and looks something like:

```bash
# ignore comment
ENVIRONMENT="production"
REDIS_ADDRESS=localhost:6379
MEANING_OF_LIFE=42
MY_VAR='Hello world'
```

Once you have your `.env` file filled with variables, *pydantic* supports loading it in two ways:

**1.** setting `env_file` (and `env_file_encoding` if you don't want the default encoding of your OS)
on `Config` in a `BaseSettings` class:

```py
class Settings(BaseSettings):
    ...

    class Config:
        env_file = '.env'
        env_file_encoding = 'utf-8'
```

**2.** instantiating a `BaseSettings` derived class with the `_env_file` keyword argument
(and the `_env_file_encoding` if needed):

```py
settings = Settings(_env_file='prod.env', _env_file_encoding='utf-8')
```

In either case, the value of the passed argument can be any valid path or filename, either absolute or relative
to the current working directory. From there, *pydantic* will handle everything for you by loading in your
variables and validating them.

!!! note
    If a filename is specified for `env_file`, Pydantic will only check the current working directory and
    won't check any parent directories for the `.env` file.

Even when using a dotenv file, *pydantic* will still read environment variables as well as the dotenv file,
**environment variables will always take priority over values loaded from a dotenv file**.

Passing a file path via the `_env_file` keyword argument on instantiation (method 2) will override
the value (if any) set on the `Config` class. If the above snippets were used in conjunction, `prod.env` would be
loaded while `.env` would be ignored.

If you need to load multiple dotenv files, you can pass the file paths as a `list` or `tuple`.
Later files in the list/tuple will take priority over earlier files.

```py
from pydantic import BaseSettings


class Settings(BaseSettings):
    ...

    class Config:
        # `.env.prod` takes priority over `.env`
        env_file = '.env', '.env.prod'
```
You can also use the `_env_file` keyword argument to tell Pydantic not to load any file at all
(even if one is set in the `Config` class) by passing `None` as its value on instantiation,
e.g. `settings = Settings(_env_file=None)`.

Because python-dotenv is used to parse the file, bash-like semantics such as `export` can be used which
(depending on your OS and environment) may allow your dotenv file to also be used with `source`,
see [python-dotenv's documentation](https://saurabh-kumar.com/python-dotenv/#usages) for more details.

## Secret Support

Placing secret values in files is a common pattern to provide sensitive configuration to an application.

A secret file follows the same principle as a dotenv file except it only contains a single value and the file name
is used as the key. A secret file will look like the following:

`/var/run/database_password`:
```
super_secret_database_password
```

Once you have your secret files, *pydantic* supports loading it in two ways:

**1.** setting `secrets_dir` on `Config` in a `BaseSettings` class to the directory where your secret files are stored:

```py
class Settings(BaseSettings):
    ...
    database_password: str

    class Config:
        secrets_dir = '/var/run'
```

**2.** instantiating a `BaseSettings` derived class with the `_secrets_dir` keyword argument:

```py
settings = Settings(_secrets_dir='/var/run')
```

In either case, the value of the passed argument can be any valid directory, either absolute or relative to the
current working directory. **Note that a non-existent directory will only generate a warning**.
From there, *pydantic* will handle everything for you by loading in your variables and validating them.

Even when using a secrets directory, *pydantic* will still read environment variables from a dotenv file or
the environment, **a dotenv file and environment variables will always take priority over values loaded from
the secrets directory**.

Passing a file path via the `_secrets_dir` keyword argument on instantiation (method 2) will override
the value (if any) set on the `Config` class.

### Use Case: Docker Secrets

Docker Secrets can be used to provide sensitive configuration to an application running in a Docker container.
To use these secrets in a *pydantic* application the process is simple. For more information regarding creating,
managing and using secrets in Docker, see the official
[Docker documentation](https://docs.docker.com/engine/reference/commandline/secret/).

First, define your Settings:

```py
class Settings(BaseSettings):
    my_secret_data: str

    class Config:
        secrets_dir = '/run/secrets'
```

!!! note
    By default Docker uses `/run/secrets` as the target mount point.
    If you want to use a different location, change `Config.secrets_dir` accordingly.

Then, create your secret via the Docker CLI:

```bash
printf "This is a secret" | docker secret create my_secret_data -
```

Last, run your application inside a Docker container and supply your newly created secret:

```bash
docker service create --name pydantic-with-secrets --secret my_secret_data pydantic-app:latest
```

## Field value priority

In the case where a value is specified for the same `Settings` field in multiple ways,
the selected value is determined as follows (in descending order of priority):

1. Arguments passed to the `Settings` class initialiser.
2. Environment variables, e.g. `my_prefix_special_function` as described above.
3. Variables loaded from a dotenv (`.env`) file.
4. Variables loaded from the secrets directory.
5. The default field values for the `Settings` model.
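A minimal sketch of the first two levels of this hierarchy (the field and values are illustrative):

```py
import os

from pydantic import BaseSettings


class Settings(BaseSettings):
    my_api_key: str = 'default'  # priority 5: the field default


os.environ['my_api_key'] = 'from-env'

print(Settings().my_api_key)  # priority 2: the environment variable beats the default
#> from-env

print(Settings(my_api_key='from-init').my_api_key)  # priority 1: init arguments beat everything
#> from-init
```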
## Customise settings sources

If the default order of priority doesn't match your needs, it's possible to change it by overriding
the `customise_sources` method on the `Config` class of your `Settings`.

`customise_sources` takes three callables as arguments and returns any number of callables as a tuple.
In turn these callables are called to build the inputs to the fields of the settings class.

Each callable should take an instance of the settings class as its sole argument and return a `dict`.

### Changing Priority

The order of the returned callables decides the priority of inputs; the first item is the highest priority.

{!.tmp_examples/settings_env_priority.md!}

By flipping `env_settings` and `init_settings`, environment variables now have precedence over `__init__` kwargs.

### Adding sources

As explained earlier, *pydantic* ships with multiple built-in settings sources. However, you may occasionally
need to add your own custom sources; `customise_sources` makes this very easy:

{!.tmp_examples/settings_add_custom_source.md!}

### Removing sources

You might also want to disable a source:

{!.tmp_examples/settings_disable_source.md!}
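To make the interface concrete, here is a minimal sketch that wires a hypothetical extra source into the returned tuple (the source function and the values it returns are illustrative; in real code it might read a JSON or TOML file):

```py
from typing import Any, Dict

from pydantic import BaseSettings


def json_config_source(settings: BaseSettings) -> Dict[str, Any]:
    # hypothetical source: takes the settings instance, returns a dict of field values
    return {'environment': 'staging'}


class Settings(BaseSettings):
    environment: str = 'dev'

    class Config:
        @classmethod
        def customise_sources(cls, init_settings, env_settings, file_secret_settings):
            # earlier entries in the tuple take priority over later ones
            return init_settings, json_config_source, env_settings, file_secret_settings


print(Settings().environment)
#> staging
```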
pydantic-1.10.14/docs/usage/types.md000066400000000000000000001111701455251250200172150ustar00rootroot00000000000000Where possible *pydantic* uses [standard library types](#standard-library-types) to define fields, thus smoothing the learning curve. For many useful applications, however, no standard library type exists, so *pydantic* implements [many commonly used types](#pydantic-types).

If no existing type suits your purpose you can also implement your [own pydantic-compatible types](#custom-data-types) with custom properties and validation.

## Standard Library Types

*pydantic* supports many common types from the Python standard library. If you need stricter processing see [Strict Types](#strict-types); if you need to constrain the values allowed (e.g. to require a positive int) see [Constrained Types](#constrained-types).

`None`, `type(None)` or `Literal[None]` (equivalent according to [PEP 484](https://www.python.org/dev/peps/pep-0484/#using-none))
: allows only `None` value

`bool`
: see [Booleans](#booleans) below for details on how bools are validated and what values are permitted

`int`
: *pydantic* uses `int(v)` to coerce types to an `int`; see [this](models.md#data-conversion) warning on loss of information during data conversion

`float`
: similarly, `float(v)` is used to coerce values to floats

`str`
: strings are accepted as-is; `int`, `float` and `Decimal` are coerced using `str(v)`; `bytes` and `bytearray` are converted using `v.decode()`; enums inheriting from `str` are converted using `v.value`; and all other types cause an error

`bytes`
: `bytes` are accepted as-is; `bytearray` is converted using `bytes(v)`; `str` are converted using `v.encode()`; and `int`, `float`, and `Decimal` are coerced using `str(v).encode()`

`list`
: allows `list`, `tuple`, `set`, `frozenset`, `deque`, or generators and casts to a list; see `typing.List` below for sub-type constraints

`tuple`
: allows `list`, `tuple`, `set`, `frozenset`, `deque`, or generators and casts to a tuple; see `typing.Tuple` below for sub-type constraints

`dict`
: `dict(v)` is used to attempt to convert a dictionary; see `typing.Dict` below for sub-type constraints

`set`
: allows `list`, `tuple`, `set`, `frozenset`, `deque`, or generators and casts to a set; see `typing.Set` below for sub-type constraints

`frozenset`
: allows `list`, `tuple`, `set`, `frozenset`, `deque`, or generators and casts to a frozen set; see `typing.FrozenSet` below for sub-type constraints

`deque`
: allows `list`, `tuple`, `set`, `frozenset`, `deque`, or generators and casts to a deque; see `typing.Deque` below for sub-type constraints

`datetime.date`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`datetime.time`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`datetime.datetime`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`datetime.timedelta`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`typing.Any`
: allows any value including `None`, thus an `Any` field is optional

`typing.Annotated`
: allows wrapping another type with arbitrary metadata, as per [PEP-593](https://www.python.org/dev/peps/pep-0593/). The `Annotated` hint may contain a single call to the [`Field` function](schema.md#typingannotated-fields), but otherwise the additional metadata is ignored and the root type is used.

`typing.TypeVar`
: constrains the values allowed based on `constraints` or `bound`, see [TypeVar](#typevar)

`typing.Union`
: see [Unions](#unions) below for more detail on parsing and validation

`typing.Optional`
: `Optional[x]` is simply shorthand for `Union[x, None]`; see [Unions](#unions) below for more detail on parsing and validation and [Required Fields](models.md#required-fields) for details about required fields that can receive `None` as a value.

`typing.List`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Tuple`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`subclass of typing.NamedTuple`
: Same as `tuple`, but instantiates with the given namedtuple and validates fields since they are annotated.
  See [Annotated Types](#annotated-types) below for more detail on parsing and validation

`subclass of collections.namedtuple`
: Same as `subclass of typing.NamedTuple`, but all fields will have type `Any` since they are not annotated

`typing.Dict`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`subclass of typing.TypedDict`
: Same as `dict`, but *pydantic* will validate the dictionary since keys are annotated.
  See [Annotated Types](#annotated-types) below for more detail on parsing and validation

`typing.Set`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.FrozenSet`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Deque`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Sequence`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Iterable`
: this is reserved for iterables that shouldn't be consumed. See [Infinite Generators](#infinite-generators) below for more detail on parsing and validation

`typing.Type`
: see [Type](#type) below for more detail on parsing and validation

`typing.Callable`
: see [Callable](#callable) below for more detail on parsing and validation

`typing.Pattern`
: will cause the input value to be passed to `re.compile(v)` to create a regex pattern

`ipaddress.IPv4Address`
: simply uses the type itself for validation by passing the value to `IPv4Address(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv4Interface`
: simply uses the type itself for validation by passing the value to `IPv4Interface(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv4Network`
: simply uses the type itself for validation by passing the value to `IPv4Network(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv6Address`
: simply uses the type itself for validation by passing the value to `IPv6Address(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv6Interface`
: simply uses the type itself for validation by passing the value to `IPv6Interface(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv6Network`
: simply uses the type itself for validation by passing the value to `IPv6Network(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`enum.Enum`
: checks that the value is a valid Enum instance

`subclass of enum.Enum`
: checks that the value is a valid member of the enum; see [Enums and Choices](#enums-and-choices) for more details

`enum.IntEnum`
: checks that the value is a valid IntEnum instance

`subclass of enum.IntEnum`
: checks that the value is a valid member of the integer enum; see [Enums and Choices](#enums-and-choices) for more details

`decimal.Decimal`
: *pydantic* attempts to convert the value to a string, then passes the string to `Decimal(v)`

`pathlib.Path`
: simply uses the type itself for validation by passing the value to `Path(v)`; see [Pydantic Types](#pydantic-types) for other more strict path types

`uuid.UUID`
: strings and bytes (converted to strings) are passed to `UUID(v)`, with a fallback to `UUID(bytes=v)` for `bytes` and `bytearray`; see [Pydantic Types](#pydantic-types) for other stricter UUID types

`ByteSize`
: converts a bytes string with units to bytes

### Typing Iterables

*pydantic* uses standard
library `typing` types as defined in PEP 484 to define complex objects.

{!.tmp_examples/types_iterables.md!}

### Infinite Generators

If you have a generator you can use `Sequence` as described above. In that case, the generator will be consumed
and stored on the model as a list and its values will be validated with the sub-type of `Sequence`
(e.g. `int` in `Sequence[int]`).

But if you have a generator that you don't want to be consumed, e.g. an infinite generator or a remote data loader,
you can define its type with `Iterable`:

{!.tmp_examples/types_infinite_generator.md!}

!!! warning
    `Iterable` fields only perform a simple check that the argument is iterable and won't be consumed.

    No validation of their values is performed as it cannot be done without consuming the iterable.

!!! tip
    If you want to validate the values of an infinite generator you can create a separate model and use it
    while consuming the generator, reporting the validation errors as appropriate.

    pydantic can't validate the values automatically for you because it would require consuming the infinite generator.

#### Validating the first value

You can create a [validator](validators.md) to validate the first value in an infinite generator and still not
consume it entirely.

{!.tmp_examples/types_infinite_generator_validate_first.md!}

### Unions

The `Union` type allows a model attribute to accept different types, e.g.:

!!! info
    You may get unexpected coercion with `Union`; see below.
    Know that you can also make the check slower but stricter by using [Smart Union](model_config.md#smart-union)

{!.tmp_examples/types_union_incorrect.md!}

However, as can be seen above, *pydantic* will attempt to 'match' any of the types defined under `Union` and will use
the first one that matches. In the above example the `id` of `user_03` was defined as a `uuid.UUID` class (which
is defined under the attribute's `Union` annotation) but as the `uuid.UUID` can be marshalled into an `int` it
chose to match against the `int` type and disregarded the other types.

!!! warning
    `typing.Union` also ignores order when [defined](https://docs.python.org/3/library/typing.html#typing.Union),
    so `Union[int, float] == Union[float, int]` which can lead to unexpected behaviour when combined with matching
    based on the `Union` type order inside other type definitions, such as `List` and `Dict` types (because Python
    treats these definitions as singletons). For example, `Dict[str, Union[int, float]] == Dict[str, Union[float, int]]`
    with the order based on the first time it was defined. Please note that this can also be
    [affected by third party libraries](https://github.com/pydantic/pydantic/issues/2835) and their internal type
    definitions and the import orders.

As such, it is recommended that, when defining `Union` annotations, the most specific type is included first and
followed by less specific types. In the above example, the `UUID` class should precede the `int` and `str` classes
to preclude the unexpected representation as such:

{!.tmp_examples/types_union_correct.md!}

!!! tip
    The type `Optional[x]` is a shorthand for `Union[x, None]`.

    `Optional[x]` can also be used to specify a required field that can take `None` as a value.
    See more details in [Required Fields](models.md#required-fields).

#### Discriminated Unions (a.k.a. Tagged Unions)

When `Union` is used with multiple submodels, you sometimes know exactly which submodel needs to
be checked and validated and want to enforce this. To do that you can set the same field - let's call it
`my_discriminator` - in each of the submodels with a discriminated value, which is one (or many) `Literal` value(s).
For your `Union`, you can set the discriminator in its value: `Field(discriminator='my_discriminator')`.

Setting a discriminated union has many benefits:

- validation is faster since it is only attempted against one model
- only one explicit error is raised in case of failure
- the generated JSON schema implements the [associated OpenAPI specification](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminatorObject)

{!.tmp_examples/types_union_discriminated.md!}

!!! note
    Using the [Annotated Fields syntax](../schema/#typingannotated-fields) can be handy to regroup
    the `Union` and `discriminator` information. See below for an example!

!!! warning
    Discriminated unions cannot be used with only a single variant, such as `Union[Cat]`.

    Python changes `Union[T]` into `T` at interpretation time, so it is not possible for `pydantic` to
    distinguish fields of `Union[T]` from `T`.

#### Nested Discriminated Unions

Only one discriminator can be set for a field but sometimes you want to combine multiple discriminators.
In this case you can always create "intermediate" models with `__root__` and add your discriminator.

{!.tmp_examples/types_union_discriminated_nested.md!}

### Enums and Choices

*pydantic* uses Python's standard `enum` classes to define choices.
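For instance, a minimal sketch of a field typed with an `Enum` subclass (the enum itself is illustrative):

```py
from enum import Enum

from pydantic import BaseModel, ValidationError


class FruitEnum(str, Enum):
    pear = 'pear'
    banana = 'banana'


class CookingModel(BaseModel):
    fruit: FruitEnum = FruitEnum.pear


print(CookingModel(fruit='banana').fruit)
#> FruitEnum.banana

try:
    CookingModel(fruit='apple')
except ValidationError as e:
    # 'apple' is not a valid enumeration member
    print(e.errors()[0]['type'])
    #> type_error.enum
```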
{!.tmp_examples/types_choices.md!}

### Datetime Types

*Pydantic* supports the following [datetime](https://docs.python.org/library/datetime.html#available-types) types:

* `datetime` fields can be:
    * `datetime`, existing `datetime` object
    * `int` or `float`, assumed as Unix time, i.e. seconds (if >= `-2e10` or <= `2e10`) or milliseconds (if < `-2e10` or > `2e10`) since 1 January 1970
    * `str`, following formats work:
        * `YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]`
        * `int` or `float` as a string (assumed as Unix time)
* `date` fields can be:
    * `date`, existing `date` object
    * `int` or `float`, see `datetime`
    * `str`, following formats work:
        * `YYYY-MM-DD`
        * `int` or `float`, see `datetime`
* `time` fields can be:
    * `time`, existing `time` object
    * `str`, following formats work:
        * `HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]`
* `timedelta` fields can be:
    * `timedelta`, existing `timedelta` object
    * `int` or `float`, assumed as seconds
    * `str`, following formats work:
        * `[-][DD ][HH:MM]SS[.ffffff]`
        * `[±]P[DD]DT[HH]H[MM]M[SS]S` ([ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format for timedelta)

{!.tmp_examples/types_dt.md!}

### Booleans

!!! warning
    The logic for parsing `bool` fields has changed as of version **v1.0**.

    Prior to **v1.0**, `bool` parsing never failed, leading to some unexpected results.
    The new logic is described below.

A standard `bool` field will raise a `ValidationError` if the value is not one of the following:

* A valid boolean (i.e. `True` or `False`),
* The integers `0` or `1`,
* a `str` which when converted to lower case is one of `'0', 'off', 'f', 'false', 'n', 'no', '1', 'on', 't', 'true', 'y', 'yes'`
* a `bytes` which is valid (per the previous rule) when decoded to `str`

!!! note
    If you want stricter boolean logic (e.g. a field which only permits `True` and `False`) you can use [`StrictBool`](#strict-types).

Here is a script demonstrating some of these behaviors:

{!.tmp_examples/types_boolean.md!}

### Callable

Fields can also be of type `Callable`:

{!.tmp_examples/types_callable.md!}

!!! warning
    Callable fields only perform a simple check that the argument is callable; no validation of arguments,
    their types, or the return type is performed.

### Type

*pydantic* supports the use of `Type[T]` to specify that a field may only accept classes (not instances)
that are subclasses of `T`.

{!.tmp_examples/types_type.md!}

You may also use `Type` to specify that any class is allowed.

{!.tmp_examples/types_bare_type.md!}

### TypeVar

`TypeVar` is supported either unconstrained, constrained or with a bound.

{!.tmp_examples/types_typevar.md!}

## Literal Type

!!! note
    This is a new feature of the Python standard library as of Python 3.8;
    prior to Python 3.8, it requires the [typing-extensions](https://pypi.org/project/typing-extensions/) package.

*pydantic* supports the use of `typing.Literal` (or `typing_extensions.Literal` prior to Python 3.8) as a
lightweight way to specify that a field may accept only specific literal values:

{!.tmp_examples/types_literal1.md!}

One benefit of this field type is that it can be used to check for equality with one or more specific values
without needing to declare custom validators:

{!.tmp_examples/types_literal2.md!}

With proper ordering in an annotated `Union`, you can use this to parse types of decreasing specificity:

{!.tmp_examples/types_literal3.md!}

## Annotated Types

### NamedTuple

{!.tmp_examples/annotated_types_named_tuple.md!}

### TypedDict

!!! note
    This is a new feature of the Python standard library as of Python 3.8.
## Annotated Types

### NamedTuple

{!.tmp_examples/annotated_types_named_tuple.md!}

### TypedDict

!!! note
    This is a new feature of the Python standard library as of Python 3.8.
    Prior to Python 3.8, it requires the [typing-extensions](https://pypi.org/project/typing-extensions/) package.
    But required and optional fields are properly differentiated only since Python 3.9.
    We therefore recommend using [typing-extensions](https://pypi.org/project/typing-extensions/) with Python 3.8 as well.

{!.tmp_examples/annotated_types_typed_dict.md!}

## Pydantic Types

*pydantic* also provides a variety of other useful types:

`FilePath`
: like `Path`, but the path must exist and be a file

`DirectoryPath`
: like `Path`, but the path must exist and be a directory

`PastDate`
: like `date`, but the date should be in the past

`FutureDate`
: like `date`, but the date should be in the future

`EmailStr`
: requires [email-validator](https://github.com/JoshData/python-email-validator) to be installed; the input string must be a valid email address, and the output is a simple string

`NameEmail`
: requires [email-validator](https://github.com/JoshData/python-email-validator) to be installed; the input string must be either a valid email address or in the format `Fred Bloggs <fred.bloggs@example.com>`, and the output is a `NameEmail` object which has two properties: `name` and `email`. For `Fred Bloggs <fred.bloggs@example.com>` the name would be `"Fred Bloggs"`; for `fred.bloggs@example.com` it would be `"fred.bloggs"`.

`PyObject`
: expects a string and loads the Python object importable at that dotted path; e.g. if `'math.cos'` was provided, the resulting field value would be the function `cos`

`Color`
: for parsing HTML and CSS colors; see [Color Type](#color-type)

`Json`
: a special type wrapper which loads JSON before parsing; see [JSON Type](#json-type)

`PaymentCardNumber`
: for parsing and validating payment cards; see [payment cards](#payment-card-numbers)

`AnyUrl`
: any URL; see [URLs](#urls)

`AnyHttpUrl`
: an HTTP URL; see [URLs](#urls)

`HttpUrl`
: a stricter HTTP URL; see [URLs](#urls)

`FileUrl`
: a file path URL; see [URLs](#urls)

`PostgresDsn`
: a postgres DSN style URL; see [URLs](#urls)

`CockroachDsn`
: a cockroachdb DSN style URL; see [URLs](#urls)

`AmqpDsn`
: an `AMQP` DSN style URL as used by RabbitMQ, StormMQ, ActiveMQ etc.; see [URLs](#urls)

`RedisDsn`
: a redis DSN style URL; see [URLs](#urls)

`MongoDsn`
: a MongoDB DSN style URL; see [URLs](#urls)

`KafkaDsn`
: a kafka DSN style URL; see [URLs](#urls)

`stricturl`
: a type method for arbitrary URL constraints; see [URLs](#urls)

`UUID1`
: requires a valid UUID of type 1; see `UUID` [above](#standard-library-types)

`UUID3`
: requires a valid UUID of type 3; see `UUID` [above](#standard-library-types)

`UUID4`
: requires a valid UUID of type 4; see `UUID` [above](#standard-library-types)

`UUID5`
: requires a valid UUID of type 5; see `UUID` [above](#standard-library-types)

`SecretBytes`
: bytes where the value is kept partially secret; see [Secrets](#secret-types)

`SecretStr`
: string where the value is kept partially secret; see [Secrets](#secret-types)

`IPvAnyAddress`
: allows either an `IPv4Address` or an `IPv6Address`

`IPvAnyInterface`
: allows either an `IPv4Interface` or an `IPv6Interface`

`IPvAnyNetwork`
: allows either an `IPv4Network` or an `IPv6Network`

`NegativeFloat`
: allows a float which is negative; uses standard `float` parsing then checks the value is less than 0; see [Constrained Types](#constrained-types)

`NegativeInt`
: allows an int which is negative; uses standard `int` parsing then checks the value is less than 0; see [Constrained Types](#constrained-types)

`PositiveFloat`
: allows a float which is positive; uses standard `float` parsing then checks the value is greater than 0; see [Constrained Types](#constrained-types)

`PositiveInt`
: allows an int which is positive; uses standard `int` parsing then checks the value is greater than 0; see [Constrained Types](#constrained-types)

`conbytes`
: type method for constraining bytes; see [Constrained Types](#constrained-types)

`condecimal`
: type method for constraining Decimals; see [Constrained Types](#constrained-types)

`confloat`
: type method for constraining floats; see [Constrained Types](#constrained-types)

`conint`
: type method for constraining ints; see [Constrained Types](#constrained-types)

`condate`
: type method for constraining dates; see [Constrained Types](#constrained-types)

`conlist`
: type method for constraining lists; see [Constrained Types](#constrained-types)

`conset`
: type method for constraining sets; see [Constrained Types](#constrained-types)

`confrozenset`
: type method for constraining frozen sets; see [Constrained Types](#constrained-types)

`constr`
: type method for constraining strs; see [Constrained Types](#constrained-types)
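As a small sketch exercising a few of the types above (the `Audit` model is invented for illustration):

```py
from pydantic import BaseModel, NegativeInt, PastDate, SecretStr, ValidationError


class Audit(BaseModel):
    recorded: PastDate
    delta: NegativeInt
    token: SecretStr


a = Audit(recorded='2000-01-01', delta=-3, token='hush')
print(a.token)  # the secret is masked when printed
#> **********
print(a.token.get_secret_value())
#> hush

try:
    Audit(recorded='2999-01-01', delta=-3, token='hush')  # not in the past
except ValidationError as e:
    print(e)
```

Note how the `SecretStr` value is masked when printed; the raw value is only available via `get_secret_value()`.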
### URLs

For URI/URL validation the following types are available:

- `AnyUrl`: any scheme allowed, TLD not required, host required
- `AnyHttpUrl`: scheme `http` or `https`, TLD not required, host required
- `HttpUrl`: scheme `http` or `https`, TLD required, host required, max length 2083
- `FileUrl`: scheme `file`, host not required
- `PostgresDsn`: user info required, TLD not required, host required. As of **v1.10** `PostgresDsn` supports multiple hosts. The following schemes are supported:
    - `postgres`
    - `postgresql`
    - `postgresql+asyncpg`
    - `postgresql+pg8000`
    - `postgresql+psycopg`
    - `postgresql+psycopg2`
    - `postgresql+psycopg2cffi`
    - `postgresql+py-postgresql`
    - `postgresql+pygresql`
- `CockroachDsn`: scheme `cockroachdb`, user info required, TLD not required, host required. The following DBAPI dialects are also supported:
    - `cockroachdb+asyncpg`
    - `cockroachdb+psycopg2`
- `AmqpDsn`: scheme `amqp` or `amqps`, user info not required, TLD not required, host not required
- `RedisDsn`: scheme `redis` or `rediss`, user info not required (from **v1.6** onwards), TLD not required, host not required (e.g., `rediss://:pass@localhost`)
- `MongoDsn`: scheme `mongodb`, user info not required, database name not required, port not required (database name and port not required from **v1.6** onwards), user info may be passed without user part (e.g., `mongodb://mongodb0.example.com:27017`)
- `stricturl`: method with the following keyword arguments:
    - `strip_whitespace: bool = True`
    - `min_length: int = 1`
    - `max_length: int = 2 ** 16`
    - `tld_required: bool = True`
    - `host_required: bool = True`
    - `allowed_schemes: Optional[Set[str]] = None`

!!! warning
    In v1.10.0 and v1.10.1 `stricturl` also took an optional `quote_plus` argument and URL components were percent encoded in some cases. This feature was removed in v1.10.2; see [#4470](https://github.com/pydantic/pydantic/pull/4470) for explanation and more details.

The above types (which all inherit from `AnyUrl`) will attempt to give descriptive errors when invalid URLs are provided:

{!.tmp_examples/types_urls.md!}

If you require a custom URI/URL type, it can be created in a similar way to the types defined above.
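A minimal sketch of what that looks like (the `Site` model is invented for illustration):

```py
from pydantic import BaseModel, HttpUrl, ValidationError


class Site(BaseModel):
    url: HttpUrl


site = Site(url='https://www.example.com/path?query=1')
print(site.url.host)
#> www.example.com

try:
    Site(url='ftp://example.com')  # scheme not permitted for HttpUrl
except ValidationError as e:
    print(e)
```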
#### URL Properties

Assuming an input URL of `http://samuel:pass@example.com:8000/the/path/?query=here#fragment=is;this=bit`, the above types export the following properties:

- `scheme`: always set - the url scheme (`http` above)
- `host`: always set - the url host (`example.com` above)
- `host_type`: always set - describes the type of host, either:
    - `domain`: e.g. `example.com`,
    - `int_domain`: international domain, see [below](#international-domains), e.g. `exampl£e.org`,
    - `ipv4`: an IP V4 address, e.g. `127.0.0.1`, or
    - `ipv6`: an IP V6 address, e.g. `2001:db8:ff00:42`
- `user`: optional - the username if included (`samuel` above)
- `password`: optional - the password if included (`pass` above)
- `tld`: optional - the top level domain (`com` above), **Note: this will be wrong for any two-level domain, e.g. "co.uk".** You'll need to implement your own list of TLDs if you require full TLD validation
- `port`: optional - the port (`8000` above)
- `path`: optional - the path (`/the/path/` above)
- `query`: optional - the URL query (aka GET arguments or "search string") (`query=here` above)
- `fragment`: optional - the fragment (`fragment=is;this=bit` above)

If further validation is required, these properties can be used by validators to enforce specific behaviour:

{!.tmp_examples/types_url_properties.md!}

#### International Domains

"International domains" (e.g. a URL where the host or TLD includes non-ascii characters) will be encoded via [punycode](https://en.wikipedia.org/wiki/Punycode) (see [this article](https://www.xudongz.com/blog/2017/idn-phishing/) for a good description of why this is important):

{!.tmp_examples/types_url_punycode.md!}

!!! warning
    #### Underscores in Hostnames

    In *pydantic* underscores are allowed in all parts of a domain except the TLD.
    Technically this might be wrong - in theory the hostname cannot have underscores, but subdomains can.

    To explain this, consider the following two cases:

    - `exam_ple.co.uk`: the hostname is `exam_ple`, which should not be allowed since it contains an underscore
    - `foo_bar.example.com`: the hostname is `example`, which should be allowed since the underscore is in the subdomain

    Without having an exhaustive list of TLDs, it would be impossible to differentiate between these two. Therefore underscores are allowed, but you can always do further validation in a validator if desired.

    Also, Chrome, Firefox, and Safari all currently accept `http://exam_ple.com` as a URL, so we're in good (or at least big) company.

### Color Type

You can use the `Color` data type for storing colors as per the [CSS3 specification](http://www.w3.org/TR/css3-color/#svg-color). Colors can be defined via:

- [name](http://www.w3.org/TR/SVG11/types.html#ColorKeywords) (e.g. `"Black"`, `"azure"`)
- [hexadecimal value](https://en.wikipedia.org/wiki/Web_colors#Hex_triplet) (e.g. `"0x000"`, `"#FFFFFF"`, `"7fffd4"`)
- RGB/RGBA tuples (e.g. `(255, 255, 255)`, `(255, 255, 255, 0.5)`)
- [RGB/RGBA strings](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#RGB_colors) (e.g. `"rgb(255, 255, 255)"`, `"rgba(255, 255, 255, 0.5)"`)
- [HSL strings](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#HSL_colors) (e.g. `"hsl(270, 60%, 70%)"`, `"hsl(270, 60%, 70%, .5)"`)
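A short sketch of these input forms (the `Theme` model is invented for illustration):

```py
from pydantic import BaseModel
from pydantic.color import Color


class Theme(BaseModel):
    accent: Color


# a hex triplet is parsed into a Color object
theme = Theme(accent='#7fffd4')
print(theme.accent.as_named())
#> aquamarine
print(theme.accent.as_rgb_tuple())
#> (127, 255, 212)
```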
`"hsl(270, 60%, 70%)"`, `"hsl(270, 60%, 70%, .5)"`) {!.tmp_examples/types_color.md!} `Color` has the following methods: **`original`** : the original string or tuple passed to `Color` **`as_named`** : returns a named CSS3 color; fails if the alpha channel is set or no such color exists unless `fallback=True` is supplied, in which case it falls back to `as_hex` **`as_hex`** : returns a string in the format `#fff` or `#ffffff`; will contain 4 (or 8) hex values if the alpha channel is set, e.g. `#7f33cc26` **`as_rgb`** : returns a string in the format `rgb(, , )`, or `rgba(, , , )` if the alpha channel is set **`as_rgb_tuple`** : returns a 3- or 4-tuple in RGB(a) format. The `alpha` keyword argument can be used to define whether the alpha channel should be included; options: `True` - always include, `False` - never include, `None` (default) - include if set **`as_hsl`** : string in the format `hsl(, , )` or `hsl(, , , )` if the alpha channel is set **`as_hsl_tuple`** : returns a 3- or 4-tuple in HSL(a) format. The `alpha` keyword argument can be used to define whether the alpha channel should be included; options: `True` - always include, `False` - never include, `None` (the default) - include if set The `__str__` method for `Color` returns `self.as_named(fallback=True)`. !!! note the `as_hsl*` refer to hue, saturation, lightness "HSL" as used in html and most of the world, **not** "HLS" as used in Python's `colorsys`. ### Secret Types You can use the `SecretStr` and the `SecretBytes` data types for storing sensitive information that you do not want to be visible in logging or tracebacks. `SecretStr` and `SecretBytes` can be initialized idempotently or by using `str` or `bytes` literals respectively. The `SecretStr` and `SecretBytes` will be formatted as either `'**********'` or `''` on conversion to json. {!.tmp_examples/types_secret_types.md!} ### Json Type You can use `Json` data type to make *pydantic* first load a raw JSON string. It can also optionally be used to parse the loaded object into another type base on the type `Json` is parameterised with: {!.tmp_examples/types_json_type.md!} ### Payment Card Numbers The `PaymentCardNumber` type validates [payment cards](https://en.wikipedia.org/wiki/Payment_card) (such as a debit or credit card). {!.tmp_examples/types_payment_card_number.md!} `PaymentCardBrand` can be one of the following based on the BIN: * `PaymentCardBrand.amex` * `PaymentCardBrand.mastercard` * `PaymentCardBrand.visa` * `PaymentCardBrand.other` The actual validation verifies the card number is: * a `str` of only digits * [luhn](https://en.wikipedia.org/wiki/Luhn_algorithm) valid * the correct length based on the BIN, if Amex, Mastercard or Visa, and between 12 and 19 digits for all other brands ## Constrained Types The value of numerous common types can be restricted using `con*` type functions: {!.tmp_examples/types_constrained.md!} Where `Field` refers to the [field function](schema.md#field-customization). 
### Arguments to `conlist`

The following arguments are available when using the `conlist` type function:

- `item_type: Type[T]`: type of the list items
- `min_items: int = None`: minimum number of items in the list
- `max_items: int = None`: maximum number of items in the list
- `unique_items: bool = None`: enforces list elements to be unique

### Arguments to `conset`

The following arguments are available when using the `conset` type function:

- `item_type: Type[T]`: type of the set items
- `min_items: int = None`: minimum number of items in the set
- `max_items: int = None`: maximum number of items in the set

### Arguments to `confrozenset`

The following arguments are available when using the `confrozenset` type function:

- `item_type: Type[T]`: type of the frozenset items
- `min_items: int = None`: minimum number of items in the frozenset
- `max_items: int = None`: maximum number of items in the frozenset

### Arguments to `conint`

The following arguments are available when using the `conint` type function:

- `strict: bool = False`: controls type coercion
- `gt: int = None`: enforces integer to be greater than the set value
- `ge: int = None`: enforces integer to be greater than or equal to the set value
- `lt: int = None`: enforces integer to be less than the set value
- `le: int = None`: enforces integer to be less than or equal to the set value
- `multiple_of: int = None`: enforces integer to be a multiple of the set value

### Arguments to `confloat`

The following arguments are available when using the `confloat` type function:

- `strict: bool = False`: controls type coercion
- `gt: float = None`: enforces float to be greater than the set value
- `ge: float = None`: enforces float to be greater than or equal to the set value
- `lt: float = None`: enforces float to be less than the set value
- `le: float = None`: enforces float to be less than or equal to the set value
- `multiple_of: float = None`: enforces float to be a multiple of the set value
- `allow_inf_nan: bool = True`: whether to allow infinity (`+inf` and `-inf`) and NaN values; defaults to `True`, set to `False` for compatibility with `JSON`, see [#3994](https://github.com/pydantic/pydantic/pull/3994) for more details, added in **v1.10**
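A quick sketch of two of these arguments in use (the `Measurement` model is invented for illustration):

```py
import math

from pydantic import BaseModel, ValidationError, confloat


class Measurement(BaseModel):
    ratio: confloat(ge=0, le=1)
    reading: confloat(allow_inf_nan=False)


print(Measurement(ratio='0.25', reading=3.5))
#> ratio=0.25 reading=3.5

try:
    Measurement(ratio=0.5, reading=math.nan)  # NaN rejected
except ValidationError as e:
    print(e)
```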
### Arguments to `condecimal`

The following arguments are available when using the `condecimal` type function:

- `gt: Decimal = None`: enforces decimal to be greater than the set value
- `ge: Decimal = None`: enforces decimal to be greater than or equal to the set value
- `lt: Decimal = None`: enforces decimal to be less than the set value
- `le: Decimal = None`: enforces decimal to be less than or equal to the set value
- `max_digits: int = None`: maximum number of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes
- `decimal_places: int = None`: maximum number of decimal places allowed. It does not include trailing decimal zeroes
- `multiple_of: Decimal = None`: enforces decimal to be a multiple of the set value

### Arguments to `constr`

The following arguments are available when using the `constr` type function:

- `strip_whitespace: bool = False`: removes leading and trailing whitespace
- `to_upper: bool = False`: turns all characters to uppercase
- `to_lower: bool = False`: turns all characters to lowercase
- `strict: bool = False`: controls type coercion
- `min_length: int = None`: minimum length of the string
- `max_length: int = None`: maximum length of the string
- `curtail_length: int = None`: shrinks the string length to the set value when it is longer than the set value
- `regex: str = None`: regex to validate the string against

### Arguments to `conbytes`

The following arguments are available when using the `conbytes` type function:

- `strip_whitespace: bool = False`: removes leading and trailing whitespace
- `to_upper: bool = False`: turns all characters to uppercase
- `to_lower: bool = False`: turns all characters to lowercase
- `min_length: int = None`: minimum length of the byte string
- `max_length: int = None`: maximum length of the byte string
- `strict: bool = False`: controls type coercion

### Arguments to `condate`

The following arguments are available when using the `condate` type function:

- `gt: date = None`: enforces date to be greater than the set value
- `ge: date = None`: enforces date to be greater than or equal to the set value
- `lt: date = None`: enforces date to be less than the set value
- `le: date = None`: enforces date to be less than or equal to the set value

## Strict Types

You can use the `StrictStr`, `StrictBytes`, `StrictInt`, `StrictFloat`, and `StrictBool` types to prevent coercion from compatible types. These types will only pass validation when the validated value is of the respective type or is a subtype of that type.

This behavior is also exposed via the `strict` field of the `ConstrainedStr`, `ConstrainedBytes`, `ConstrainedFloat` and `ConstrainedInt` classes and can be combined with a multitude of complex validation rules.

The following caveats apply:

- `StrictBytes` (and the `strict` option of `ConstrainedBytes`) will accept both `bytes` and `bytearray` types.
- `StrictInt` (and the `strict` option of `ConstrainedInt`) will not accept `bool` types, even though `bool` is a subclass of `int` in Python. Other subclasses will work.
- `StrictFloat` (and the `strict` option of `ConstrainedFloat`) will not accept `int`.

{!.tmp_examples/types_strict.md!}

## ByteSize

You can use the `ByteSize` data type to convert byte string representations to raw bytes, and to print out human-readable versions of the bytes as well.

!!! info
    Note that `1b` will be parsed as "1 byte" and not "1 bit".

{!.tmp_examples/types_bytesize.md!}

## Custom Data Types

You can also define your own custom data types. There are several ways to achieve it.

### Classes with `__get_validators__`

You can use a custom class with a classmethod `__get_validators__`. It will be called to get validators to parse and validate the input data.

!!! tip
    These validators have the same semantics as in [Validators](validators.md); you can declare a parameter `config`, `field`, etc.

{!.tmp_examples/types_custom_type.md!}

Similar validation could be achieved using [`constr(regex=...)`](#constrained-types), except the value won't be formatted with a space, the schema would just include the full pattern, and the returned value would be a vanilla string.
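A minimal sketch of the pattern (the `Percentage` type is invented for illustration; the bundled example above covers the full approach):

```py
from pydantic import BaseModel


class Percentage(float):
    """A float restricted to 0-100, as a sketch of a custom type."""

    @classmethod
    def __get_validators__(cls):
        # one or more validators may be yielded; they run in order
        yield cls.validate

    @classmethod
    def validate(cls, v):
        v = float(v)
        if not 0 <= v <= 100:
            raise ValueError('must be between 0 and 100')
        return cls(v)


class Exam(BaseModel):
    score: Percentage


print(Exam(score='87.5'))
#> score=87.5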
See [schema](schema.md) for more details on how the model's schema is generated.

### Arbitrary Types Allowed

You can allow arbitrary types using the `arbitrary_types_allowed` config in the [Model Config](model_config.md).

{!.tmp_examples/types_arbitrary_allowed.md!}

### Generic Classes as Types

!!! warning
    This is an advanced technique that you might not need in the beginning. In most cases you will probably be fine with standard *pydantic* models.

You can use [Generic Classes](https://docs.python.org/3/library/typing.html#typing.Generic) as field types and perform custom validation based on the "type parameters" (or sub-types) with `__get_validators__`.

If the Generic class that you are using as a sub-type has a classmethod `__get_validators__` you don't need to use `arbitrary_types_allowed` for it to work.

Because you can declare validators that receive the current `field`, you can extract the `sub_fields` (from the generic class type parameters) and validate data with them.

{!.tmp_examples/types_generics.md!}
pydantic-1.10.14/docs/usage/validation_decorator.md000066400000000000000000000167121455251250200222530ustar00rootroot00000000000000The `validate_arguments` decorator allows the arguments passed to a function to be parsed and validated using the function's annotations before the function is called. While under the hood this uses the same approach of model creation and initialisation, it provides an extremely easy way to apply validation to your code with minimal boilerplate.

!!! info "In Beta"
    The `validate_arguments` decorator is in **beta**; it was added to *pydantic* in **v1.5** on a **provisional basis**. It may change significantly in future releases and its interface will not be concrete until **v2**. Feedback from the community while it's still provisional would be extremely useful; either comment on [#1205](https://github.com/pydantic/pydantic/issues/1205) or create a new issue.

Example of usage:

{!.tmp_examples/validation_decorator_main.md!}

## Argument Types

Argument types are inferred from type annotations on the function; arguments without a type annotation are considered as `Any`. Since `validate_arguments` internally uses a standard `BaseModel`, all types listed in [types](types.md) can be validated, including *pydantic* models and [custom types](types.md#custom-data-types). As with the rest of *pydantic*, types can be coerced by the decorator before they're passed to the actual function:

{!.tmp_examples/validation_decorator_types.md!}

A few notes:

- though they're passed as strings, `path` and `regex` are converted to a `Path` object and regex respectively by the decorator
- `max` has no type annotation, so will be considered as `Any` by the decorator

Type coercion like this can be extremely helpful but also confusing or not desired; see [below](#coercion-and-strictness) for a discussion of `validate_arguments`'s limitations in this regard.
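A minimal sketch of the decorator in action (the `repeat` function is invented for illustration):

```py
from pydantic import ValidationError, validate_arguments


@validate_arguments
def repeat(text: str, count: int) -> str:
    return text * count


# '3' is coerced to the int 3 before the function runs
print(repeat('ab', '3'))
#> ababab

try:
    repeat('ab', 'x')  # 'x' cannot be coerced to an int
except ValidationError as e:
    print(e)
```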
## Function Signatures

The decorator is designed to work with functions using all possible parameter configurations and all possible combinations of these:

* positional or keyword arguments with or without defaults
* variable positional arguments defined via `*` (often `*args`)
* variable keyword arguments defined via `**` (often `**kwargs`)
* keyword only arguments - arguments after `*,`
* positional only arguments - arguments before `, /` (new in Python 3.8)

To demonstrate all the above parameter types:

{!.tmp_examples/validation_decorator_parameter_types.md!}

## Using Field to describe function arguments

[Field](schema.md#field-customization) can also be used with `validate_arguments` to provide extra information about the field and validations. In general it should be used in a type hint with [Annotated](schema.md#typingannotated-fields), unless `default_factory` is specified, in which case it should be used as the default value of the field:

{!.tmp_examples/validation_decorator_field.md!}

The [alias](model_config.md#alias-precedence) can be used with the decorator as normal:

{!.tmp_examples/validation_decorator_field_alias.md!}

## Usage with mypy

The `validate_arguments` decorator should work "out of the box" with [mypy](http://mypy-lang.org/) since it's defined to return a function with the same signature as the function it decorates. The only limitation is that, since we trick mypy into thinking the function returned by the decorator is the same as the function being decorated, access to the [raw function](#raw-function) or other attributes will require `type: ignore`.

## Validate without calling the function

By default, argument validation is done by directly calling the decorated function with parameters. But what if you wanted to validate them without *actually* calling the function? To do that you can call the `validate` method bound to the decorated function.

{!.tmp_examples/validation_decorator_validate.md!}

## Raw function

The raw function which was decorated is accessible. This is useful if in some scenarios you trust your input arguments and want to call the function in the most performant way (see [notes on performance](#performance) below):

{!.tmp_examples/validation_decorator_raw_function.md!}

## Async Functions

`validate_arguments` can also be used on async functions:

{!.tmp_examples/validation_decorator_async.md!}

## Custom Config

The model behind `validate_arguments` can be customised using a config setting which is equivalent to setting the `Config` sub-class in normal models.

!!! warning
    The `fields` and `alias_generator` properties of `Config` which allow aliases to be configured are not supported yet with `@validate_arguments`; using them will raise an error.

Configuration is set using the `config` keyword argument to the decorator; it may be either a config class or a dict of properties which are converted to a class later.

{!.tmp_examples/validation_decorator_config.md!}
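For instance, a sketch of passing config as a dict (the `Cursor` class and `fetch` function are invented for illustration):

```py
from pydantic import validate_arguments


class Cursor:
    """Stand-in for a type pydantic doesn't know how to validate."""


@validate_arguments(config=dict(arbitrary_types_allowed=True))
def fetch(cursor: Cursor, query: str) -> str:
    return f'running {query!r}'


# without arbitrary_types_allowed, the Cursor annotation would raise an error
print(fetch(Cursor(), 'select 1'))
#> running 'select 1'
```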
## Limitations

`validate_arguments` has been released on a provisional basis without all the bells and whistles, which may be added later; see [#1205](https://github.com/pydantic/pydantic/issues/1205) for some more discussion of this. In particular:

### Validation Exception

Currently upon validation failure, a standard *pydantic* `ValidationError` is raised; see [model error handling](models.md#error-handling). This is helpful since its `str()` method provides useful details of the error which occurred, and methods like `.errors()` and `.json()` can be useful when exposing the errors to end users. However, `ValidationError` inherits from `ValueError` **not** `TypeError`, which may be unexpected since Python would raise a `TypeError` upon invalid or missing arguments. This may be addressed in future by either allowing a custom error, or raising a different exception by default, or both.

### Coercion and Strictness

*pydantic* currently leans on the side of trying to coerce types rather than raise an error if a type is wrong (see [model data conversion](models.md#data-conversion)), and `validate_arguments` is no different. See [#1098](https://github.com/pydantic/pydantic/issues/1098) and other issues with the "strictness" label for a discussion of this. If *pydantic* gets a "strict" mode in future, `validate_arguments` will have an option to use this; it may even become the default for the decorator.

### Performance

We've made a big effort to make *pydantic* as performant as possible, and argument inspection and model creation are only performed once when the function is defined. However, there will still be a performance impact to using the `validate_arguments` decorator compared to calling the raw function. In many situations this will have little or no noticeable effect; however, be aware that `validate_arguments` is not an equivalent or alternative to function definitions in strongly typed languages; it never will be.

### Return Value

The return value of the function is not validated against its return type annotation; this may be added as an option in future.

### Config and Validators

`fields` and `alias_generator` on custom [`Config`](model_config.md) are not supported, see [above](#custom-config). Neither are [validators](validators.md).

### Model fields and reserved arguments

The following names may not be used by arguments since they can be used internally to store information about the function's signature:

* `v__args`
* `v__kwargs`
* `v__positional_only`

These names (together with `"args"` and `"kwargs"`) may or may not (depending on the function's signature) appear as fields on the internal *pydantic* model accessible via `.model`, thus this model isn't especially useful (e.g. for generating a schema) at the moment. This should be fixable in future as the way errors are raised is changed.
pydantic-1.10.14/docs/usage/validators.md000066400000000000000000000117771455251250200202330ustar00rootroot00000000000000Custom validation and complex relationships between objects can be achieved using the `validator` decorator.

{!.tmp_examples/validators_simple.md!}

A few things to note on validators:

* validators are "class methods", so the first argument value they receive is the `UserModel` class, not an instance of `UserModel`.
* the second argument is always the field value to validate; it can be named as you please
* you can also add any subset of the following arguments to the signature (the names **must** match):
    * `values`: a dict containing the name-to-value mapping of any previously-validated fields
    * `config`: the model config
    * `field`: the field being validated. Type of object is `pydantic.fields.ModelField`.
    * `**kwargs`: if provided, this will include the arguments above not explicitly listed in the signature
* validators should either return the parsed value or raise a `ValueError`, `TypeError`, or `AssertionError` (``assert`` statements may be used).

!!! warning
    If you make use of `assert` statements, keep in mind that running Python with the [`-O` optimization flag](https://docs.python.org/3/using/cmdline.html#cmdoption-o) disables `assert` statements, and **validators will stop working**.

* where validators rely on other values, you should be aware that:

    * Validation is done in the order fields are defined. E.g. in the example above, `password2` has access to `password1` (and `name`), but `password1` does not have access to `password2`. See [Field Ordering](models.md#field-ordering) for more information on how fields are ordered
    * If validation fails on another field (or that field is missing) it will not be included in `values`, hence `if 'password1' in values and ...` in this example.
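A condensed sketch of a validator that uses `values`, mirroring the field names the notes above refer to:

```py
from pydantic import BaseModel, ValidationError, validator


class UserModel(BaseModel):
    name: str
    password1: str
    password2: str

    @validator('name')
    def name_must_not_be_blank(cls, v):
        if not v.strip():
            raise ValueError('must not be blank')
        return v.title()

    @validator('password2')
    def passwords_match(cls, v, values):
        # password1 is only present in values if it passed validation
        if 'password1' in values and v != values['password1']:
            raise ValueError('passwords do not match')
        return v


try:
    UserModel(name='samuel colvin', password1='zxcvbn', password2='zxcvbn2')
except ValidationError as e:
    print(e)
```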
## Pre and per-item validators

Validators can do a few more complex things:

{!.tmp_examples/validators_pre_item.md!}

A few more things to note:

* a single validator can be applied to multiple fields by passing it multiple field names
* a single validator can also be called on *all* fields by passing the special value `'*'`
* the keyword argument `pre` will cause the validator to be called prior to other validation
* passing `each_item=True` will result in the validator being applied to individual values (e.g. of `List`, `Dict`, `Set`, etc.), rather than the whole object

## Subclass Validators and `each_item`

If using a validator with a subclass that references a `List` type field on a parent class, using `each_item=True` will cause the validator not to run; instead, the list must be iterated over programmatically.

{!.tmp_examples/validators_subclass_each_item.md!}

## Validate Always

For performance reasons, by default validators are not called for fields when a value is not supplied. However there are situations where it may be useful or required to always call the validator, e.g. to set a dynamic default value.

{!.tmp_examples/validators_always.md!}

You'll often want to use this together with `pre`, since otherwise with `always=True` *pydantic* would try to validate the default `None`, which would cause an error.

## Reuse validators

Occasionally, you will want to use the same validator on multiple fields/models (e.g. to normalize some input data). The "naive" approach would be to write a separate function, then call it from multiple decorators. Obviously, this entails a lot of repetition and boilerplate code. To circumvent this, the `allow_reuse` parameter has been added to `pydantic.validator` in **v1.2** (`False` by default):

{!.tmp_examples/validators_allow_reuse.md!}

As you can see, repetition has been reduced and the models become again almost declarative.

!!! tip
    If you have a lot of fields that you want to validate, it usually makes sense to define a helper function, with which you will avoid setting `allow_reuse=True` over and over again.

## Root Validators

Validation can also be performed on the entire model's data:

{!.tmp_examples/validators_root.md!}

As with field validators, root validators can have `pre=True`, in which case they're called before field validation occurs (and are provided with the raw input data), or `pre=False` (the default), in which case they're called after field validation.

Field validation will not occur if `pre=True` root validators raise an error. As with field validators, "post" (i.e. `pre=False`) root validators by default will be called even if prior validators fail; this behaviour can be changed by setting the `skip_on_failure=True` keyword argument to the validator.

The `values` argument will be a dict containing the values which passed field validation and field defaults where applicable.
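A minimal sketch of a root validator (the `Booking` model is invented for illustration):

```py
from pydantic import BaseModel, ValidationError, root_validator


class Booking(BaseModel):
    check_in: int
    check_out: int

    @root_validator
    def check_dates(cls, values):
        # use .get() since a field that failed validation won't be in values
        ci, co = values.get('check_in'), values.get('check_out')
        if ci is not None and co is not None and co <= ci:
            raise ValueError('check_out must be after check_in')
        return values


try:
    Booking(check_in=5, check_out=3)
except ValidationError as e:
    print(e)
```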
## Field Checks

On class creation, validators are checked to confirm that the fields they specify actually exist on the model.

Occasionally, however, this is undesirable, e.g. if you define a validator to validate fields on inheriting models. In this case you should set `check_fields=False` on the validator.

## Dataclass Validators

Validators also work with *pydantic* dataclasses.

{!.tmp_examples/validators_dataclass.md!}
pydantic-1.10.14/docs/visual_studio_code.md000066400000000000000000000302251455251250200206320ustar00rootroot00000000000000*pydantic* works well with any editor or IDE out of the box because it's built on top of standard Python type annotations.

When using [Visual Studio Code (VS Code)](https://code.visualstudio.com/), there are some **additional editor features** supported, comparable to the ones provided by the [PyCharm plugin](./pycharm_plugin.md).

This means that you will have **autocompletion** (or "IntelliSense") and **error checks** for types and required arguments even while creating new *pydantic* model instances.

![pydantic autocompletion in VS Code](./img/vs_code_01.png)

## Configure VS Code

To take advantage of these features, you need to make sure you configure VS Code correctly, using the recommended settings.

In case you have a different configuration, here's a short overview of the steps.

### Install Pylance

You should use the [Pylance](https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance) extension for VS Code. It is the recommended, next-generation, official VS Code plug-in for Python.

Pylance is installed as part of the [Python Extension for VS Code](https://marketplace.visualstudio.com/items?itemName=ms-python.python) by default, so it should probably just work. Otherwise, you can double check it's installed and enabled in your editor.

### Configure your environment

Then you need to make sure your editor knows the [Python environment](https://code.visualstudio.com/docs/python/python-tutorial#_install-and-use-packages) (probably a virtual environment) for your Python project.

This would be the environment where you installed *pydantic*.

### Configure Pylance

With the default configurations, you will get support for autocompletion, but Pylance might not check for type errors.

You can enable type error checks from Pylance with these steps:

* Open the "User Settings"
* Search for `Type Checking Mode`
* You will find an option under `Python › Analysis: Type Checking Mode`
* Set it to `basic` or `strict` (by default it's `off`)

![Type Checking Mode set to strict in VS Code](./img/vs_code_02.png)

Now you will not only get autocompletion when creating new *pydantic* model instances but also error checks for **required arguments**.

![Required arguments error checks in VS Code](./img/vs_code_03.png)

And you will also get error checks for **invalid data types**.

![Invalid data types error checks in VS Code](./img/vs_code_04.png)

!!! note "Technical Details"
    Pylance is the VS Code extension; it's closed source, but free to use. Underneath, Pylance uses an open source tool (also from Microsoft) called [Pyright](https://github.com/microsoft/pyright) that does all the heavy lifting.

    You can read more about it in the [Pylance Frequently Asked Questions](https://github.com/microsoft/pylance-release/blob/main/FAQ.md#what-is-the-relationship-between-pylance-pyright-and-the-python-extension).
### Configure mypy

You might also want to configure mypy in VS Code to get mypy error checks inline in your editor (alternatively/additionally to Pylance).

This would include the errors detected by the [*pydantic* mypy plugin](./mypy_plugin.md), if you configured it.

To enable mypy in VS Code, do the following:

* Open the "User Settings"
* Search for `Mypy Enabled`
* You will find an option under `Python › Linting: Mypy Enabled`
* Check the box (by default it's unchecked)

![mypy enabled in VS Code](./img/vs_code_05.png)

## Tips and tricks

Here are some additional tips and tricks to improve your developer experience when using VS Code with *pydantic*.

### Strict errors

The way this additional editor support works is that Pylance will treat your *pydantic* models as if they were Python's pure `dataclasses`.

And it will show **strict type error checks** about the data types passed in arguments when creating a new *pydantic* model instance.

In this example you can see that it shows that a `str` of `'23'` is not a valid `int` for the argument `age`.

![VS Code strict type errors](./img/vs_code_06.png)

It would expect `age=23` instead of `age='23'`.

Nevertheless, the design, and one of the main features of *pydantic*, is that it is very **lenient with data types**.

It will actually accept the `str` with value `'23'` and will convert it to an `int` with value `23`.

These strict error checks are **very useful** most of the time and can help you **detect many bugs early**. But there are cases, like with `age='23'`, where they could be inconvenient by reporting a "false positive" error.

---

This example above with `age='23'` is intentionally simple, to show the error and the differences in types.

But more common cases where these strict errors would be inconvenient would be when using more sophisticated data types, like `int` values for `datetime` fields, or `dict` values for *pydantic* sub-models.

For example, this is valid for *pydantic*:

```Python hl_lines="12 17"
from pydantic import BaseModel


class Knight(BaseModel):
    title: str
    age: int
    color: str = 'blue'


class Quest(BaseModel):
    title: str
    knight: Knight


quest = Quest(
    title='To seek the Holy Grail',
    knight={'title': 'Sir Lancelot', 'age': 23},
)
```

The type of the field `knight` is declared with the class `Knight` (a *pydantic* model) and the code is passing a literal `dict` instead. This is still valid for *pydantic*, and the `dict` would be automatically converted to a `Knight` instance.

Nevertheless, it would be detected as a type error:

![VS Code strict type errors with model](./img/vs_code_07.png)

In those cases, there are several ways to disable or ignore strict errors in very specific places, while still preserving them in the rest of the code.

Below are several techniques to achieve it.

#### Disable type checks in a line

You can disable the errors for a specific line using a comment of:

```py
# type: ignore
```

or (to be specific to pylance/pyright):

```py
# pyright: ignore
```

([pyright](https://github.com/microsoft/pyright) is the language server used by Pylance.)

Coming back to the example with `age='23'`, it would be:

```Python hl_lines="10"
from pydantic import BaseModel


class Knight(BaseModel):
    title: str
    age: int
    color: str = 'blue'


lancelot = Knight(title='Sir Lancelot', age='23')  # pyright: ignore
```

That way Pylance and mypy will ignore errors in that line.

**Pros**: it's a simple change in that line to remove errors there.
**Cons**: any other error in that line will also be omitted, including type checks, misspelled arguments, required arguments not provided, etc.

#### Override the type of a variable

You can also create a variable with the value you want to use and declare its type explicitly with `Any`.

```Python hl_lines="1 11-12"
from typing import Any

from pydantic import BaseModel


class Knight(BaseModel):
    title: str
    age: int
    color: str = 'blue'

age_str: Any = '23'
lancelot = Knight(title='Sir Lancelot', age=age_str)
```

That way Pylance and mypy will interpret the variable `age_str` as if they didn't know its type, instead of knowing it has a type of `str` when an `int` was expected (and then showing the corresponding error).

**Pros**: errors will be ignored only for a specific value, and you will still see any additional errors for the other arguments.

**Cons**: it requires importing `Any` and a new variable in a new line for each argument that needs ignoring errors.

#### Override the type of a value with `cast`

The same idea from the previous example can be put on the same line with the help of `cast()`.

This way, the type declaration of the value is overridden inline, without requiring another variable.

```Python hl_lines="1 11"
from typing import Any, cast

from pydantic import BaseModel


class Knight(BaseModel):
    title: str
    age: int
    color: str = 'blue'

lancelot = Knight(title='Sir Lancelot', age=cast(Any, '23'))
```

`cast(Any, '23')` doesn't affect the value, it's still just `'23'`, but now Pylance and mypy will assume it is of type `Any`, which means they will act as if they didn't know the type of the value.

So, this is the equivalent of the previous example, without the additional variable.

**Pros**: errors will be ignored only for a specific value, and you will still see any additional errors for the other arguments. There's no need for additional variables.

**Cons**: it requires importing `Any` and `cast`, and if you are not used to using `cast()`, it could seem strange at first.

### Config in class arguments

*pydantic* has a rich set of [Model Configurations](./usage/model_config.md) available.

These configurations can be set in an internal `class Config` on each model:

```Python hl_lines="9-10"
from pydantic import BaseModel


class Knight(BaseModel):
    title: str
    age: int
    color: str = 'blue'

    class Config:
        frozen = True
```

or passed as keyword arguments when defining the model class:

```Python hl_lines="4"
from pydantic import BaseModel


class Knight(BaseModel, frozen=True):
    title: str
    age: int
    color: str = 'blue'
```

The specific configuration **`frozen`** (in beta) has a special meaning.

It prevents other code from changing a model instance once it's created, keeping it **"frozen"**.

When using the second version to declare `frozen=True` (with **keyword arguments** in the class definition), Pylance can use it to help you check in your code and **detect errors** when something is trying to set values in a model that is "frozen".

![VS Code strict type errors with model](./img/vs_code_08.png)

## BaseSettings and ignoring Pylance/pyright errors

Pylance/pyright does not work well with [`BaseSettings`](./usage/settings.md) - fields in settings classes can be configured via environment variables, and therefore "required" fields do not have to be explicitly set when initialising a settings instance. However, pyright considers these fields as "required" and will therefore show an error when they're not set.
See [#3753](https://github.com/pydantic/pydantic/issues/3753#issuecomment-1087417884) for an explanation of the reasons behind this, and why we can't avoid the problem.

There are two potential workarounds:

* use an ignore comment (`# pyright: ignore`) when initialising `settings`
* or, use `settings.parse_obj({})` to avoid the warning

## Adding a default with `Field`

Pylance/pyright requires `default` to be a keyword argument to `Field` in order to infer that the field is optional.

```py
from pydantic import BaseModel, Field


class Knight(BaseModel):
    title: str = Field(default='Sir Lancelot')  # this is okay
    age: int = Field(23)  # this works fine at runtime but will cause an error for pyright


lance = Knight()  # error: Argument missing for parameter "age"
```

Like the issue with `BaseSettings`, this is a limitation of dataclass transforms and cannot be fixed in pydantic.

## Technical Details

!!! warning
    As a *pydantic* user, you don't need the details below. Feel free to skip the rest of this section.

    These details are only useful for other library authors, etc.

This additional editor support works by implementing the proposed draft standard for [Dataclass Transform](https://github.com/microsoft/pyright/blob/master/specs/dataclass_transforms.md).

The proposed draft standard is written by Eric Traut, from the Microsoft team, the same author of the open source package Pyright (used by Pylance to provide Python support in VS Code).

The intention of the standard is to provide a way for libraries like *pydantic* and others to tell editors and tools that they (the editors) should treat these libraries (e.g. *pydantic*) as if they were `dataclasses`, providing autocompletion, type checks, etc.

The draft standard also includes an [Alternate Form](https://github.com/microsoft/pyright/blob/master/specs/dataclass_transforms.md#alternate-form) for early adopters, like *pydantic*, to add support for it right away, even before the new draft standard is finished and approved.

This new draft standard, with the Alternate Form, is already supported by Pyright, so it can be used via Pylance in VS Code.

As it is being proposed as an official standard for Python, other editors can also easily add support for it.

And authors of other libraries similar to *pydantic* can also easily adopt the standard right away (using the "Alternate Form") and get the benefits of these additional editor features.
pydantic-1.10.14/mkdocs.yml000066400000000000000000000043131455251250200154760ustar00rootroot00000000000000site_name: Pydantic site_description: Data validation using Python type hints strict: true site_url: https://docs.pydantic.dev/ theme: name: 'material' custom_dir: 'docs/theme' palette: - media: "(prefers-color-scheme: light)" scheme: default primary: pink accent: pink toggle: icon: material/lightbulb-outline name: "Switch to dark mode" - media: "(prefers-color-scheme: dark)" scheme: slate primary: pink accent: pink toggle: icon: material/lightbulb name: "Switch to light mode" features: - content.tabs.link - announce.dismiss logo: 'logo-white.svg' favicon: 'favicon.png' repo_name: pydantic/pydantic repo_url: https://github.com/pydantic/pydantic edit_uri: edit/main/docs/ extra_css: - 'extra/terminal.css' - 'extra/tweaks.css' extra_javascript: - 'extra/redirects.js' nav: - Overview: index.md - install.md - Usage: - usage/models.md - 'Field Types': usage/types.md - usage/validators.md - 'Model Config': usage/model_config.md - usage/schema.md - usage/exporting_models.md - usage/dataclasses.md - usage/validation_decorator.md - 'Settings management': usage/settings.md - usage/postponed_annotations.md - 'Usage with mypy': usage/mypy.md - 'Usage with devtools': usage/devtools.md - 'Usage with rich': usage/rich.md - Blog: - blog/pydantic-v2-alpha.md - blog/pydantic-v2.md - Contributing to pydantic: contributing.md - 'Mypy plugin': mypy_plugin.md - 'PyCharm plugin': pycharm_plugin.md - 'Visual Studio Code': visual_studio_code.md - 'Hypothesis plugin': hypothesis_plugin.md - 'Code Generation': datamodel_code_generator.md - changelog.md markdown_extensions: - tables - markdown_include.include: base_path: docs - toc: permalink: true - admonition - pymdownx.highlight - pymdownx.extra - mdx_truly_sane_lists - pymdownx.emoji: emoji_index: !!python/name:material.extensions.emoji.twemoji emoji_generator: !!python/name:material.extensions.emoji.to_svg - pymdownx.tabbed: alternate_style: true extra: version: provider: mike plugins: - mike: alias_type: symlink canonical_version: latest - search - exclude: glob: - _build/* - build/* - examples/* - requirements.txt pydantic-1.10.14/pydantic/000077500000000000000000000000001455251250200153055ustar00rootroot00000000000000pydantic-1.10.14/pydantic/__init__.py000066400000000000000000000053231455251250200174210ustar00rootroot00000000000000# flake8: noqa from . import dataclasses from .annotated_types import create_model_from_namedtuple, create_model_from_typeddict from .class_validators import root_validator, validator from .config import BaseConfig, ConfigDict, Extra from .decorator import validate_arguments from .env_settings import BaseSettings from .error_wrappers import ValidationError from .errors import * from .fields import Field, PrivateAttr, Required from .main import * from .networks import * from .parse import Protocol from .tools import * from .types import * from .version import VERSION, compiled __version__ = VERSION # WARNING __all__ from .errors is not included here, it will be removed as an export here in v2 # please use "from pydantic.errors import ..." 
instead __all__ = [ # annotated types utils 'create_model_from_namedtuple', 'create_model_from_typeddict', # dataclasses 'dataclasses', # class_validators 'root_validator', 'validator', # config 'BaseConfig', 'ConfigDict', 'Extra', # decorator 'validate_arguments', # env_settings 'BaseSettings', # error_wrappers 'ValidationError', # fields 'Field', 'Required', # main 'BaseModel', 'create_model', 'validate_model', # network 'AnyUrl', 'AnyHttpUrl', 'FileUrl', 'HttpUrl', 'stricturl', 'EmailStr', 'NameEmail', 'IPvAnyAddress', 'IPvAnyInterface', 'IPvAnyNetwork', 'PostgresDsn', 'CockroachDsn', 'AmqpDsn', 'RedisDsn', 'MongoDsn', 'KafkaDsn', 'validate_email', # parse 'Protocol', # tools 'parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of', # types 'NoneStr', 'NoneBytes', 'StrBytes', 'NoneStrBytes', 'StrictStr', 'ConstrainedBytes', 'conbytes', 'ConstrainedList', 'conlist', 'ConstrainedSet', 'conset', 'ConstrainedFrozenSet', 'confrozenset', 'ConstrainedStr', 'constr', 'PyObject', 'ConstrainedInt', 'conint', 'PositiveInt', 'NegativeInt', 'NonNegativeInt', 'NonPositiveInt', 'ConstrainedFloat', 'confloat', 'PositiveFloat', 'NegativeFloat', 'NonNegativeFloat', 'NonPositiveFloat', 'FiniteFloat', 'ConstrainedDecimal', 'condecimal', 'ConstrainedDate', 'condate', 'UUID1', 'UUID3', 'UUID4', 'UUID5', 'FilePath', 'DirectoryPath', 'Json', 'JsonWrapper', 'SecretField', 'SecretStr', 'SecretBytes', 'StrictBool', 'StrictBytes', 'StrictInt', 'StrictFloat', 'PaymentCardNumber', 'PrivateAttr', 'ByteSize', 'PastDate', 'FutureDate', # version 'compiled', 'VERSION', ] pydantic-1.10.14/pydantic/_hypothesis_plugin.py000066400000000000000000000347741455251250200216120ustar00rootroot00000000000000""" Register Hypothesis strategies for Pydantic custom types. This enables fully-automatic generation of test data for most Pydantic classes. Note that this module has *no* runtime impact on Pydantic itself; instead it is registered as a setuptools entry point and Hypothesis will import it if Pydantic is installed. See also: https://hypothesis.readthedocs.io/en/latest/strategies.html#registering-strategies-via-setuptools-entry-points https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.register_type_strategy https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov https://docs.pydantic.dev/usage/types/#pydantic-types Note that because our motivation is to *improve user experience*, the strategies are always sound (never generate invalid data) but sacrifice completeness for maintainability (ie may be unable to generate some tricky but valid data). Finally, this module makes liberal use of `# type: ignore[]` pragmas. This is because Hypothesis annotates `register_type_strategy()` with `(T, SearchStrategy[T])`, but in most cases we register e.g. `ConstrainedInt` to generate instances of the builtin `int` type which match the constraints. """ import contextlib import datetime import ipaddress import json import math from fractions import Fraction from typing import Callable, Dict, Type, Union, cast, overload import hypothesis.strategies as st import pydantic import pydantic.color import pydantic.types from pydantic.utils import lenient_issubclass # FilePath and DirectoryPath are explicitly unsupported, as we'd have to create # them on-disk, and that's unsafe in general without being told *where* to do so. 
# # URLs are unsupported because it's easy for users to define their own strategy for # "normal" URLs, and hard for us to define a general strategy which includes "weird" # URLs but doesn't also have unpredictable performance problems. # # conlist() and conset() are unsupported for now, because the workarounds for # Cython and Hypothesis to handle parametrized generic types are incompatible. # We are rethinking Hypothesis compatibility in Pydantic v2. # Emails try: import email_validator except ImportError: # pragma: no cover pass else: def is_valid_email(s: str) -> bool: # Hypothesis' st.emails() occasionally generates emails like 0@A0--0.ac # that are invalid according to email-validator, so we filter those out. try: email_validator.validate_email(s, check_deliverability=False) return True except email_validator.EmailNotValidError: # pragma: no cover return False # Note that these strategies deliberately stay away from any tricky Unicode # or other encoding issues; we're just trying to generate *something* valid. st.register_type_strategy(pydantic.EmailStr, st.emails().filter(is_valid_email)) # type: ignore[arg-type] st.register_type_strategy( pydantic.NameEmail, st.builds( '{} <{}>'.format, # type: ignore[arg-type] st.from_regex('[A-Za-z0-9_]+( [A-Za-z0-9_]+){0,5}', fullmatch=True), st.emails().filter(is_valid_email), ), ) # PyObject - dotted names, in this case taken from the math module. st.register_type_strategy( pydantic.PyObject, # type: ignore[arg-type] st.sampled_from( [cast(pydantic.PyObject, f'math.{name}') for name in sorted(vars(math)) if not name.startswith('_')] ), ) # CSS3 Colors; as name, hex, rgb(a) tuples or strings, or hsl strings _color_regexes = ( '|'.join( ( pydantic.color.r_hex_short, pydantic.color.r_hex_long, pydantic.color.r_rgb, pydantic.color.r_rgba, pydantic.color.r_hsl, pydantic.color.r_hsla, ) ) # Use more precise regex patterns to avoid value-out-of-range errors .replace(pydantic.color._r_sl, r'(?:(\d\d?(?:\.\d+)?|100(?:\.0+)?)%)') .replace(pydantic.color._r_alpha, r'(?:(0(?:\.\d+)?|1(?:\.0+)?|\.\d+|\d{1,2}%))') .replace(pydantic.color._r_255, r'(?:((?:\d|\d\d|[01]\d\d|2[0-4]\d|25[0-4])(?:\.\d+)?|255(?:\.0+)?))') ) st.register_type_strategy( pydantic.color.Color, st.one_of( st.sampled_from(sorted(pydantic.color.COLORS_BY_NAME)), st.tuples( st.integers(0, 255), st.integers(0, 255), st.integers(0, 255), st.none() | st.floats(0, 1) | st.floats(0, 100).map('{}%'.format), ), st.from_regex(_color_regexes, fullmatch=True), ), ) # Card numbers, valid according to the Luhn algorithm def add_luhn_digit(card_number: str) -> str: # See https://en.wikipedia.org/wiki/Luhn_algorithm for digit in '0123456789': with contextlib.suppress(Exception): pydantic.PaymentCardNumber.validate_luhn_check_digit(card_number + digit) return card_number + digit raise AssertionError('Unreachable') # pragma: no cover card_patterns = ( # Note that these patterns omit the Luhn check digit; that's added by the function above '4[0-9]{14}', # Visa '5[12345][0-9]{13}', # Mastercard '3[47][0-9]{12}', # American Express '[0-26-9][0-9]{10,17}', # other (incomplete to avoid overlap) ) st.register_type_strategy( pydantic.PaymentCardNumber, st.from_regex('|'.join(card_patterns), fullmatch=True).map(add_luhn_digit), # type: ignore[arg-type] ) # UUIDs st.register_type_strategy(pydantic.UUID1, st.uuids(version=1)) st.register_type_strategy(pydantic.UUID3, st.uuids(version=3)) st.register_type_strategy(pydantic.UUID4, st.uuids(version=4)) st.register_type_strategy(pydantic.UUID5, st.uuids(version=5)) # 
Secrets st.register_type_strategy(pydantic.SecretBytes, st.binary().map(pydantic.SecretBytes)) st.register_type_strategy(pydantic.SecretStr, st.text().map(pydantic.SecretStr)) # IP addresses, networks, and interfaces st.register_type_strategy(pydantic.IPvAnyAddress, st.ip_addresses()) # type: ignore[arg-type] st.register_type_strategy( pydantic.IPvAnyInterface, st.from_type(ipaddress.IPv4Interface) | st.from_type(ipaddress.IPv6Interface), # type: ignore[arg-type] ) st.register_type_strategy( pydantic.IPvAnyNetwork, st.from_type(ipaddress.IPv4Network) | st.from_type(ipaddress.IPv6Network), # type: ignore[arg-type] ) # We hook into the con***() functions and the ConstrainedNumberMeta metaclass, # so here we only have to register subclasses for other constrained types which # don't go via those mechanisms. Then there are the registration hooks below. st.register_type_strategy(pydantic.StrictBool, st.booleans()) st.register_type_strategy(pydantic.StrictStr, st.text()) # FutureDate, PastDate st.register_type_strategy(pydantic.FutureDate, st.dates(min_value=datetime.date.today() + datetime.timedelta(days=1))) st.register_type_strategy(pydantic.PastDate, st.dates(max_value=datetime.date.today() - datetime.timedelta(days=1))) # Constrained-type resolver functions # # For these ones, we actually want to inspect the type in order to work out a # satisfying strategy. First up, the machinery for tracking resolver functions: RESOLVERS: Dict[type, Callable[[type], st.SearchStrategy]] = {} # type: ignore[type-arg] @overload def _registered(typ: Type[pydantic.types.T]) -> Type[pydantic.types.T]: pass @overload def _registered(typ: pydantic.types.ConstrainedNumberMeta) -> pydantic.types.ConstrainedNumberMeta: pass def _registered( typ: Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta] ) -> Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]: # This function replaces the version in `pydantic.types`, in order to # effect the registration of new constrained types so that Hypothesis # can generate valid examples. 
pydantic.types._DEFINED_TYPES.add(typ) for supertype, resolver in RESOLVERS.items(): if issubclass(typ, supertype): st.register_type_strategy(typ, resolver(typ)) # type: ignore return typ raise NotImplementedError(f'Unknown type {typ!r} has no resolver to register') # pragma: no cover def resolves( typ: Union[type, pydantic.types.ConstrainedNumberMeta] ) -> Callable[[Callable[..., st.SearchStrategy]], Callable[..., st.SearchStrategy]]: # type: ignore[type-arg] def inner(f): # type: ignore assert f not in RESOLVERS RESOLVERS[typ] = f return f return inner # Type-to-strategy resolver functions @resolves(pydantic.JsonWrapper) def resolve_json(cls): # type: ignore[no-untyped-def] try: inner = st.none() if cls.inner_type is None else st.from_type(cls.inner_type) except Exception: # pragma: no cover finite = st.floats(allow_infinity=False, allow_nan=False) inner = st.recursive( base=st.one_of(st.none(), st.booleans(), st.integers(), finite, st.text()), extend=lambda x: st.lists(x) | st.dictionaries(st.text(), x), # type: ignore ) inner_type = getattr(cls, 'inner_type', None) return st.builds( cls.inner_type.json if lenient_issubclass(inner_type, pydantic.BaseModel) else json.dumps, inner, ensure_ascii=st.booleans(), indent=st.none() | st.integers(0, 16), sort_keys=st.booleans(), ) @resolves(pydantic.ConstrainedBytes) def resolve_conbytes(cls): # type: ignore[no-untyped-def] # pragma: no cover min_size = cls.min_length or 0 max_size = cls.max_length if not cls.strip_whitespace: return st.binary(min_size=min_size, max_size=max_size) # Fun with regex to ensure we neither start nor end with whitespace repeats = '{{{},{}}}'.format( min_size - 2 if min_size > 2 else 0, max_size - 2 if (max_size or 0) > 2 else '', ) if min_size >= 2: pattern = rf'\W.{repeats}\W' elif min_size == 1: pattern = rf'\W(.{repeats}\W)?' else: assert min_size == 0 pattern = rf'(\W(.{repeats}\W)?)?' 
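    # Worked example (sketch): for conbytes(min_length=4, max_length=8) with
    # strip_whitespace=True, `repeats` is '{2,6}', so the branch above builds
    # the pattern rb'\W.{2,6}\W' - one anchoring byte at each end with 2-6
    # arbitrary bytes in between.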
return st.from_regex(pattern.encode(), fullmatch=True) @resolves(pydantic.ConstrainedDecimal) def resolve_condecimal(cls): # type: ignore[no-untyped-def] min_value = cls.ge max_value = cls.le if cls.gt is not None: assert min_value is None, 'Set `gt` or `ge`, but not both' min_value = cls.gt if cls.lt is not None: assert max_value is None, 'Set `lt` or `le`, but not both' max_value = cls.lt s = st.decimals(min_value, max_value, allow_nan=False, places=cls.decimal_places) if cls.lt is not None: s = s.filter(lambda d: d < cls.lt) if cls.gt is not None: s = s.filter(lambda d: cls.gt < d) return s @resolves(pydantic.ConstrainedFloat) def resolve_confloat(cls): # type: ignore[no-untyped-def] min_value = cls.ge max_value = cls.le exclude_min = False exclude_max = False if cls.gt is not None: assert min_value is None, 'Set `gt` or `ge`, but not both' min_value = cls.gt exclude_min = True if cls.lt is not None: assert max_value is None, 'Set `lt` or `le`, but not both' max_value = cls.lt exclude_max = True if cls.multiple_of is None: return st.floats(min_value, max_value, exclude_min=exclude_min, exclude_max=exclude_max, allow_nan=False) if min_value is not None: min_value = math.ceil(min_value / cls.multiple_of) if exclude_min: min_value = min_value + 1 if max_value is not None: assert max_value >= cls.multiple_of, 'Cannot build model with max value smaller than multiple of' max_value = math.floor(max_value / cls.multiple_of) if exclude_max: max_value = max_value - 1 return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of) @resolves(pydantic.ConstrainedInt) def resolve_conint(cls): # type: ignore[no-untyped-def] min_value = cls.ge max_value = cls.le if cls.gt is not None: assert min_value is None, 'Set `gt` or `ge`, but not both' min_value = cls.gt + 1 if cls.lt is not None: assert max_value is None, 'Set `lt` or `le`, but not both' max_value = cls.lt - 1 if cls.multiple_of is None or cls.multiple_of == 1: return st.integers(min_value, max_value) # These adjustments and the .map handle integer-valued multiples, while the # .filter handles trickier cases as for confloat. 
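    # Worked example (sketch): for conint(ge=1, le=10, multiple_of=3) the code
    # below computes ceil(1/3) = 1 and floor(10/3) = 3, so the strategy is
    # st.integers(1, 3).map(lambda x: x * 3) and can only ever draw 3, 6 or 9.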
if min_value is not None: min_value = math.ceil(Fraction(min_value) / Fraction(cls.multiple_of)) if max_value is not None: max_value = math.floor(Fraction(max_value) / Fraction(cls.multiple_of)) return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of) @resolves(pydantic.ConstrainedDate) def resolve_condate(cls): # type: ignore[no-untyped-def] if cls.ge is not None: assert cls.gt is None, 'Set `gt` or `ge`, but not both' min_value = cls.ge elif cls.gt is not None: min_value = cls.gt + datetime.timedelta(days=1) else: min_value = datetime.date.min if cls.le is not None: assert cls.lt is None, 'Set `lt` or `le`, but not both' max_value = cls.le elif cls.lt is not None: max_value = cls.lt - datetime.timedelta(days=1) else: max_value = datetime.date.max return st.dates(min_value, max_value) @resolves(pydantic.ConstrainedStr) def resolve_constr(cls): # type: ignore[no-untyped-def] # pragma: no cover min_size = cls.min_length or 0 max_size = cls.max_length if cls.regex is None and not cls.strip_whitespace: return st.text(min_size=min_size, max_size=max_size) if cls.regex is not None: strategy = st.from_regex(cls.regex) if cls.strip_whitespace: strategy = strategy.filter(lambda s: s == s.strip()) elif cls.strip_whitespace: repeats = '{{{},{}}}'.format( min_size - 2 if min_size > 2 else 0, max_size - 2 if (max_size or 0) > 2 else '', ) if min_size >= 2: strategy = st.from_regex(rf'\W.{repeats}\W') elif min_size == 1: strategy = st.from_regex(rf'\W(.{repeats}\W)?') else: assert min_size == 0 strategy = st.from_regex(rf'(\W(.{repeats}\W)?)?') if min_size == 0 and max_size is None: return strategy elif max_size is None: return strategy.filter(lambda s: min_size <= len(s)) return strategy.filter(lambda s: min_size <= len(s) <= max_size) # Finally, register all previously-defined types, and patch in our new function for typ in list(pydantic.types._DEFINED_TYPES): _registered(typ) pydantic.types._registered = _registered st.register_type_strategy(pydantic.Json, resolve_json) pydantic-1.10.14/pydantic/annotated_types.py000066400000000000000000000060641455251250200210660ustar00rootroot00000000000000import sys from typing import TYPE_CHECKING, Any, Dict, FrozenSet, NamedTuple, Type from .fields import Required from .main import BaseModel, create_model from .typing import is_typeddict, is_typeddict_special if TYPE_CHECKING: from typing_extensions import TypedDict if sys.version_info < (3, 11): def is_legacy_typeddict(typeddict_cls: Type['TypedDict']) -> bool: # type: ignore[valid-type] return is_typeddict(typeddict_cls) and type(typeddict_cls).__module__ == 'typing' else: def is_legacy_typeddict(_: Any) -> Any: return False def create_model_from_typeddict( # Mypy bug: `Type[TypedDict]` is resolved as `Any` https://github.com/python/mypy/issues/11030 typeddict_cls: Type['TypedDict'], # type: ignore[valid-type] **kwargs: Any, ) -> Type['BaseModel']: """ Create a `BaseModel` based on the fields of a `TypedDict`. Since `typing.TypedDict` in Python 3.8 does not store runtime information about optional keys, we raise an error if this happens (see https://bugs.python.org/issue38834). """ field_definitions: Dict[str, Any] # Best case scenario: with python 3.9+ or when `TypedDict` is imported from `typing_extensions` if not hasattr(typeddict_cls, '__required_keys__'): raise TypeError( 'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. ' 'Without it, there is no way to differentiate required and optional fields when subclassed.' 
) if is_legacy_typeddict(typeddict_cls) and any( is_typeddict_special(t) for t in typeddict_cls.__annotations__.values() ): raise TypeError( 'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.11. ' 'Without it, there is no way to reflect Required/NotRequired keys.' ) required_keys: FrozenSet[str] = typeddict_cls.__required_keys__ # type: ignore[attr-defined] field_definitions = { field_name: (field_type, Required if field_name in required_keys else None) for field_name, field_type in typeddict_cls.__annotations__.items() } return create_model(typeddict_cls.__name__, **kwargs, **field_definitions) def create_model_from_namedtuple(namedtuple_cls: Type['NamedTuple'], **kwargs: Any) -> Type['BaseModel']: """ Create a `BaseModel` based on the fields of a named tuple. A named tuple can be created with `typing.NamedTuple` and declared annotations but also with `collections.namedtuple`, in this case we consider all fields to have type `Any`. """ # With python 3.10+, `__annotations__` always exists but can be empty hence the `getattr... or...` logic namedtuple_annotations: Dict[str, Type[Any]] = getattr(namedtuple_cls, '__annotations__', None) or { k: Any for k in namedtuple_cls._fields } field_definitions: Dict[str, Any] = { field_name: (field_type, Required) for field_name, field_type in namedtuple_annotations.items() } return create_model(namedtuple_cls.__name__, **kwargs, **field_definitions) pydantic-1.10.14/pydantic/class_validators.py000066400000000000000000000344031455251250200212200ustar00rootroot00000000000000import warnings from collections import ChainMap from functools import partial, partialmethod, wraps from itertools import chain from types import FunctionType from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload from .errors import ConfigError from .typing import AnyCallable from .utils import ROOT_KEY, in_ipython if TYPE_CHECKING: from .typing import AnyClassMethod class Validator: __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure' def __init__( self, func: AnyCallable, pre: bool = False, each_item: bool = False, always: bool = False, check_fields: bool = False, skip_on_failure: bool = False, ): self.func = func self.pre = pre self.each_item = each_item self.always = always self.check_fields = check_fields self.skip_on_failure = skip_on_failure if TYPE_CHECKING: from inspect import Signature from .config import BaseConfig from .fields import ModelField from .types import ModelOrDc ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any] ValidatorsList = List[ValidatorCallable] ValidatorListDict = Dict[str, List[Validator]] _FUNCS: Set[str] = set() VALIDATOR_CONFIG_KEY = '__validator_config__' ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__' def validator( *fields: str, pre: bool = False, each_item: bool = False, always: bool = False, check_fields: bool = True, whole: Optional[bool] = None, allow_reuse: bool = False, ) -> Callable[[AnyCallable], 'AnyClassMethod']: """ Decorate methods on the class indicating that they should be used to validate fields :param fields: which field(s) the method should be called on :param pre: whether or not this validator should be called before the standard validators (else after) :param each_item: for complex objects (sets, lists etc.) 
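# Example (sketch): converting a TypedDict into a validating model with the
# `create_model_from_typeddict` helper above; `Movie` is an illustrative name.
#
#     from typing_extensions import TypedDict
#     from pydantic.annotated_types import create_model_from_typeddict
#
#     class Movie(TypedDict):
#         name: str
#         year: int
#
#     MovieModel = create_model_from_typeddict(Movie)
#     MovieModel(name='The Matrix', year='1999')  # '1999' is coerced to int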
      whether to validate individual elements rather than the whole object
    :param always: whether this method and other validators should be called even if the value is missing
    :param check_fields: whether to check that the fields actually exist on the model
    :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function
    """
    if not fields:
        raise ConfigError('validator with no fields specified')
    elif isinstance(fields[0], FunctionType):
        raise ConfigError(
            "validators should be used with fields and keyword arguments, not bare. "  # noqa: Q000
            "E.g. usage should be `@validator('<field_name>', ...)`"
        )
    elif not all(isinstance(field, str) for field in fields):
        raise ConfigError(
            "validator fields should be passed as separate string args. "  # noqa: Q000
            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`"
        )

    if whole is not None:
        warnings.warn(
            'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead',
            DeprecationWarning,
        )
        assert each_item is False, '"each_item" and "whole" conflict, remove "whole"'
        each_item = not whole

    def dec(f: AnyCallable) -> 'AnyClassMethod':
        f_cls = _prepare_validator(f, allow_reuse)
        setattr(
            f_cls,
            VALIDATOR_CONFIG_KEY,
            (
                fields,
                Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields),
            ),
        )
        return f_cls

    return dec


@overload
def root_validator(_func: AnyCallable) -> 'AnyClassMethod':
    ...


@overload
def root_validator(
    *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
) -> Callable[[AnyCallable], 'AnyClassMethod']:
    ...


def root_validator(
    _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]:
    """
    Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either
    before or after standard model parsing/validation is performed.
    """
    if _func:
        f_cls = _prepare_validator(_func, allow_reuse)
        setattr(
            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
        )
        return f_cls

    def dec(f: AnyCallable) -> 'AnyClassMethod':
        f_cls = _prepare_validator(f, allow_reuse)
        setattr(
            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
        )
        return f_cls

    return dec


def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod':
    """
    Avoid validators with duplicated names since without this, validators can be overwritten silently
    which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False.
    """
    f_cls = function if isinstance(function, classmethod) else classmethod(function)
    if not in_ipython() and not allow_reuse:
        ref = (
            getattr(f_cls.__func__, '__module__', '<No __module__>')
            + '.'
            + getattr(f_cls.__func__, '__qualname__', f'<No __qualname__: id:{id(f_cls.__func__)}>')
        )
        if ref in _FUNCS:
            raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`')
        _FUNCS.add(ref)
    return f_cls


class ValidatorGroup:
    def __init__(self, validators: 'ValidatorListDict') -> None:
        self.validators = validators
        self.used_validators = {'*'}

    def get_validators(self, name: str) -> Optional[Dict[str, Validator]]:
        self.used_validators.add(name)
        validators = self.validators.get(name, [])
        if name != ROOT_KEY:
            validators += self.validators.get('*', [])
        if validators:
            return {getattr(v.func, '__name__', f'<No __name__: id:{id(v.func)}>'): v for v in validators}
        else:
            return None

    def check_for_unused(self) -> None:
        unused_validators = set(
            chain.from_iterable(
                (
                    getattr(v.func, '__name__', f'<No __name__: id:{id(v.func)}>')
                    for v in self.validators[f]
                    if v.check_fields
                )
                for f in (self.validators.keys() - self.used_validators)
            )
        )
        if unused_validators:
            fn = ', '.join(unused_validators)
            raise ConfigError(
                f"Validators defined with incorrect fields: {fn} "  # noqa: Q000
                f"(use check_fields=False if you're inheriting from the model and intended this)"
            )


def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]:
    validators: Dict[str, List[Validator]] = {}
    for var_name, value in namespace.items():
        validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None)
        if validator_config:
            fields, v = validator_config
            for field in fields:
                if field in validators:
                    validators[field].append(v)
                else:
                    validators[field] = [v]
    return validators


def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]:
    from inspect import signature

    pre_validators: List[AnyCallable] = []
    post_validators: List[Tuple[bool, AnyCallable]] = []
    for name, value in namespace.items():
        validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None)
        if validator_config:
            sig = signature(validator_config.func)
            args = list(sig.parameters.keys())
            if args[0] == 'self':
                raise ConfigError(
                    f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, '
                    f'should be: (cls, values).'
                )
            if len(args) != 2:
                raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).')
            # check function signature
            if validator_config.pre:
                pre_validators.append(validator_config.func)
            else:
                post_validators.append((validator_config.skip_on_failure, validator_config.func))
    return pre_validators, post_validators


def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict':
    for field, field_validators in base_validators.items():
        if field not in validators:
            validators[field] = []
        validators[field] += field_validators
    return validators


def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
    """
    Make a generic function which calls a validator with the right arguments.

    Unfortunately other approaches (eg. returning a partial of a function that builds the arguments) are slow,
    hence this laborious way of doing things.

    It's done like this so validators don't all need **kwargs in their signature, eg. any combination of
    the arguments "values", "field" and/or "config" are permitted.
""" from inspect import signature if not isinstance(validator, (partial, partialmethod)): # This should be the default case, so overhead is reduced sig = signature(validator) args = list(sig.parameters.keys()) else: # Fix the generated argument lists of partial methods sig = signature(validator.func) args = [ k for k in signature(validator.func).parameters.keys() if k not in validator.args | validator.keywords.keys() ] first_arg = args.pop(0) if first_arg == 'self': raise ConfigError( f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, ' f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.' ) elif first_arg == 'cls': # assume the second argument is value return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) else: # assume the first argument was value which has already been removed return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList': return [make_generic_validator(f) for f in v_funcs if f] all_kwargs = {'values', 'field', 'config'} def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': # assume the first argument is value has_kwargs = False if 'kwargs' in args: has_kwargs = True args -= {'kwargs'} if not args.issubset(all_kwargs): raise ConfigError( f'Invalid signature for validator {validator}: {sig}, should be: ' f'(cls, value, values, config, field), "values", "config" and "field" are all optional.' ) if has_kwargs: return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) elif args == set(): return lambda cls, v, values, field, config: validator(cls, v) elif args == {'values'}: return lambda cls, v, values, field, config: validator(cls, v, values=values) elif args == {'field'}: return lambda cls, v, values, field, config: validator(cls, v, field=field) elif args == {'config'}: return lambda cls, v, values, field, config: validator(cls, v, config=config) elif args == {'values', 'field'}: return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field) elif args == {'values', 'config'}: return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config) elif args == {'field', 'config'}: return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config) else: # args == {'values', 'field', 'config'} return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': has_kwargs = False if 'kwargs' in args: has_kwargs = True args -= {'kwargs'} if not args.issubset(all_kwargs): raise ConfigError( f'Invalid signature for validator {validator}: {sig}, should be: ' f'(value, values, config, field), "values", "config" and "field" are all optional.' 
    )
    if has_kwargs:
        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
    elif args == set():
        return lambda cls, v, values, field, config: validator(v)
    elif args == {'values'}:
        return lambda cls, v, values, field, config: validator(v, values=values)
    elif args == {'field'}:
        return lambda cls, v, values, field, config: validator(v, field=field)
    elif args == {'config'}:
        return lambda cls, v, values, field, config: validator(v, config=config)
    elif args == {'values', 'field'}:
        return lambda cls, v, values, field, config: validator(v, values=values, field=field)
    elif args == {'values', 'config'}:
        return lambda cls, v, values, field, config: validator(v, values=values, config=config)
    elif args == {'field', 'config'}:
        return lambda cls, v, values, field, config: validator(v, field=field, config=config)
    else:
        # args == {'values', 'field', 'config'}
        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)


def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']:
    all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__])  # type: ignore[arg-type,var-annotated]
    return {
        k: v
        for k, v in all_attributes.items()
        if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY)
    }
pydantic-1.10.14/pydantic/color.py000066400000000000000000000406531455251250200170050ustar00rootroot00000000000000
"""
Color definitions are used as per CSS3 specification:
http://www.w3.org/TR/css3-color/#svg-color

A few colors have multiple names referring to the same colors, eg. `grey` and `gray` or `aqua` and `cyan`.

In these cases the LAST color when sorted alphabetically takes precedence,
eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua".
"""
import math
import re
from colorsys import hls_to_rgb, rgb_to_hls
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast

from .errors import ColorError
from .utils import Representation, almost_equal_floats

if TYPE_CHECKING:
    from .typing import CallableGenerator, ReprArgs

ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
ColorType = Union[ColorTuple, str]
HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]


class RGBA:
    """
    Internal use only as a representation of a color.
    """

    __slots__ = 'r', 'g', 'b', 'alpha', '_tuple'

    def __init__(self, r: float, g: float, b: float, alpha: Optional[float]):
        self.r = r
        self.g = g
        self.b = b
        self.alpha = alpha

        self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha)

    def __getitem__(self, item: Any) -> Any:
        return self._tuple[item]


# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
_r_255 = r'(\d{1,3}(?:\.\d+)?)'
_r_comma = r'\s*,\s*'
r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*'
_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*'
_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*'
r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*'

# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used
repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
rads = 2 * math.pi


class Color(Representation):
    __slots__ = '_original', '_rgba'

    def __init__(self, value: ColorType) -> None:
        self._rgba: RGBA
        self._original: ColorType
        if isinstance(value, (tuple, list)):
            self._rgba = parse_tuple(value)
        elif isinstance(value, str):
            self._rgba = parse_str(value)
        elif isinstance(value, Color):
            self._rgba = value._rgba
            value = value._original
        else:
            raise ColorError(reason='value must be a tuple, list or string')

        # if we've got here value must be a valid color
        self._original = value

    @classmethod
    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
        field_schema.update(type='string', format='color')

    def original(self) -> ColorType:
        """
        Original value passed to Color
        """
        return self._original

    def as_named(self, *, fallback: bool = False) -> str:
        if self._rgba.alpha is None:
            rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
            try:
                return COLORS_BY_VALUE[rgb]
            except KeyError as e:
                if fallback:
                    return self.as_hex()
                else:
                    raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
        else:
            return self.as_hex()

    def as_hex(self) -> str:
        """
        Hex string representing the color can be 3, 4, 6 or 8 characters depending on whether a "short"
        representation of the color is possible and whether there's an alpha channel.
        """
        values = [float_to_255(c) for c in self._rgba[:3]]
        if self._rgba.alpha is not None:
            values.append(float_to_255(self._rgba.alpha))

        as_hex = ''.join(f'{v:02x}' for v in values)
        if all(c in repeat_colors for c in values):
            as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
        return '#' + as_hex

    def as_rgb(self) -> str:
        """
        Color as an rgb(<r>, <g>, <b>) or rgba(<r>, <g>, <b>, <a>) string.
        """
        if self._rgba.alpha is None:
            return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
        else:
            return (
                f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
                f'{round(self._alpha_float(), 2)})'
            )

    def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple:
        """
        Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is
        in the range 0 to 1.

        :param alpha: whether to include the alpha channel, options are
          None - (default) include alpha only if it's set (i.e. not None)
          True - always include alpha,
          False - always omit alpha,
        """
        r, g, b = (float_to_255(c) for c in self._rgba[:3])
        if alpha is None:
            if self._rgba.alpha is None:
                return r, g, b
            else:
                return r, g, b, self._alpha_float()
        elif alpha:
            return r, g, b, self._alpha_float()
        else:
            # alpha is False
            return r, g, b

    def as_hsl(self) -> str:
        """
        Color as an hsl(<h>, <s>, <l>) or hsl(<h>, <s>, <l>, <a>) string.
        """
        if self._rgba.alpha is None:
            h, s, li = self.as_hsl_tuple(alpha=False)  # type: ignore
            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
        else:
            h, s, li, a = self.as_hsl_tuple(alpha=True)  # type: ignore
            return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'

    def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple:
        """
        Color as an HSL or HSLA tuple, i.e. hue, saturation, lightness and optionally alpha; all elements are in
        the range 0 to 1.

        NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys.

        :param alpha: whether to include the alpha channel, options are
          None - (default) include alpha only if it's set (i.e. not None)
          True - always include alpha,
          False - always omit alpha,
        """
        h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b)
        if alpha is None:
            if self._rgba.alpha is None:
                return h, s, l
            else:
                return h, s, l, self._alpha_float()
        if alpha:
            return h, s, l, self._alpha_float()
        else:
            # alpha is False
            return h, s, l

    def _alpha_float(self) -> float:
        return 1 if self._rgba.alpha is None else self._rgba.alpha

    @classmethod
    def __get_validators__(cls) -> 'CallableGenerator':
        yield cls

    def __str__(self) -> str:
        return self.as_named(fallback=True)

    def __repr_args__(self) -> 'ReprArgs':
        return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())]  # type: ignore

    def __eq__(self, other: Any) -> bool:
        return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple()

    def __hash__(self) -> int:
        return hash(self.as_rgb_tuple())


def parse_tuple(value: Tuple[Any, ...]) -> RGBA:
    """
    Parse a tuple or list as a color.
    """
    if len(value) == 3:
        r, g, b = (parse_color_value(v) for v in value)
        return RGBA(r, g, b, None)
    elif len(value) == 4:
        r, g, b = (parse_color_value(v) for v in value[:3])
        return RGBA(r, g, b, parse_float_alpha(value[3]))
    else:
        raise ColorError(reason='tuples must have length 3 or 4')


def parse_str(value: str) -> RGBA:
    """
    Parse a string to an RGBA tuple, trying the following formats (in this order):
    * named color, see COLORS_BY_NAME below
    * hex short eg. `fff` (prefix can be `#`, `0x` or nothing)
    * hex long eg. `ffffff` (prefix can be `#`, `0x` or nothing)
    * `rgb(<r>, <g>, <b>)`
    * `rgba(<r>, <g>, <b>, <a>)`
    """
    value_lower = value.lower()
    try:
        r, g, b = COLORS_BY_NAME[value_lower]
    except KeyError:
        pass
    else:
        return ints_to_rgba(r, g, b, None)

    m = re.fullmatch(r_hex_short, value_lower)
    if m:
        *rgb, a = m.groups()
        r, g, b = (int(v * 2, 16) for v in rgb)
        if a:
            alpha: Optional[float] = int(a * 2, 16) / 255
        else:
            alpha = None
        return ints_to_rgba(r, g, b, alpha)

    m = re.fullmatch(r_hex_long, value_lower)
    if m:
        *rgb, a = m.groups()
        r, g, b = (int(v, 16) for v in rgb)
        if a:
            alpha = int(a, 16) / 255
        else:
            alpha = None
        return ints_to_rgba(r, g, b, alpha)

    m = re.fullmatch(r_rgb, value_lower)
    if m:
        return ints_to_rgba(*m.groups(), None)  # type: ignore

    m = re.fullmatch(r_rgba, value_lower)
    if m:
        return ints_to_rgba(*m.groups())  # type: ignore

    m = re.fullmatch(r_hsl, value_lower)
    if m:
        h, h_units, s, l_ = m.groups()
        return parse_hsl(h, h_units, s, l_)

    m = re.fullmatch(r_hsla, value_lower)
    if m:
        h, h_units, s, l_, a = m.groups()
        return parse_hsl(h, h_units, s, l_, parse_float_alpha(a))

    raise ColorError(reason='string not recognised as a valid color')


def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA:
    return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))


def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
    """
    Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number
    in the range 0 to 1
    """
    try:
        color = float(value)
    except ValueError:
        raise ColorError(reason='color values must be a valid number')
    if 0 <= color <= max_val:
        return color / max_val
    else:
        raise ColorError(reason=f'color values must be in the range 0 to {max_val}')


def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
    """
Parse a value checking it's a valid float in the range 0 to 1 """ if value is None: return None try: if isinstance(value, str) and value.endswith('%'): alpha = float(value[:-1]) / 100 else: alpha = float(value) except ValueError: raise ColorError(reason='alpha values must be a valid float') if almost_equal_floats(alpha, 1): return None elif 0 <= alpha <= 1: return alpha else: raise ColorError(reason='alpha values must be in the range 0 to 1') def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA: """ Parse raw hue, saturation, lightness and alpha values and convert to RGBA. """ s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100) h_value = float(h) if h_units in {None, 'deg'}: h_value = h_value % 360 / 360 elif h_units == 'rad': h_value = h_value % rads / rads else: # turns h_value = h_value % 1 r, g, b = hls_to_rgb(h_value, l_value, s_value) return RGBA(r, g, b, alpha) def float_to_255(c: float) -> int: return int(round(c * 255)) COLORS_BY_NAME = { 'aliceblue': (240, 248, 255), 'antiquewhite': (250, 235, 215), 'aqua': (0, 255, 255), 'aquamarine': (127, 255, 212), 'azure': (240, 255, 255), 'beige': (245, 245, 220), 'bisque': (255, 228, 196), 'black': (0, 0, 0), 'blanchedalmond': (255, 235, 205), 'blue': (0, 0, 255), 'blueviolet': (138, 43, 226), 'brown': (165, 42, 42), 'burlywood': (222, 184, 135), 'cadetblue': (95, 158, 160), 'chartreuse': (127, 255, 0), 'chocolate': (210, 105, 30), 'coral': (255, 127, 80), 'cornflowerblue': (100, 149, 237), 'cornsilk': (255, 248, 220), 'crimson': (220, 20, 60), 'cyan': (0, 255, 255), 'darkblue': (0, 0, 139), 'darkcyan': (0, 139, 139), 'darkgoldenrod': (184, 134, 11), 'darkgray': (169, 169, 169), 'darkgreen': (0, 100, 0), 'darkgrey': (169, 169, 169), 'darkkhaki': (189, 183, 107), 'darkmagenta': (139, 0, 139), 'darkolivegreen': (85, 107, 47), 'darkorange': (255, 140, 0), 'darkorchid': (153, 50, 204), 'darkred': (139, 0, 0), 'darksalmon': (233, 150, 122), 'darkseagreen': (143, 188, 143), 'darkslateblue': (72, 61, 139), 'darkslategray': (47, 79, 79), 'darkslategrey': (47, 79, 79), 'darkturquoise': (0, 206, 209), 'darkviolet': (148, 0, 211), 'deeppink': (255, 20, 147), 'deepskyblue': (0, 191, 255), 'dimgray': (105, 105, 105), 'dimgrey': (105, 105, 105), 'dodgerblue': (30, 144, 255), 'firebrick': (178, 34, 34), 'floralwhite': (255, 250, 240), 'forestgreen': (34, 139, 34), 'fuchsia': (255, 0, 255), 'gainsboro': (220, 220, 220), 'ghostwhite': (248, 248, 255), 'gold': (255, 215, 0), 'goldenrod': (218, 165, 32), 'gray': (128, 128, 128), 'green': (0, 128, 0), 'greenyellow': (173, 255, 47), 'grey': (128, 128, 128), 'honeydew': (240, 255, 240), 'hotpink': (255, 105, 180), 'indianred': (205, 92, 92), 'indigo': (75, 0, 130), 'ivory': (255, 255, 240), 'khaki': (240, 230, 140), 'lavender': (230, 230, 250), 'lavenderblush': (255, 240, 245), 'lawngreen': (124, 252, 0), 'lemonchiffon': (255, 250, 205), 'lightblue': (173, 216, 230), 'lightcoral': (240, 128, 128), 'lightcyan': (224, 255, 255), 'lightgoldenrodyellow': (250, 250, 210), 'lightgray': (211, 211, 211), 'lightgreen': (144, 238, 144), 'lightgrey': (211, 211, 211), 'lightpink': (255, 182, 193), 'lightsalmon': (255, 160, 122), 'lightseagreen': (32, 178, 170), 'lightskyblue': (135, 206, 250), 'lightslategray': (119, 136, 153), 'lightslategrey': (119, 136, 153), 'lightsteelblue': (176, 196, 222), 'lightyellow': (255, 255, 224), 'lime': (0, 255, 0), 'limegreen': (50, 205, 50), 'linen': (250, 240, 230), 'magenta': (255, 0, 255), 'maroon': (128, 0, 0), 
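    # Example (sketch) of the Color API defined above; results are approximate
    # illustrations, not normative values:
    #   Color('#09c').as_hex()                     -> '#09c'
    #   Color((0, 255, 255)).as_named()            -> 'cyan'
    #   Color('hsl(270, 60%, 70%)').as_rgb_tuple() -> (178, 133, 224)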
'mediumaquamarine': (102, 205, 170), 'mediumblue': (0, 0, 205), 'mediumorchid': (186, 85, 211), 'mediumpurple': (147, 112, 219), 'mediumseagreen': (60, 179, 113), 'mediumslateblue': (123, 104, 238), 'mediumspringgreen': (0, 250, 154), 'mediumturquoise': (72, 209, 204), 'mediumvioletred': (199, 21, 133), 'midnightblue': (25, 25, 112), 'mintcream': (245, 255, 250), 'mistyrose': (255, 228, 225), 'moccasin': (255, 228, 181), 'navajowhite': (255, 222, 173), 'navy': (0, 0, 128), 'oldlace': (253, 245, 230), 'olive': (128, 128, 0), 'olivedrab': (107, 142, 35), 'orange': (255, 165, 0), 'orangered': (255, 69, 0), 'orchid': (218, 112, 214), 'palegoldenrod': (238, 232, 170), 'palegreen': (152, 251, 152), 'paleturquoise': (175, 238, 238), 'palevioletred': (219, 112, 147), 'papayawhip': (255, 239, 213), 'peachpuff': (255, 218, 185), 'peru': (205, 133, 63), 'pink': (255, 192, 203), 'plum': (221, 160, 221), 'powderblue': (176, 224, 230), 'purple': (128, 0, 128), 'red': (255, 0, 0), 'rosybrown': (188, 143, 143), 'royalblue': (65, 105, 225), 'saddlebrown': (139, 69, 19), 'salmon': (250, 128, 114), 'sandybrown': (244, 164, 96), 'seagreen': (46, 139, 87), 'seashell': (255, 245, 238), 'sienna': (160, 82, 45), 'silver': (192, 192, 192), 'skyblue': (135, 206, 235), 'slateblue': (106, 90, 205), 'slategray': (112, 128, 144), 'slategrey': (112, 128, 144), 'snow': (255, 250, 250), 'springgreen': (0, 255, 127), 'steelblue': (70, 130, 180), 'tan': (210, 180, 140), 'teal': (0, 128, 128), 'thistle': (216, 191, 216), 'tomato': (255, 99, 71), 'turquoise': (64, 224, 208), 'violet': (238, 130, 238), 'wheat': (245, 222, 179), 'white': (255, 255, 255), 'whitesmoke': (245, 245, 245), 'yellow': (255, 255, 0), 'yellowgreen': (154, 205, 50), } COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()} pydantic-1.10.14/pydantic/config.py000066400000000000000000000145151455251250200171320ustar00rootroot00000000000000import json from enum import Enum from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union from typing_extensions import Literal, Protocol from .typing import AnyArgTCallable, AnyCallable from .utils import GetterDict from .version import compiled if TYPE_CHECKING: from typing import overload from .fields import ModelField from .main import BaseModel ConfigType = Type['BaseConfig'] class SchemaExtraCallable(Protocol): @overload def __call__(self, schema: Dict[str, Any]) -> None: pass @overload def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None: pass else: SchemaExtraCallable = Callable[..., None] __all__ = 'BaseConfig', 'ConfigDict', 'get_config', 'Extra', 'inherit_config', 'prepare_config' class Extra(str, Enum): allow = 'allow' ignore = 'ignore' forbid = 'forbid' # https://github.com/cython/cython/issues/4003 # Fixed in Cython 3 and Pydantic v1 won't support Cython 3. # Pydantic v2 doesn't depend on Cython at all. if not compiled: from typing_extensions import TypedDict class ConfigDict(TypedDict, total=False): title: Optional[str] anystr_lower: bool anystr_strip_whitespace: bool min_anystr_length: int max_anystr_length: Optional[int] validate_all: bool extra: Extra allow_mutation: bool frozen: bool allow_population_by_field_name: bool use_enum_values: bool fields: Dict[str, Union[str, Dict[str, str]]] validate_assignment: bool error_msg_templates: Dict[str, str] arbitrary_types_allowed: bool orm_mode: bool getter_dict: Type[GetterDict] alias_generator: Optional[Callable[[str], str]] keep_untouched: Tuple[type, ...] 
schema_extra: Union[Dict[str, object], 'SchemaExtraCallable'] json_loads: Callable[[str], object] json_dumps: AnyArgTCallable[str] json_encoders: Dict[Type[object], AnyCallable] underscore_attrs_are_private: bool allow_inf_nan: bool copy_on_model_validation: Literal['none', 'deep', 'shallow'] # whether dataclass `__post_init__` should be run after validation post_init_call: Literal['before_validation', 'after_validation'] else: ConfigDict = dict # type: ignore class BaseConfig: title: Optional[str] = None anystr_lower: bool = False anystr_upper: bool = False anystr_strip_whitespace: bool = False min_anystr_length: int = 0 max_anystr_length: Optional[int] = None validate_all: bool = False extra: Extra = Extra.ignore allow_mutation: bool = True frozen: bool = False allow_population_by_field_name: bool = False use_enum_values: bool = False fields: Dict[str, Union[str, Dict[str, str]]] = {} validate_assignment: bool = False error_msg_templates: Dict[str, str] = {} arbitrary_types_allowed: bool = False orm_mode: bool = False getter_dict: Type[GetterDict] = GetterDict alias_generator: Optional[Callable[[str], str]] = None keep_untouched: Tuple[type, ...] = () schema_extra: Union[Dict[str, Any], 'SchemaExtraCallable'] = {} json_loads: Callable[[str], Any] = json.loads json_dumps: Callable[..., str] = json.dumps json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {} underscore_attrs_are_private: bool = False allow_inf_nan: bool = True # whether inherited models as fields should be reconstructed as base model, # and whether such a copy should be shallow or deep copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow' # whether `Union` should check all allowed types before even trying to coerce smart_union: bool = False # whether dataclass `__post_init__` should be run before or after validation post_init_call: Literal['before_validation', 'after_validation'] = 'before_validation' @classmethod def get_field_info(cls, name: str) -> Dict[str, Any]: """ Get properties of FieldInfo from the `fields` property of the config class. """ fields_value = cls.fields.get(name) if isinstance(fields_value, str): field_info: Dict[str, Any] = {'alias': fields_value} elif isinstance(fields_value, dict): field_info = fields_value else: field_info = {} if 'alias' in field_info: field_info.setdefault('alias_priority', 2) if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator: alias = cls.alias_generator(name) if not isinstance(alias, str): raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}') field_info.update(alias=alias, alias_priority=1) return field_info @classmethod def prepare_field(cls, field: 'ModelField') -> None: """ Optional hook to check or modify fields during model creation. """ pass def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]: if config is None: return BaseConfig else: config_dict = ( config if isinstance(config, dict) else {k: getattr(config, k) for k in dir(config) if not k.startswith('__')} ) class Config(BaseConfig): ... for k, v in config_dict.items(): setattr(Config, k, v) return Config def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType': if not self_config: base_classes: Tuple['ConfigType', ...] 
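    # Example (sketch): the options above can be set via a nested Config class
    # (or, through `get_config` below, a plain dict); `User` is an illustrative
    # model.
    #
    #     class User(pydantic.BaseModel):
    #         name: str
    #
    #         class Config:
    #             anystr_strip_whitespace = True
    #             allow_mutation = False
    #
    #     assert User(name='  alice  ').name == 'alice'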
= (parent_config,)
    elif self_config == parent_config:
        base_classes = (self_config,)
    else:
        base_classes = self_config, parent_config

    namespace['json_encoders'] = {
        **getattr(parent_config, 'json_encoders', {}),
        **getattr(self_config, 'json_encoders', {}),
        **namespace.get('json_encoders', {}),
    }

    return type('Config', base_classes, namespace)


def prepare_config(config: Type[BaseConfig], cls_name: str) -> None:
    if not isinstance(config.extra, Extra):
        try:
            config.extra = Extra(config.extra)
        except ValueError:
            raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"')
pydantic-1.10.14/pydantic/dataclasses.py000066400000000000000000000432311455251250200201510ustar00rootroot00000000000000
"""
The main purpose is to enhance stdlib dataclasses by adding validation
A pydantic dataclass can be generated from scratch or from a stdlib one.

Behind the scenes, a pydantic dataclass is just like a regular one on which we attach
a `BaseModel` and magic methods to trigger the validation of the data.
`__init__` and `__post_init__` are hence overridden and have extra logic to be
able to validate input data.

When a pydantic dataclass is generated from scratch, it's just a plain dataclass
with validation triggered at initialization

The tricky part is for stdlib dataclasses that are converted into pydantic ones afterwards e.g.

```py
@dataclasses.dataclass
class M:
    x: int

ValidatedM = pydantic.dataclasses.dataclass(M)
```

We indeed still want to support equality, hashing, repr, ... as if it was the stdlib one!

```py
assert isinstance(ValidatedM(x=1), M)
assert ValidatedM(x=1) == M(x=1)
```

This means we **don't want to create a new dataclass that inherits from it**
The trick is to create a wrapper around `M` that will act as a proxy to trigger
validation without altering default `M` behaviour.
""" import copy import dataclasses import sys from contextlib import contextmanager from functools import wraps try: from functools import cached_property except ImportError: # cached_property available only for python3.8+ pass from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload from typing_extensions import dataclass_transform from .class_validators import gather_all_validators from .config import BaseConfig, ConfigDict, Extra, get_config from .error_wrappers import ValidationError from .errors import DataclassTypeError from .fields import Field, FieldInfo, Required, Undefined from .main import create_model, validate_model from .utils import ClassAttribute if TYPE_CHECKING: from .main import BaseModel from .typing import CallableGenerator, NoArgAnyCallable DataclassT = TypeVar('DataclassT', bound='Dataclass') DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy'] class Dataclass: # stdlib attributes __dataclass_fields__: ClassVar[Dict[str, Any]] __dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams` __post_init__: ClassVar[Callable[..., None]] # Added by pydantic __pydantic_run_validation__: ClassVar[bool] __post_init_post_parse__: ClassVar[Callable[..., None]] __pydantic_initialised__: ClassVar[bool] __pydantic_model__: ClassVar[Type[BaseModel]] __pydantic_validate_values__: ClassVar[Callable[['Dataclass'], None]] __pydantic_has_field_info_default__: ClassVar[bool] # whether a `pydantic.Field` is used as default value def __init__(self, *args: object, **kwargs: object) -> None: pass @classmethod def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator': pass @classmethod def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT': pass __all__ = [ 'dataclass', 'set_validation', 'create_pydantic_model_from_dataclass', 'is_builtin_dataclass', 'make_dataclass_validator', ] _T = TypeVar('_T') if sys.version_info >= (3, 10): @dataclass_transform(field_specifiers=(dataclasses.field, Field)) @overload def dataclass( *, init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, config: Union[ConfigDict, Type[object], None] = None, validate_on_init: Optional[bool] = None, use_proxy: Optional[bool] = None, kw_only: bool = ..., ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: ... @dataclass_transform(field_specifiers=(dataclasses.field, Field)) @overload def dataclass( _cls: Type[_T], *, init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, config: Union[ConfigDict, Type[object], None] = None, validate_on_init: Optional[bool] = None, use_proxy: Optional[bool] = None, kw_only: bool = ..., ) -> 'DataclassClassOrWrapper': ... else: @dataclass_transform(field_specifiers=(dataclasses.field, Field)) @overload def dataclass( *, init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, config: Union[ConfigDict, Type[object], None] = None, validate_on_init: Optional[bool] = None, use_proxy: Optional[bool] = None, ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: ... 
@dataclass_transform(field_specifiers=(dataclasses.field, Field)) @overload def dataclass( _cls: Type[_T], *, init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, config: Union[ConfigDict, Type[object], None] = None, validate_on_init: Optional[bool] = None, use_proxy: Optional[bool] = None, ) -> 'DataclassClassOrWrapper': ... @dataclass_transform(field_specifiers=(dataclasses.field, Field)) def dataclass( _cls: Optional[Type[_T]] = None, *, init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, config: Union[ConfigDict, Type[object], None] = None, validate_on_init: Optional[bool] = None, use_proxy: Optional[bool] = None, kw_only: bool = False, ) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']: """ Like the python standard lib dataclasses but with type validation. The result is either a pydantic dataclass that will validate input data or a wrapper that will trigger validation around a stdlib dataclass to avoid modifying it directly """ the_config = get_config(config) def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper': should_use_proxy = ( use_proxy if use_proxy is not None else ( is_builtin_dataclass(cls) and (cls.__bases__[0] is object or set(dir(cls)) == set(dir(cls.__bases__[0]))) ) ) if should_use_proxy: dc_cls_doc = '' dc_cls = DataclassProxy(cls) default_validate_on_init = False else: dc_cls_doc = cls.__doc__ or '' # needs to be done before generating dataclass if sys.version_info >= (3, 10): dc_cls = dataclasses.dataclass( cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen, kw_only=kw_only, ) else: dc_cls = dataclasses.dataclass( # type: ignore cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen ) default_validate_on_init = True should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc) dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls}) return dc_cls if _cls is None: return wrap return wrap(_cls) @contextmanager def set_validation(cls: Type['DataclassT'], value: bool) -> Generator[Type['DataclassT'], None, None]: original_run_validation = cls.__pydantic_run_validation__ try: cls.__pydantic_run_validation__ = value yield cls finally: cls.__pydantic_run_validation__ = original_run_validation class DataclassProxy: __slots__ = '__dataclass__' def __init__(self, dc_cls: Type['Dataclass']) -> None: object.__setattr__(self, '__dataclass__', dc_cls) def __call__(self, *args: Any, **kwargs: Any) -> Any: with set_validation(self.__dataclass__, True): return self.__dataclass__(*args, **kwargs) def __getattr__(self, name: str) -> Any: return getattr(self.__dataclass__, name) def __setattr__(self, __name: str, __value: Any) -> None: return setattr(self.__dataclass__, __name, __value) def __instancecheck__(self, instance: Any) -> bool: return isinstance(instance, self.__dataclass__) def __copy__(self) -> 'DataclassProxy': return DataclassProxy(copy.copy(self.__dataclass__)) def __deepcopy__(self, memo: Any) -> 'DataclassProxy': return DataclassProxy(copy.deepcopy(self.__dataclass__, memo)) def _add_pydantic_validation_attributes( # noqa: C901 (ignore complexity) dc_cls: Type['Dataclass'], config: Type[BaseConfig], validate_on_init: bool, dc_cls_doc: str, ) -> None: """ We need to replace the 
right method. If no `__post_init__` has been set in the stdlib dataclass it won't even exist (code is generated on the fly by `dataclasses`) By default, we run validation after `__init__` or `__post_init__` if defined """ init = dc_cls.__init__ @wraps(init) def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: if config.extra == Extra.ignore: init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) elif config.extra == Extra.allow: for k, v in kwargs.items(): self.__dict__.setdefault(k, v) init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) else: init(self, *args, **kwargs) if hasattr(dc_cls, '__post_init__'): try: post_init = dc_cls.__post_init__.__wrapped__ # type: ignore[attr-defined] except AttributeError: post_init = dc_cls.__post_init__ @wraps(post_init) def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: if config.post_init_call == 'before_validation': post_init(self, *args, **kwargs) if self.__class__.__pydantic_run_validation__: self.__pydantic_validate_values__() if hasattr(self, '__post_init_post_parse__'): self.__post_init_post_parse__(*args, **kwargs) if config.post_init_call == 'after_validation': post_init(self, *args, **kwargs) setattr(dc_cls, '__init__', handle_extra_init) setattr(dc_cls, '__post_init__', new_post_init) else: @wraps(init) def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: handle_extra_init(self, *args, **kwargs) if self.__class__.__pydantic_run_validation__: self.__pydantic_validate_values__() if hasattr(self, '__post_init_post_parse__'): # We need to find again the initvars. To do that we use `__dataclass_fields__` instead of # public method `dataclasses.fields` # get all initvars and their default values initvars_and_values: Dict[str, Any] = {} for i, f in enumerate(self.__class__.__dataclass_fields__.values()): if f._field_type is dataclasses._FIELD_INITVAR: # type: ignore[attr-defined] try: # set arg value by default initvars_and_values[f.name] = args[i] except IndexError: initvars_and_values[f.name] = kwargs.get(f.name, f.default) self.__post_init_post_parse__(**initvars_and_values) setattr(dc_cls, '__init__', new_init) setattr(dc_cls, '__pydantic_run_validation__', ClassAttribute('__pydantic_run_validation__', validate_on_init)) setattr(dc_cls, '__pydantic_initialised__', False) setattr(dc_cls, '__pydantic_model__', create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc)) setattr(dc_cls, '__pydantic_validate_values__', _dataclass_validate_values) setattr(dc_cls, '__validate__', classmethod(_validate_dataclass)) setattr(dc_cls, '__get_validators__', classmethod(_get_validators)) if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen: setattr(dc_cls, '__setattr__', _dataclass_validate_assignment_setattr) def _get_validators(cls: 'DataclassClassOrWrapper') -> 'CallableGenerator': yield cls.__validate__ def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT': with set_validation(cls, True): if isinstance(v, cls): v.__pydantic_validate_values__() return v elif isinstance(v, (list, tuple)): return cls(*v) elif isinstance(v, dict): return cls(**v) else: raise DataclassTypeError(class_name=cls.__name__) def create_pydantic_model_from_dataclass( dc_cls: Type['Dataclass'], config: Type[Any] = BaseConfig, dc_cls_doc: Optional[str] = None, ) -> Type['BaseModel']: field_definitions: Dict[str, Any] = {} for field in dataclasses.fields(dc_cls): default: Any = 
Undefined default_factory: Optional['NoArgAnyCallable'] = None field_info: FieldInfo if field.default is not dataclasses.MISSING: default = field.default elif field.default_factory is not dataclasses.MISSING: default_factory = field.default_factory else: default = Required if isinstance(default, FieldInfo): field_info = default dc_cls.__pydantic_has_field_info_default__ = True else: field_info = Field(default=default, default_factory=default_factory, **field.metadata) field_definitions[field.name] = (field.type, field_info) validators = gather_all_validators(dc_cls) model: Type['BaseModel'] = create_model( dc_cls.__name__, __config__=config, __module__=dc_cls.__module__, __validators__=validators, __cls_kwargs__={'__resolve_forward_refs__': False}, **field_definitions, ) model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or '' return model if sys.version_info >= (3, 8): def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool: return isinstance(getattr(type(obj), k, None), cached_property) else: def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool: return False def _dataclass_validate_values(self: 'Dataclass') -> None: # validation errors can occur if this function is called twice on an already initialised dataclass. # for example if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property if getattr(self, '__pydantic_initialised__'): return if getattr(self, '__pydantic_has_field_info_default__', False): # We need to remove `FieldInfo` values since they are not valid as input # It's ok to do that because they are obviously the default values! input_data = { k: v for k, v in self.__dict__.items() if not (isinstance(v, FieldInfo) or _is_field_cached_property(self, k)) } else: input_data = {k: v for k, v in self.__dict__.items() if not _is_field_cached_property(self, k)} d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__) if validation_error: raise validation_error self.__dict__.update(d) object.__setattr__(self, '__pydantic_initialised__', True) def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value: Any) -> None: if self.__pydantic_initialised__: d = dict(self.__dict__) d.pop(name, None) known_field = self.__pydantic_model__.__fields__.get(name, None) if known_field: value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__) if error_: raise ValidationError([error_], self.__class__) object.__setattr__(self, name, value) def is_builtin_dataclass(_cls: Type[Any]) -> bool: """ Whether a class is a stdlib dataclass (useful to discriminated a pydantic dataclass that is actually a wrapper around a stdlib dataclass) we check that - `_cls` is a dataclass - `_cls` is not a processed pydantic dataclass (with a basemodel attached) - `_cls` is not a pydantic dataclass inheriting directly from a stdlib dataclass e.g. ``` @dataclasses.dataclass class A: x: int @pydantic.dataclasses.dataclass class B(A): y: int ``` In this case, when we first check `B`, we make an extra check and look at the annotations ('y'), which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 
'x') """ return ( dataclasses.is_dataclass(_cls) and not hasattr(_cls, '__pydantic_model__') and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {}))) ) def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]) -> 'CallableGenerator': """ Create a pydantic.dataclass from a builtin dataclass to add type validation and yield the validators It retrieves the parameters of the dataclass and forwards them to the newly created dataclass """ yield from _get_validators(dataclass(dc_cls, config=config, use_proxy=True)) pydantic-1.10.14/pydantic/datetime_parse.py000066400000000000000000000170421455251250200206510ustar00rootroot00000000000000""" Functions to parse datetime objects. We're using regular expressions rather than time.strptime because: - They provide both validation and parsing. - They're more flexible for datetimes. - The date/datetime/time constructors produce friendlier error messages. Stolen from https://raw.githubusercontent.com/django/django/main/django/utils/dateparse.py at 9718fa2e8abe430c3526a9278dd976443d4ae3c6 Changed to: * use standard python datetime types not django.utils.timezone * raise ValueError when regex doesn't match rather than returning None * support parsing unix timestamps for dates and datetimes """ import re from datetime import date, datetime, time, timedelta, timezone from typing import Dict, Optional, Type, Union from . import errors date_expr = r'(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})' time_expr = ( r'(?P\d{1,2}):(?P\d{1,2})' r'(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?' r'(?PZ|[+-]\d{2}(?::?\d{2})?)?$' ) date_re = re.compile(f'{date_expr}$') time_re = re.compile(time_expr) datetime_re = re.compile(f'{date_expr}[T ]{time_expr}') standard_duration_re = re.compile( r'^' r'(?:(?P-?\d+) (days?, )?)?' r'((?:(?P-?\d+):)(?=\d+:\d+))?' r'(?:(?P-?\d+):)?' r'(?P-?\d+)' r'(?:\.(?P\d{1,6})\d{0,6})?' r'$' ) # Support the sections of ISO 8601 date representation that are accepted by timedelta iso8601_duration_re = re.compile( r'^(?P[-+]?)' r'P' r'(?:(?P\d+(.\d+)?)D)?' r'(?:T' r'(?:(?P\d+(.\d+)?)H)?' r'(?:(?P\d+(.\d+)?)M)?' r'(?:(?P\d+(.\d+)?)S)?' r')?' 
r'$' ) EPOCH = datetime(1970, 1, 1) # if greater than this, the number is in ms, if less than or equal it's in seconds # (in seconds this is 11th October 2603, in ms it's 20th August 1970) MS_WATERSHED = int(2e10) # slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 MAX_NUMBER = int(3e20) StrBytesIntFloat = Union[str, bytes, int, float] def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: if isinstance(value, (int, float)): return value try: return float(value) except ValueError: return None except TypeError: raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float') def from_unix_seconds(seconds: Union[int, float]) -> datetime: if seconds > MAX_NUMBER: return datetime.max elif seconds < -MAX_NUMBER: return datetime.min while abs(seconds) > MS_WATERSHED: seconds /= 1000 dt = EPOCH + timedelta(seconds=seconds) return dt.replace(tzinfo=timezone.utc) def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]: if value == 'Z': return timezone.utc elif value is not None: offset_mins = int(value[-2:]) if len(value) > 3 else 0 offset = 60 * int(value[1:3]) + offset_mins if value[0] == '-': offset = -offset try: return timezone(timedelta(minutes=offset)) except ValueError: raise error() else: return None def parse_date(value: Union[date, StrBytesIntFloat]) -> date: """ Parse a date/int/float/string and return a datetime.date. Raise ValueError if the input is well formatted but not a valid date. Raise ValueError if the input isn't well formatted. """ if isinstance(value, date): if isinstance(value, datetime): return value.date() else: return value number = get_numeric(value, 'date') if number is not None: return from_unix_seconds(number).date() if isinstance(value, bytes): value = value.decode() match = date_re.match(value) # type: ignore if match is None: raise errors.DateError() kw = {k: int(v) for k, v in match.groupdict().items()} try: return date(**kw) except ValueError: raise errors.DateError() def parse_time(value: Union[time, StrBytesIntFloat]) -> time: """ Parse a time/string and return a datetime.time. Raise ValueError if the input is well formatted but not a valid time. Raise ValueError if the input isn't well formatted, in particular if it contains an offset. """ if isinstance(value, time): return value number = get_numeric(value, 'time') if number is not None: if number >= 86400: # doesn't make sense since the time would loop back around to 0 raise errors.TimeError() return (datetime.min + timedelta(seconds=number)).time() if isinstance(value, bytes): value = value.decode() match = time_re.match(value) # type: ignore if match is None: raise errors.TimeError() kw = match.groupdict() if kw['microsecond']: kw['microsecond'] = kw['microsecond'].ljust(6, '0') tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError) kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} kw_['tzinfo'] = tzinfo try: return time(**kw_) # type: ignore except ValueError: raise errors.TimeError() def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: """ Parse a datetime/int/float/string and return a datetime.datetime. This function supports time zone offsets. When the input contains one, the output uses a timezone with a fixed offset from UTC. Raise ValueError if the input is well formatted but not a valid datetime. Raise ValueError if the input isn't well formatted.
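For example (an illustrative sketch, not an exhaustive list of accepted inputs):
```
parse_datetime('2012-04-23T09:15:00Z')        # tz-aware, tzinfo=timezone.utc
parse_datetime('2012-04-23 09:15:00+03:00')   # fixed +03:00 offset timezone
parse_datetime(1494012444)                    # unix timestamp in seconds -> UTC
```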
""" if isinstance(value, datetime): return value number = get_numeric(value, 'datetime') if number is not None: return from_unix_seconds(number) if isinstance(value, bytes): value = value.decode() match = datetime_re.match(value) # type: ignore if match is None: raise errors.DateTimeError() kw = match.groupdict() if kw['microsecond']: kw['microsecond'] = kw['microsecond'].ljust(6, '0') tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError) kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} kw_['tzinfo'] = tzinfo try: return datetime(**kw_) # type: ignore except ValueError: raise errors.DateTimeError() def parse_duration(value: StrBytesIntFloat) -> timedelta: """ Parse a duration int/float/string and return a datetime.timedelta. The preferred format for durations in Django is '%d %H:%M:%S.%f'. Also supports ISO 8601 representation. """ if isinstance(value, timedelta): return value if isinstance(value, (int, float)): # below code requires a string value = f'{value:f}' elif isinstance(value, bytes): value = value.decode() try: match = standard_duration_re.match(value) or iso8601_duration_re.match(value) except TypeError: raise TypeError('invalid type; expected timedelta, string, bytes, int or float') if not match: raise errors.DurationError() kw = match.groupdict() sign = -1 if kw.pop('sign', '+') == '-' else 1 if kw.get('microseconds'): kw['microseconds'] = kw['microseconds'].ljust(6, '0') if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'): kw['microseconds'] = '-' + kw['microseconds'] kw_ = {k: float(v) for k, v in kw.items() if v is not None} return sign * timedelta(**kw_) pydantic-1.10.14/pydantic/decorator.py000066400000000000000000000240271455251250200176460ustar00rootroot00000000000000from functools import wraps from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload from . import validator from .config import Extra from .errors import ConfigError from .main import BaseModel, create_model from .typing import get_all_type_hints from .utils import to_camel __all__ = ('validate_arguments',) if TYPE_CHECKING: from .typing import AnyCallable AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable) ConfigType = Union[None, Type[Any], Dict[str, Any]] @overload def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']: ... @overload def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': ... def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any: """ Decorator to validate the arguments passed to a function. 
""" def validate(_func: 'AnyCallable') -> 'AnyCallable': vd = ValidatedFunction(_func, config) @wraps(_func) def wrapper_function(*args: Any, **kwargs: Any) -> Any: return vd.call(*args, **kwargs) wrapper_function.vd = vd # type: ignore wrapper_function.validate = vd.init_model_instance # type: ignore wrapper_function.raw_function = vd.raw_function # type: ignore wrapper_function.model = vd.model # type: ignore return wrapper_function if func: return validate(func) else: return validate ALT_V_ARGS = 'v__args' ALT_V_KWARGS = 'v__kwargs' V_POSITIONAL_ONLY_NAME = 'v__positional_only' V_DUPLICATE_KWARGS = 'v__duplicate_kwargs' class ValidatedFunction: def __init__(self, function: 'AnyCallableT', config: 'ConfigType'): # noqa C901 from inspect import Parameter, signature parameters: Mapping[str, Parameter] = signature(function).parameters if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}: raise ConfigError( f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" ' f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator' ) self.raw_function = function self.arg_mapping: Dict[int, str] = {} self.positional_only_args = set() self.v_args_name = 'args' self.v_kwargs_name = 'kwargs' type_hints = get_all_type_hints(function) takes_args = False takes_kwargs = False fields: Dict[str, Tuple[Any, Any]] = {} for i, (name, p) in enumerate(parameters.items()): if p.annotation is p.empty: annotation = Any else: annotation = type_hints[name] default = ... if p.default is p.empty else p.default if p.kind == Parameter.POSITIONAL_ONLY: self.arg_mapping[i] = name fields[name] = annotation, default fields[V_POSITIONAL_ONLY_NAME] = List[str], None self.positional_only_args.add(name) elif p.kind == Parameter.POSITIONAL_OR_KEYWORD: self.arg_mapping[i] = name fields[name] = annotation, default fields[V_DUPLICATE_KWARGS] = List[str], None elif p.kind == Parameter.KEYWORD_ONLY: fields[name] = annotation, default elif p.kind == Parameter.VAR_POSITIONAL: self.v_args_name = name fields[name] = Tuple[annotation, ...], None takes_args = True else: assert p.kind == Parameter.VAR_KEYWORD, p.kind self.v_kwargs_name = name fields[name] = Dict[str, annotation], None # type: ignore takes_kwargs = True # these checks avoid a clash between "args" and a field with that name if not takes_args and self.v_args_name in fields: self.v_args_name = ALT_V_ARGS # same with "kwargs" if not takes_kwargs and self.v_kwargs_name in fields: self.v_kwargs_name = ALT_V_KWARGS if not takes_args: # we add the field so validation below can raise the correct exception fields[self.v_args_name] = List[Any], None if not takes_kwargs: # same with kwargs fields[self.v_kwargs_name] = Dict[Any, Any], None self.create_model(fields, takes_args, takes_kwargs, config) def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel: values = self.build_values(args, kwargs) return self.model(**values) def call(self, *args: Any, **kwargs: Any) -> Any: m = self.init_model_instance(*args, **kwargs) return self.execute(m) def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]: values: Dict[str, Any] = {} if args: arg_iter = enumerate(args) while True: try: i, a = next(arg_iter) except StopIteration: break arg_name = self.arg_mapping.get(i) if arg_name is not None: values[arg_name] = a else: values[self.v_args_name] = [a] + [a for _, a in arg_iter] break var_kwargs: Dict[str, Any] = {} wrong_positional_args = [] 
duplicate_kwargs = [] fields_alias = [ field.alias for name, field in self.model.__fields__.items() if name not in (self.v_args_name, self.v_kwargs_name) ] non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name} for k, v in kwargs.items(): if k in non_var_fields or k in fields_alias: if k in self.positional_only_args: wrong_positional_args.append(k) if k in values: duplicate_kwargs.append(k) values[k] = v else: var_kwargs[k] = v if var_kwargs: values[self.v_kwargs_name] = var_kwargs if wrong_positional_args: values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args if duplicate_kwargs: values[V_DUPLICATE_KWARGS] = duplicate_kwargs return values def execute(self, m: BaseModel) -> Any: d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory} var_kwargs = d.pop(self.v_kwargs_name, {}) if self.v_args_name in d: args_: List[Any] = [] in_kwargs = False kwargs = {} for name, value in d.items(): if in_kwargs: kwargs[name] = value elif name == self.v_args_name: args_ += value in_kwargs = True else: args_.append(value) return self.raw_function(*args_, **kwargs, **var_kwargs) elif self.positional_only_args: args_ = [] kwargs = {} for name, value in d.items(): if name in self.positional_only_args: args_.append(value) else: kwargs[name] = value return self.raw_function(*args_, **kwargs, **var_kwargs) else: return self.raw_function(**d, **var_kwargs) def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None: pos_args = len(self.arg_mapping) class CustomConfig: pass if not TYPE_CHECKING: # pragma: no branch if isinstance(config, dict): CustomConfig = type('Config', (), config) # noqa: F811 elif config is not None: CustomConfig = config # noqa: F811 if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'): raise ConfigError( 'Setting the "fields" and "alias_generator" property on custom Config for ' '@validate_arguments is not yet supported, please remove.' 
) class DecoratorBaseModel(BaseModel): @validator(self.v_args_name, check_fields=False, allow_reuse=True) def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]: if takes_args or v is None: return v raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given') @validator(self.v_kwargs_name, check_fields=False, allow_reuse=True) def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: if takes_kwargs or v is None: return v plural = '' if len(v) == 1 else 's' keys = ', '.join(map(repr, v.keys())) raise TypeError(f'unexpected keyword argument{plural}: {keys}') @validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True) def check_positional_only(cls, v: Optional[List[str]]) -> None: if v is None: return plural = '' if len(v) == 1 else 's' keys = ', '.join(map(repr, v)) raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}') @validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True) def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None: if v is None: return plural = '' if len(v) == 1 else 's' keys = ', '.join(map(repr, v)) raise TypeError(f'multiple values for argument{plural}: {keys}') class Config(CustomConfig): extra = getattr(CustomConfig, 'extra', Extra.forbid) self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields) pydantic-1.10.14/pydantic/env_settings.py000066400000000000000000000333271455251250200203770ustar00rootroot00000000000000import os import warnings from pathlib import Path from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union from .config import BaseConfig, Extra from .fields import ModelField from .main import BaseModel from .types import JsonWrapper from .typing import StrPath, display_as_type, get_origin, is_union from .utils import deep_update, lenient_issubclass, path_type, sequence_like env_file_sentinel = str(object()) SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]] DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]] class SettingsError(ValueError): pass class BaseSettings(BaseModel): """ Base class for settings, allowing values to be overridden by environment variables. This is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose), Heroku and any 12 factor app design. 
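For example (an illustrative sketch; the settings class and variable names are made up):
```
import os
from pydantic import BaseSettings

class AppSettings(BaseSettings):
    api_key: str = 'dev-key'

    class Config:
        env_prefix = 'MYAPP_'

os.environ['MYAPP_API_KEY'] = 'secret'
assert AppSettings().api_key == 'secret'   # the environment variable overrides the default
```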
""" def __init__( __pydantic_self__, _env_file: Optional[DotenvType] = env_file_sentinel, _env_file_encoding: Optional[str] = None, _env_nested_delimiter: Optional[str] = None, _secrets_dir: Optional[StrPath] = None, **values: Any, ) -> None: # Uses something other than `self` the first arg to allow "self" as a settable attribute super().__init__( **__pydantic_self__._build_values( values, _env_file=_env_file, _env_file_encoding=_env_file_encoding, _env_nested_delimiter=_env_nested_delimiter, _secrets_dir=_secrets_dir, ) ) def _build_values( self, init_kwargs: Dict[str, Any], _env_file: Optional[DotenvType] = None, _env_file_encoding: Optional[str] = None, _env_nested_delimiter: Optional[str] = None, _secrets_dir: Optional[StrPath] = None, ) -> Dict[str, Any]: # Configure built-in sources init_settings = InitSettingsSource(init_kwargs=init_kwargs) env_settings = EnvSettingsSource( env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file), env_file_encoding=( _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding ), env_nested_delimiter=( _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter ), env_prefix_len=len(self.__config__.env_prefix), ) file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir) # Provide a hook to set built-in sources priority and add / remove sources sources = self.__config__.customise_sources( init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings ) if sources: return deep_update(*reversed([source(self) for source in sources])) else: # no one should mean to do this, but I think returning an empty dict is marginally preferable # to an informative error and much better than a confusing error return {} class Config(BaseConfig): env_prefix: str = '' env_file: Optional[DotenvType] = None env_file_encoding: Optional[str] = None env_nested_delimiter: Optional[str] = None secrets_dir: Optional[StrPath] = None validate_all: bool = True extra: Extra = Extra.forbid arbitrary_types_allowed: bool = True case_sensitive: bool = False @classmethod def prepare_field(cls, field: ModelField) -> None: env_names: Union[List[str], AbstractSet[str]] field_info_from_config = cls.get_field_info(field.name) env = field_info_from_config.get('env') or field.field_info.extra.get('env') if env is None: if field.has_alias: warnings.warn( 'aliases are no longer used by BaseSettings to define which environment variables to read. ' 'Instead use the "env" field setting. 
' 'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names', FutureWarning, ) env_names = {cls.env_prefix + field.name} elif isinstance(env, str): env_names = {env} elif isinstance(env, (set, frozenset)): env_names = env elif sequence_like(env): env_names = list(env) else: raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set') if not cls.case_sensitive: env_names = env_names.__class__(n.lower() for n in env_names) field.field_info.extra['env_names'] = env_names @classmethod def customise_sources( cls, init_settings: SettingsSourceCallable, env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> Tuple[SettingsSourceCallable, ...]: return init_settings, env_settings, file_secret_settings @classmethod def parse_env_var(cls, field_name: str, raw_val: str) -> Any: return cls.json_loads(raw_val) # populated by the metaclass using the Config class defined above, annotated here to help IDEs only __config__: ClassVar[Type[Config]] class InitSettingsSource: __slots__ = ('init_kwargs',) def __init__(self, init_kwargs: Dict[str, Any]): self.init_kwargs = init_kwargs def __call__(self, settings: BaseSettings) -> Dict[str, Any]: return self.init_kwargs def __repr__(self) -> str: return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})' class EnvSettingsSource: __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter', 'env_prefix_len') def __init__( self, env_file: Optional[DotenvType], env_file_encoding: Optional[str], env_nested_delimiter: Optional[str] = None, env_prefix_len: int = 0, ): self.env_file: Optional[DotenvType] = env_file self.env_file_encoding: Optional[str] = env_file_encoding self.env_nested_delimiter: Optional[str] = env_nested_delimiter self.env_prefix_len: int = env_prefix_len def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901 """ Build environment variables suitable for passing to the Model. 
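Note: values from the live environment take precedence over values loaded from dotenv files, since dotenv variables are merged underneath ``os.environ`` below.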
""" d: Dict[str, Any] = {} if settings.__config__.case_sensitive: env_vars: Mapping[str, Optional[str]] = os.environ else: env_vars = {k.lower(): v for k, v in os.environ.items()} dotenv_vars = self._read_env_files(settings.__config__.case_sensitive) if dotenv_vars: env_vars = {**dotenv_vars, **env_vars} for field in settings.__fields__.values(): env_val: Optional[str] = None for env_name in field.field_info.extra['env_names']: env_val = env_vars.get(env_name) if env_val is not None: break is_complex, allow_parse_failure = self.field_is_complex(field) if is_complex: if env_val is None: # field is complex but no value found so far, try explode_env_vars env_val_built = self.explode_env_vars(field, env_vars) if env_val_built: d[field.alias] = env_val_built else: # field is complex and there's a value, decode that as JSON, then add explode_env_vars try: env_val = settings.__config__.parse_env_var(field.name, env_val) except ValueError as e: if not allow_parse_failure: raise SettingsError(f'error parsing env var "{env_name}"') from e if isinstance(env_val, dict): d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars)) else: d[field.alias] = env_val elif env_val is not None: # simplest case, field is not complex, we only need to add the value if it was found d[field.alias] = env_val return d def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]: env_files = self.env_file if env_files is None: return {} if isinstance(env_files, (str, os.PathLike)): env_files = [env_files] dotenv_vars = {} for env_file in env_files: env_path = Path(env_file).expanduser() if env_path.is_file(): dotenv_vars.update( read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive) ) return dotenv_vars def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]: """ Find out if a field is complex, and if so whether JSON errors should be ignored """ if lenient_issubclass(field.annotation, JsonWrapper): return False, False if field.is_complex(): allow_parse_failure = False elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields): allow_parse_failure = True else: return False, False return True, allow_parse_failure def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]: """ Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries. This is applied to a single field, hence filtering by env_var prefix. 
""" prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']] result: Dict[str, Any] = {} for env_name, env_val in env_vars.items(): if not any(env_name.startswith(prefix) for prefix in prefixes): continue # we remove the prefix before splitting in case the prefix has characters in common with the delimiter env_name_without_prefix = env_name[self.env_prefix_len :] _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter) env_var = result for key in keys: env_var = env_var.setdefault(key, {}) env_var[last_key] = env_val return result def __repr__(self) -> str: return ( f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, ' f'env_nested_delimiter={self.env_nested_delimiter!r})' ) class SecretsSettingsSource: __slots__ = ('secrets_dir',) def __init__(self, secrets_dir: Optional[StrPath]): self.secrets_dir: Optional[StrPath] = secrets_dir def __call__(self, settings: BaseSettings) -> Dict[str, Any]: """ Build fields from "secrets" files. """ secrets: Dict[str, Optional[str]] = {} if self.secrets_dir is None: return secrets secrets_path = Path(self.secrets_dir).expanduser() if not secrets_path.exists(): warnings.warn(f'directory "{secrets_path}" does not exist') return secrets if not secrets_path.is_dir(): raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}') for field in settings.__fields__.values(): for env_name in field.field_info.extra['env_names']: path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive) if not path: # path does not exist, we currently don't return a warning for this continue if path.is_file(): secret_value = path.read_text().strip() if field.is_complex(): try: secret_value = settings.__config__.parse_env_var(field.name, secret_value) except ValueError as e: raise SettingsError(f'error parsing env var "{env_name}"') from e secrets[field.alias] = secret_value else: warnings.warn( f'attempted to load secret file "{path}" but found a {path_type(path)} instead.', stacklevel=4, ) return secrets def __repr__(self) -> str: return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})' def read_env_file( file_path: StrPath, *, encoding: str = None, case_sensitive: bool = False ) -> Dict[str, Optional[str]]: try: from dotenv import dotenv_values except ImportError as e: raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8') if not case_sensitive: return {k.lower(): v for k, v in file_vars.items()} else: return file_vars def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]: """ Find a file within path's directory matching filename, optionally ignoring case. """ for f in dir_path.iterdir(): if f.name == file_name: return f elif not case_sensitive and f.name.lower() == file_name.lower(): return f return None pydantic-1.10.14/pydantic/error_wrappers.py000066400000000000000000000120251455251250200207330ustar00rootroot00000000000000import json from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union from .json import pydantic_encoder from .utils import Representation if TYPE_CHECKING: from typing_extensions import TypedDict from .config import BaseConfig from .types import ModelOrDc from .typing import ReprArgs Loc = Tuple[Union[int, str], ...] 
class _ErrorDictRequired(TypedDict): loc: Loc msg: str type: str class ErrorDict(_ErrorDictRequired, total=False): ctx: Dict[str, Any] __all__ = 'ErrorWrapper', 'ValidationError' class ErrorWrapper(Representation): __slots__ = 'exc', '_loc' def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: self.exc = exc self._loc = loc def loc_tuple(self) -> 'Loc': if isinstance(self._loc, tuple): return self._loc else: return (self._loc,) def __repr_args__(self) -> 'ReprArgs': return [('exc', self.exc), ('loc', self.loc_tuple())] # ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] # but recursive, therefore just use: ErrorList = Union[Sequence[Any], ErrorWrapper] class ValidationError(Representation, ValueError): __slots__ = 'raw_errors', 'model', '_error_cache' def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None: self.raw_errors = errors self.model = model self._error_cache: Optional[List['ErrorDict']] = None def errors(self) -> List['ErrorDict']: if self._error_cache is None: try: config = self.model.__config__ # type: ignore except AttributeError: config = self.model.__pydantic_model__.__config__ # type: ignore self._error_cache = list(flatten_errors(self.raw_errors, config)) return self._error_cache def json(self, *, indent: Union[None, int, str] = 2) -> str: return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) def __str__(self) -> str: errors = self.errors() no_errors = len(errors) return ( f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n' f'{display_errors(errors)}' ) def __repr_args__(self) -> 'ReprArgs': return [('model', self.model.__name__), ('errors', self.errors())] def display_errors(errors: List['ErrorDict']) -> str: return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) def _display_error_loc(error: 'ErrorDict') -> str: return ' -> '.join(str(e) for e in error['loc']) def _display_error_type_and_ctx(error: 'ErrorDict') -> str: t = 'type=' + error['type'] ctx = error.get('ctx') if ctx: return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) else: return t def flatten_errors( errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None ) -> Generator['ErrorDict', None, None]: for error in errors: if isinstance(error, ErrorWrapper): if loc: error_loc = loc + error.loc_tuple() else: error_loc = error.loc_tuple() if isinstance(error.exc, ValidationError): yield from flatten_errors(error.exc.raw_errors, config, error_loc) else: yield error_dict(error.exc, config, error_loc) elif isinstance(error, list): yield from flatten_errors(error, config, loc=loc) else: raise RuntimeError(f'Unknown error object: {error}') def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict': type_ = get_exc_type(exc.__class__) msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None) ctx = exc.__dict__ if msg_template: msg = msg_template.format(**ctx) else: msg = str(exc) d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_} if ctx: d['ctx'] = ctx return d _EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} def get_exc_type(cls: Type[Exception]) -> str: # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up try: return _EXC_TYPE_CACHE[cls] except KeyError: r = _get_exc_type(cls) _EXC_TYPE_CACHE[cls] = r return r def _get_exc_type(cls: Type[Exception]) -> str: if issubclass(cls, AssertionError): 
return 'assertion_error' base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' if cls in (TypeError, ValueError): # just TypeError or ValueError, no extra code return base_name # if it's not a TypeError or ValueError, we just take the lowercase of the exception name # no chaining or snake case logic, use "code" for more complex error types. code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() return base_name + '.' + code pydantic-1.10.14/pydantic/errors.py000066400000000000000000000424351455251250200172030ustar00rootroot00000000000000from decimal import Decimal from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union from .typing import display_as_type if TYPE_CHECKING: from .typing import DictStrAny # explicitly state exports to avoid "from .errors import *" also importing Decimal, Path etc. __all__ = ( 'PydanticTypeError', 'PydanticValueError', 'ConfigError', 'MissingError', 'ExtraError', 'NoneIsNotAllowedError', 'NoneIsAllowedError', 'WrongConstantError', 'NotNoneError', 'BoolError', 'BytesError', 'DictError', 'EmailError', 'UrlError', 'UrlSchemeError', 'UrlSchemePermittedError', 'UrlUserInfoError', 'UrlHostError', 'UrlHostTldError', 'UrlPortError', 'UrlExtraError', 'EnumError', 'IntEnumError', 'EnumMemberError', 'IntegerError', 'FloatError', 'PathError', 'PathNotExistsError', 'PathNotAFileError', 'PathNotADirectoryError', 'PyObjectError', 'SequenceError', 'ListError', 'SetError', 'FrozenSetError', 'TupleError', 'TupleLengthError', 'ListMinLengthError', 'ListMaxLengthError', 'ListUniqueItemsError', 'SetMinLengthError', 'SetMaxLengthError', 'FrozenSetMinLengthError', 'FrozenSetMaxLengthError', 'AnyStrMinLengthError', 'AnyStrMaxLengthError', 'StrError', 'StrRegexError', 'NumberNotGtError', 'NumberNotGeError', 'NumberNotLtError', 'NumberNotLeError', 'NumberNotMultipleError', 'DecimalError', 'DecimalIsNotFiniteError', 'DecimalMaxDigitsError', 'DecimalMaxPlacesError', 'DecimalWholeDigitsError', 'DateTimeError', 'DateError', 'DateNotInThePastError', 'DateNotInTheFutureError', 'TimeError', 'DurationError', 'HashableError', 'UUIDError', 'UUIDVersionError', 'ArbitraryTypeError', 'ClassError', 'SubclassError', 'JsonError', 'JsonTypeError', 'PatternError', 'DataclassTypeError', 'CallableError', 'IPvAnyAddressError', 'IPvAnyInterfaceError', 'IPvAnyNetworkError', 'IPv4AddressError', 'IPv6AddressError', 'IPv4NetworkError', 'IPv6NetworkError', 'IPv4InterfaceError', 'IPv6InterfaceError', 'ColorError', 'StrictBoolError', 'NotDigitError', 'LuhnValidationError', 'InvalidLengthForBrand', 'InvalidByteSize', 'InvalidByteSizeUnit', 'MissingDiscriminator', 'InvalidDiscriminator', ) def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin': """ For built-in exceptions like ValueError or TypeError, we need to implement __reduce__ to override the default behaviour (instead of __getstate__/__setstate__) By default pickle protocol 2 calls `cls.__new__(cls, *args)`. Since we only use kwargs, we need a little constructor to change that. 
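e.g. (illustrative) ``pickle.loads(pickle.dumps(MissingError()))`` goes through ``cls_kwargs`` and rebuilds the error with its original ``ctx``.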
Note: the callable can't be a lambda as pickle looks in the namespace to find it """ return cls(**ctx) class PydanticErrorMixin: code: str msg_template: str def __init__(self, **ctx: Any) -> None: self.__dict__ = ctx def __str__(self) -> str: return self.msg_template.format(**self.__dict__) def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]: return cls_kwargs, (self.__class__, self.__dict__) class PydanticTypeError(PydanticErrorMixin, TypeError): pass class PydanticValueError(PydanticErrorMixin, ValueError): pass class ConfigError(RuntimeError): pass class MissingError(PydanticValueError): msg_template = 'field required' class ExtraError(PydanticValueError): msg_template = 'extra fields not permitted' class NoneIsNotAllowedError(PydanticTypeError): code = 'none.not_allowed' msg_template = 'none is not an allowed value' class NoneIsAllowedError(PydanticTypeError): code = 'none.allowed' msg_template = 'value is not none' class WrongConstantError(PydanticValueError): code = 'const' def __str__(self) -> str: permitted = ', '.join(repr(v) for v in self.permitted) # type: ignore return f'unexpected value; permitted: {permitted}' class NotNoneError(PydanticTypeError): code = 'not_none' msg_template = 'value is not None' class BoolError(PydanticTypeError): msg_template = 'value could not be parsed to a boolean' class BytesError(PydanticTypeError): msg_template = 'byte type expected' class DictError(PydanticTypeError): msg_template = 'value is not a valid dict' class EmailError(PydanticValueError): msg_template = 'value is not a valid email address' class UrlError(PydanticValueError): code = 'url' class UrlSchemeError(UrlError): code = 'url.scheme' msg_template = 'invalid or missing URL scheme' class UrlSchemePermittedError(UrlError): code = 'url.scheme' msg_template = 'URL scheme not permitted' def __init__(self, allowed_schemes: Set[str]): super().__init__(allowed_schemes=allowed_schemes) class UrlUserInfoError(UrlError): code = 'url.userinfo' msg_template = 'userinfo required in URL but missing' class UrlHostError(UrlError): code = 'url.host' msg_template = 'URL host invalid' class UrlHostTldError(UrlError): code = 'url.host' msg_template = 'URL host invalid, top level domain required' class UrlPortError(UrlError): code = 'url.port' msg_template = 'URL port invalid, port cannot exceed 65535' class UrlExtraError(UrlError): code = 'url.extra' msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}' class EnumMemberError(PydanticTypeError): code = 'enum' def __str__(self) -> str: permitted = ', '.join(repr(v.value) for v in self.enum_values) # type: ignore return f'value is not a valid enumeration member; permitted: {permitted}' class IntegerError(PydanticTypeError): msg_template = 'value is not a valid integer' class FloatError(PydanticTypeError): msg_template = 'value is not a valid float' class PathError(PydanticTypeError): msg_template = 'value is not a valid path' class _PathValueError(PydanticValueError): def __init__(self, *, path: Path) -> None: super().__init__(path=str(path)) class PathNotExistsError(_PathValueError): code = 'path.not_exists' msg_template = 'file or directory at path "{path}" does not exist' class PathNotAFileError(_PathValueError): code = 'path.not_a_file' msg_template = 'path "{path}" does not point to a file' class PathNotADirectoryError(_PathValueError): code = 'path.not_a_directory' msg_template = 'path "{path}" does not point to a directory' class 
PyObjectError(PydanticTypeError): msg_template = 'ensure this value contains valid import path or valid callable: {error_message}' class SequenceError(PydanticTypeError): msg_template = 'value is not a valid sequence' class IterableError(PydanticTypeError): msg_template = 'value is not a valid iterable' class ListError(PydanticTypeError): msg_template = 'value is not a valid list' class SetError(PydanticTypeError): msg_template = 'value is not a valid set' class FrozenSetError(PydanticTypeError): msg_template = 'value is not a valid frozenset' class DequeError(PydanticTypeError): msg_template = 'value is not a valid deque' class TupleError(PydanticTypeError): msg_template = 'value is not a valid tuple' class TupleLengthError(PydanticValueError): code = 'tuple.length' msg_template = 'wrong tuple length {actual_length}, expected {expected_length}' def __init__(self, *, actual_length: int, expected_length: int) -> None: super().__init__(actual_length=actual_length, expected_length=expected_length) class ListMinLengthError(PydanticValueError): code = 'list.min_items' msg_template = 'ensure this value has at least {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class ListMaxLengthError(PydanticValueError): code = 'list.max_items' msg_template = 'ensure this value has at most {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class ListUniqueItemsError(PydanticValueError): code = 'list.unique_items' msg_template = 'the list has duplicated items' class SetMinLengthError(PydanticValueError): code = 'set.min_items' msg_template = 'ensure this value has at least {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class SetMaxLengthError(PydanticValueError): code = 'set.max_items' msg_template = 'ensure this value has at most {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class FrozenSetMinLengthError(PydanticValueError): code = 'frozenset.min_items' msg_template = 'ensure this value has at least {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class FrozenSetMaxLengthError(PydanticValueError): code = 'frozenset.max_items' msg_template = 'ensure this value has at most {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class AnyStrMinLengthError(PydanticValueError): code = 'any_str.min_length' msg_template = 'ensure this value has at least {limit_value} characters' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class AnyStrMaxLengthError(PydanticValueError): code = 'any_str.max_length' msg_template = 'ensure this value has at most {limit_value} characters' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class StrError(PydanticTypeError): msg_template = 'str type expected' class StrRegexError(PydanticValueError): code = 'str.regex' msg_template = 'string does not match regex "{pattern}"' def __init__(self, *, pattern: str) -> None: super().__init__(pattern=pattern) class _NumberBoundError(PydanticValueError): def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None: super().__init__(limit_value=limit_value) class NumberNotGtError(_NumberBoundError): code = 'number.not_gt' msg_template = 'ensure this value is greater than {limit_value}' 
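# illustrative sketch: these error classes are instantiated with their template kwargs
# and formatted lazily from `msg_template`, e.g.
#     str(NumberNotGtError(limit_value=0)) == 'ensure this value is greater than 0'
# while the reported error type is derived by error_wrappers.get_exc_type, here
# 'value_error.number.not_gt'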
class NumberNotGeError(_NumberBoundError): code = 'number.not_ge' msg_template = 'ensure this value is greater than or equal to {limit_value}' class NumberNotLtError(_NumberBoundError): code = 'number.not_lt' msg_template = 'ensure this value is less than {limit_value}' class NumberNotLeError(_NumberBoundError): code = 'number.not_le' msg_template = 'ensure this value is less than or equal to {limit_value}' class NumberNotFiniteError(PydanticValueError): code = 'number.not_finite_number' msg_template = 'ensure this value is a finite number' class NumberNotMultipleError(PydanticValueError): code = 'number.not_multiple' msg_template = 'ensure this value is a multiple of {multiple_of}' def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None: super().__init__(multiple_of=multiple_of) class DecimalError(PydanticTypeError): msg_template = 'value is not a valid decimal' class DecimalIsNotFiniteError(PydanticValueError): code = 'decimal.not_finite' msg_template = 'value is not a valid decimal' class DecimalMaxDigitsError(PydanticValueError): code = 'decimal.max_digits' msg_template = 'ensure that there are no more than {max_digits} digits in total' def __init__(self, *, max_digits: int) -> None: super().__init__(max_digits=max_digits) class DecimalMaxPlacesError(PydanticValueError): code = 'decimal.max_places' msg_template = 'ensure that there are no more than {decimal_places} decimal places' def __init__(self, *, decimal_places: int) -> None: super().__init__(decimal_places=decimal_places) class DecimalWholeDigitsError(PydanticValueError): code = 'decimal.whole_digits' msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point' def __init__(self, *, whole_digits: int) -> None: super().__init__(whole_digits=whole_digits) class DateTimeError(PydanticValueError): msg_template = 'invalid datetime format' class DateError(PydanticValueError): msg_template = 'invalid date format' class DateNotInThePastError(PydanticValueError): code = 'date.not_in_the_past' msg_template = 'date is not in the past' class DateNotInTheFutureError(PydanticValueError): code = 'date.not_in_the_future' msg_template = 'date is not in the future' class TimeError(PydanticValueError): msg_template = 'invalid time format' class DurationError(PydanticValueError): msg_template = 'invalid duration format' class HashableError(PydanticTypeError): msg_template = 'value is not a valid hashable' class UUIDError(PydanticTypeError): msg_template = 'value is not a valid uuid' class UUIDVersionError(PydanticValueError): code = 'uuid.version' msg_template = 'uuid version {required_version} expected' def __init__(self, *, required_version: int) -> None: super().__init__(required_version=required_version) class ArbitraryTypeError(PydanticTypeError): code = 'arbitrary_type' msg_template = 'instance of {expected_arbitrary_type} expected' def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None: super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type)) class ClassError(PydanticTypeError): code = 'class' msg_template = 'a class is expected' class SubclassError(PydanticTypeError): code = 'subclass' msg_template = 'subclass of {expected_class} expected' def __init__(self, *, expected_class: Type[Any]) -> None: super().__init__(expected_class=display_as_type(expected_class)) class JsonError(PydanticValueError): msg_template = 'Invalid JSON' class JsonTypeError(PydanticTypeError): code = 'json' msg_template = 'JSON object must be str, bytes or bytearray' class 
PatternError(PydanticValueError): code = 'regex_pattern' msg_template = 'Invalid regular expression' class DataclassTypeError(PydanticTypeError): code = 'dataclass' msg_template = 'instance of {class_name}, tuple or dict expected' class CallableError(PydanticTypeError): msg_template = '{value} is not callable' class EnumError(PydanticTypeError): code = 'enum_instance' msg_template = '{value} is not a valid Enum instance' class IntEnumError(PydanticTypeError): code = 'int_enum_instance' msg_template = '{value} is not a valid IntEnum instance' class IPvAnyAddressError(PydanticValueError): msg_template = 'value is not a valid IPv4 or IPv6 address' class IPvAnyInterfaceError(PydanticValueError): msg_template = 'value is not a valid IPv4 or IPv6 interface' class IPvAnyNetworkError(PydanticValueError): msg_template = 'value is not a valid IPv4 or IPv6 network' class IPv4AddressError(PydanticValueError): msg_template = 'value is not a valid IPv4 address' class IPv6AddressError(PydanticValueError): msg_template = 'value is not a valid IPv6 address' class IPv4NetworkError(PydanticValueError): msg_template = 'value is not a valid IPv4 network' class IPv6NetworkError(PydanticValueError): msg_template = 'value is not a valid IPv6 network' class IPv4InterfaceError(PydanticValueError): msg_template = 'value is not a valid IPv4 interface' class IPv6InterfaceError(PydanticValueError): msg_template = 'value is not a valid IPv6 interface' class ColorError(PydanticValueError): msg_template = 'value is not a valid color: {reason}' class StrictBoolError(PydanticValueError): msg_template = 'value is not a valid boolean' class NotDigitError(PydanticValueError): code = 'payment_card_number.digits' msg_template = 'card number is not all digits' class LuhnValidationError(PydanticValueError): code = 'payment_card_number.luhn_check' msg_template = 'card number is not luhn valid' class InvalidLengthForBrand(PydanticValueError): code = 'payment_card_number.invalid_length_for_brand' msg_template = 'Length for a {brand} card must be {required_length}' class InvalidByteSize(PydanticValueError): msg_template = 'could not parse value and unit from byte string' class InvalidByteSizeUnit(PydanticValueError): msg_template = 'could not interpret byte unit: {unit}' class MissingDiscriminator(PydanticValueError): code = 'discriminated_union.missing_discriminator' msg_template = 'Discriminator {discriminator_key!r} is missing in value' class InvalidDiscriminator(PydanticValueError): code = 'discriminated_union.invalid_discriminator' msg_template = ( 'No match for discriminator {discriminator_key!r} and value {discriminator_value!r} ' '(allowed values: {allowed_values})' ) def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None: super().__init__( discriminator_key=discriminator_key, discriminator_value=discriminator_value, allowed_values=', '.join(map(repr, allowed_values)), ) pydantic-1.10.14/pydantic/fields.py000066400000000000000000001424651455251250200171410ustar00rootroot00000000000000import copy import re from collections import Counter as CollectionCounter, defaultdict, deque from collections.abc import Callable, Hashable as CollectionsHashable, Iterable as CollectionsIterable from typing import ( TYPE_CHECKING, Any, Counter, DefaultDict, Deque, Dict, ForwardRef, FrozenSet, Generator, Iterable, Iterator, List, Mapping, Optional, Pattern, Sequence, Set, Tuple, Type, TypeVar, Union, ) from typing_extensions import Annotated, Final from . 
import errors as errors_ from .class_validators import Validator, make_generic_validator, prep_validators from .error_wrappers import ErrorWrapper from .errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError from .types import Json, JsonWrapper from .typing import ( NoArgAnyCallable, convert_generics, display_as_type, get_args, get_origin, is_finalvar, is_literal_type, is_new_type, is_none_type, is_typeddict, is_typeddict_special, is_union, new_type_supertype, ) from .utils import ( PyObjectStr, Representation, ValueItems, get_discriminator_alias_and_values, get_unique_discriminator_alias, lenient_isinstance, lenient_issubclass, sequence_like, smart_deepcopy, ) from .validators import constant_validator, dict_validator, find_validators, validate_json Required: Any = Ellipsis T = TypeVar('T') class UndefinedType: def __repr__(self) -> str: return 'PydanticUndefined' def __copy__(self: T) -> T: return self def __reduce__(self) -> str: return 'Undefined' def __deepcopy__(self: T, _: Any) -> T: return self Undefined = UndefinedType() if TYPE_CHECKING: from .class_validators import ValidatorsList from .config import BaseConfig from .error_wrappers import ErrorList from .types import ModelOrDc from .typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]] LocStr = Union[Tuple[Union[int, str], ...], str] BoolUndefined = Union[bool, UndefinedType] class FieldInfo(Representation): """ Captures extra information about a field. """ __slots__ = ( 'default', 'default_factory', 'alias', 'alias_priority', 'title', 'description', 'exclude', 'include', 'const', 'gt', 'ge', 'lt', 'le', 'multiple_of', 'allow_inf_nan', 'max_digits', 'decimal_places', 'min_items', 'max_items', 'unique_items', 'min_length', 'max_length', 'allow_mutation', 'repr', 'regex', 'discriminator', 'extra', ) # field constraints with the default value, it's also used in update_from_config below __field_constraints__ = { 'min_length': None, 'max_length': None, 'regex': None, 'gt': None, 'lt': None, 'ge': None, 'le': None, 'multiple_of': None, 'allow_inf_nan': None, 'max_digits': None, 'decimal_places': None, 'min_items': None, 'max_items': None, 'unique_items': None, 'allow_mutation': True, } def __init__(self, default: Any = Undefined, **kwargs: Any) -> None: self.default = default self.default_factory = kwargs.pop('default_factory', None) self.alias = kwargs.pop('alias', None) self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias is not None else None) self.title = kwargs.pop('title', None) self.description = kwargs.pop('description', None) self.exclude = kwargs.pop('exclude', None) self.include = kwargs.pop('include', None) self.const = kwargs.pop('const', None) self.gt = kwargs.pop('gt', None) self.ge = kwargs.pop('ge', None) self.lt = kwargs.pop('lt', None) self.le = kwargs.pop('le', None) self.multiple_of = kwargs.pop('multiple_of', None) self.allow_inf_nan = kwargs.pop('allow_inf_nan', None) self.max_digits = kwargs.pop('max_digits', None) self.decimal_places = kwargs.pop('decimal_places', None) self.min_items = kwargs.pop('min_items', None) self.max_items = kwargs.pop('max_items', None) self.unique_items = kwargs.pop('unique_items', None) self.min_length = kwargs.pop('min_length', None) self.max_length = kwargs.pop('max_length', None) self.allow_mutation = kwargs.pop('allow_mutation', True) self.regex = kwargs.pop('regex', None) self.discriminator = kwargs.pop('discriminator', None) self.repr = kwargs.pop('repr', True) 
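# anything left in `kwargs` is not a known FieldInfo attribute and is kept in `extra`, to be added to the generated schema as-is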
self.extra = kwargs def __repr_args__(self) -> 'ReprArgs': field_defaults_to_hide: Dict[str, Any] = { 'repr': True, **self.__field_constraints__, } attrs = ((s, getattr(self, s)) for s in self.__slots__) return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)] def get_constraints(self) -> Set[str]: """ Gets the constraints set on the field by comparing the constraint value with its default value :return: the constraints set on field_info """ return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default} def update_from_config(self, from_config: Dict[str, Any]) -> None: """ Update this FieldInfo based on a dict from get_field_info, only fields which have not been set are updated. """ for attr_name, value in from_config.items(): try: current_value = getattr(self, attr_name) except AttributeError: # attr_name is not an attribute of FieldInfo, it should therefore be added to extra # (except if extra already has this value!) self.extra.setdefault(attr_name, value) else: if current_value is self.__field_constraints__.get(attr_name, None): setattr(self, attr_name, value) elif attr_name == 'exclude': self.exclude = ValueItems.merge(value, current_value) elif attr_name == 'include': self.include = ValueItems.merge(value, current_value, intersect=True) def _validate(self) -> None: if self.default is not Undefined and self.default_factory is not None: raise ValueError('cannot specify both default and default_factory') def Field( default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None, alias: Optional[str] = None, title: Optional[str] = None, description: Optional[str] = None, exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None, include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None, const: Optional[bool] = None, gt: Optional[float] = None, ge: Optional[float] = None, lt: Optional[float] = None, le: Optional[float] = None, multiple_of: Optional[float] = None, allow_inf_nan: Optional[bool] = None, max_digits: Optional[int] = None, decimal_places: Optional[int] = None, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: Optional[bool] = None, min_length: Optional[int] = None, max_length: Optional[int] = None, allow_mutation: bool = True, regex: Optional[str] = None, discriminator: Optional[str] = None, repr: bool = True, **extra: Any, ) -> Any: """ Used to provide extra information about a field, either for the model schema or complex validation. Some arguments apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``. :param default: since this is replacing the field's default, its first argument is used to set the default, use ellipsis (``...``) to indicate the field is required :param default_factory: callable that will be called when a default value is needed for this field If both `default` and `default_factory` are set, an error is raised. :param alias: the public name of the field :param title: can be any string, used in the schema :param description: can be any string, used in the schema :param exclude: exclude this field while dumping. Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. :param include: include this field while dumping. Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
:param const: this field is required and *must* take its default value :param gt: only applies to numbers, requires the field to be "greater than". The schema will have an ``exclusiveMinimum`` validation keyword :param ge: only applies to numbers, requires the field to be "greater than or equal to". The schema will have a ``minimum`` validation keyword :param lt: only applies to numbers, requires the field to be "less than". The schema will have an ``exclusiveMaximum`` validation keyword :param le: only applies to numbers, requires the field to be "less than or equal to". The schema will have a ``maximum`` validation keyword :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The schema will have a ``multipleOf`` validation keyword :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf), which is a valid Python float. Default True, set to False for compatibility with JSON. :param max_digits: only applies to Decimals, requires the field to have a maximum number of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes. :param decimal_places: only applies to Decimals, requires the field to have at most the given number of decimal places. It does not include trailing decimal zeroes. :param min_items: only applies to lists, requires the field to have a minimum number of elements. The schema will have a ``minItems`` validation keyword :param max_items: only applies to lists, requires the field to have a maximum number of elements. The schema will have a ``maxItems`` validation keyword :param unique_items: only applies to lists, requires the field not to have duplicated elements. The schema will have a ``uniqueItems`` validation keyword :param min_length: only applies to strings, requires the field to have a minimum length. The schema will have a ``minLength`` validation keyword :param max_length: only applies to strings, requires the field to have a maximum length. The schema will have a ``maxLength`` validation keyword :param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is assigned on an instance. The BaseModel Config must set validate_assignment to True :param regex: only applies to strings, requires the field to match a regular expression pattern string. The schema will have a ``pattern`` validation keyword :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field.
The `discriminator` is the name of this common field to shorten validation and improve generated schema :param repr: show this field in the representation :param **extra: any additional keyword arguments will be added as is to the schema """ field_info = FieldInfo( default, default_factory=default_factory, alias=alias, title=title, description=description, exclude=exclude, include=include, const=const, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan, max_digits=max_digits, decimal_places=decimal_places, min_items=min_items, max_items=max_items, unique_items=unique_items, min_length=min_length, max_length=max_length, allow_mutation=allow_mutation, regex=regex, discriminator=discriminator, repr=repr, **extra, ) field_info._validate() return field_info # used to be an enum but changed to int's for small performance improvement as less access overhead SHAPE_SINGLETON = 1 SHAPE_LIST = 2 SHAPE_SET = 3 SHAPE_MAPPING = 4 SHAPE_TUPLE = 5 SHAPE_TUPLE_ELLIPSIS = 6 SHAPE_SEQUENCE = 7 SHAPE_FROZENSET = 8 SHAPE_ITERABLE = 9 SHAPE_GENERIC = 10 SHAPE_DEQUE = 11 SHAPE_DICT = 12 SHAPE_DEFAULTDICT = 13 SHAPE_COUNTER = 14 SHAPE_NAME_LOOKUP = { SHAPE_LIST: 'List[{}]', SHAPE_SET: 'Set[{}]', SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]', SHAPE_SEQUENCE: 'Sequence[{}]', SHAPE_FROZENSET: 'FrozenSet[{}]', SHAPE_ITERABLE: 'Iterable[{}]', SHAPE_DEQUE: 'Deque[{}]', SHAPE_DICT: 'Dict[{}]', SHAPE_DEFAULTDICT: 'DefaultDict[{}]', SHAPE_COUNTER: 'Counter[{}]', } MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER} class ModelField(Representation): __slots__ = ( 'type_', 'outer_type_', 'annotation', 'sub_fields', 'sub_fields_mapping', 'key_field', 'validators', 'pre_validators', 'post_validators', 'default', 'default_factory', 'required', 'final', 'model_config', 'name', 'alias', 'has_alias', 'field_info', 'discriminator_key', 'discriminator_alias', 'validate_always', 'allow_none', 'shape', 'class_validators', 'parse_json', ) def __init__( self, *, name: str, type_: Type[Any], class_validators: Optional[Dict[str, Validator]], model_config: Type['BaseConfig'], default: Any = None, default_factory: Optional[NoArgAnyCallable] = None, required: 'BoolUndefined' = Undefined, final: bool = False, alias: Optional[str] = None, field_info: Optional[FieldInfo] = None, ) -> None: self.name: str = name self.has_alias: bool = alias is not None self.alias: str = alias if alias is not None else name self.annotation = type_ self.type_: Any = convert_generics(type_) self.outer_type_: Any = type_ self.class_validators = class_validators or {} self.default: Any = default self.default_factory: Optional[NoArgAnyCallable] = default_factory self.required: 'BoolUndefined' = required self.final: bool = final self.model_config = model_config self.field_info: FieldInfo = field_info or FieldInfo(default) self.discriminator_key: Optional[str] = self.field_info.discriminator self.discriminator_alias: Optional[str] = self.discriminator_key self.allow_none: bool = False self.validate_always: bool = False self.sub_fields: Optional[List[ModelField]] = None self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None # used for discriminated union self.key_field: Optional[ModelField] = None self.validators: 'ValidatorsList' = [] self.pre_validators: Optional['ValidatorsList'] = None self.post_validators: Optional['ValidatorsList'] = None self.parse_json: bool = False self.shape: int = SHAPE_SINGLETON self.model_config.prepare_field(self) self.prepare() def get_default(self) -> Any: return 
smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() @staticmethod def _get_field_info( field_name: str, annotation: Any, value: Any, config: Type['BaseConfig'] ) -> Tuple[FieldInfo, Any]: """ Get a FieldInfo from a root typing.Annotated annotation, value, or config default. The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contain a FieldInfo, a new one will be created using the config. :param field_name: name of the field for use in error messages :param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]` :param value: the field's assigned value :param config: the model's config object :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config. """ field_info_from_config = config.get_field_info(field_name) field_info = None if get_origin(annotation) is Annotated: field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)] if len(field_infos) > 1: raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}') field_info = next(iter(field_infos), None) if field_info is not None: field_info = copy.copy(field_info) field_info.update_from_config(field_info_from_config) if field_info.default not in (Undefined, Required): raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}') if value is not Undefined and value is not Required: # check also `Required` because of `validate_arguments` that sets `...` as default value field_info.default = value if isinstance(value, FieldInfo): if field_info is not None: raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}') field_info = value field_info.update_from_config(field_info_from_config) elif field_info is None: field_info = FieldInfo(value, **field_info_from_config) value = None if field_info.default_factory is not None else field_info.default field_info._validate() return field_info, value @classmethod def infer( cls, *, name: str, value: Any, annotation: Any, class_validators: Optional[Dict[str, Validator]], config: Type['BaseConfig'], ) -> 'ModelField': from .schema import get_annotation_from_field_info field_info, value = cls._get_field_info(name, annotation, value, config) required: 'BoolUndefined' = Undefined if value is Required: required = True value = None elif value is not Undefined: required = False annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment) return cls( name=name, type_=annotation, alias=field_info.alias, class_validators=class_validators, default=value, default_factory=field_info.default_factory, required=required, model_config=config, field_info=field_info, ) def set_config(self, config: Type['BaseConfig']) -> None: self.model_config = config info_from_config = config.get_field_info(self.name) config.prepare_field(self) new_alias = info_from_config.get('alias') new_alias_priority = info_from_config.get('alias_priority') or 0 if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0): self.field_info.alias = new_alias self.field_info.alias_priority = new_alias_priority self.alias = new_alias new_exclude = info_from_config.get('exclude') if new_exclude is not None: self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude) new_include = info_from_config.get('include') if new_include is not None: self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True) 
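# --- Editor's note: illustrative sketch, not part of the pydantic source. ---
# `_get_field_info` above merges `Field()` metadata from three places: an
# `Annotated[...]` annotation, the assigned value, and config-level defaults,
# and rejects ambiguous combinations. A minimal example (hypothetical model):
#
#     from pydantic import BaseModel, Field
#     from typing_extensions import Annotated
#
#     class User(BaseModel):
#         id: Annotated[int, Field(gt=0)] = 1       # FieldInfo from Annotated, default from the value
#         name: str = Field('anon', max_length=50)  # FieldInfo from the assigned value
#
#     # Combining both, e.g. `Annotated[int, Field(...)] = Field(...)`, raises
#     # "cannot specify `Annotated` and value `Field`s together".
# -----------------------------------------------------------------------------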
@property def alt_alias(self) -> bool: return self.name != self.alias def prepare(self) -> None: """ Prepare the field by inspecting self.default, self.type_ etc. Note: this method is **not** idempotent (because _type_analysis is not idempotent), e.g. calling it multiple times may modify the field and configure it incorrectly. """ self._set_default_and_type() if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType: # self.type_ is currently a ForwardRef and there's nothing we can do now, # user will need to call model.update_forward_refs() return self._type_analysis() if self.required is Undefined: self.required = True if self.default is Undefined and self.default_factory is None: self.default = None self.populate_validators() def _set_default_and_type(self) -> None: """ Set the default value, infer the type if needed and check if `None` value is valid. """ if self.default_factory is not None: if self.type_ is Undefined: raise errors_.ConfigError( f'you need to set the type of field {self.name!r} when using `default_factory`' ) return default_value = self.get_default() if default_value is not None and self.type_ is Undefined: self.type_ = default_value.__class__ self.outer_type_ = self.type_ self.annotation = self.type_ if self.type_ is Undefined: raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') if self.required is False and default_value is None: self.allow_none = True def _type_analysis(self) -> None: # noqa: C901 (ignore complexity) # typing interface is horrible, we have to do some ugly checks if lenient_issubclass(self.type_, JsonWrapper): self.type_ = self.type_.inner_type self.parse_json = True elif lenient_issubclass(self.type_, Json): self.type_ = Any self.parse_json = True elif isinstance(self.type_, TypeVar): if self.type_.__bound__: self.type_ = self.type_.__bound__ elif self.type_.__constraints__: self.type_ = Union[self.type_.__constraints__] else: self.type_ = Any elif is_new_type(self.type_): self.type_ = new_type_supertype(self.type_) if self.type_ is Any or self.type_ is object: if self.required is Undefined: self.required = False self.allow_none = True return elif self.type_ is Pattern or self.type_ is re.Pattern: # python 3.7 only, Pattern is a typing object but without sub fields return elif is_literal_type(self.type_): return elif is_typeddict(self.type_): return if is_finalvar(self.type_): self.final = True if self.type_ is Final: self.type_ = Any else: self.type_ = get_args(self.type_)[0] self._type_analysis() return origin = get_origin(self.type_) if origin is Annotated or is_typeddict_special(origin): self.type_ = get_args(self.type_)[0] self._type_analysis() return if self.discriminator_key is not None and not is_union(origin): raise TypeError('`discriminator` can only be used with `Union` type with more than one variant') # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None` if origin is None or origin is CollectionsHashable: # field is not "typing" object e.g. Union, Dict, List etc. # allow None for virtual superclasses of NoneType, e.g.
Hashable if isinstance(self.type_, type) and isinstance(None, self.type_): self.allow_none = True return elif origin is Callable: return elif is_union(origin): types_ = [] for type_ in get_args(self.type_): if is_none_type(type_) or type_ is Any or type_ is object: if self.required is Undefined: self.required = False self.allow_none = True if is_none_type(type_): continue types_.append(type_) if len(types_) == 1: # Optional[] self.type_ = types_[0] # this is the one case where the "outer type" isn't just the original type self.outer_type_ = self.type_ # re-run to correctly interpret the new self.type_ self._type_analysis() else: self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_] if self.discriminator_key is not None: self.prepare_discriminated_union_sub_fields() return elif issubclass(origin, Tuple): # type: ignore # origin == Tuple without item type args = get_args(self.type_) if not args: # plain tuple self.type_ = Any self.shape = SHAPE_TUPLE_ELLIPSIS elif len(args) == 2 and args[1] is Ellipsis: # e.g. Tuple[int, ...] self.type_ = args[0] self.shape = SHAPE_TUPLE_ELLIPSIS self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')] elif args == ((),): # Tuple[()] means empty tuple self.shape = SHAPE_TUPLE self.type_ = Any self.sub_fields = [] else: self.shape = SHAPE_TUPLE self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)] return elif issubclass(origin, List): # Create self validators get_validators = getattr(self.type_, '__get_validators__', None) if get_validators: self.class_validators.update( {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} ) self.type_ = get_args(self.type_)[0] self.shape = SHAPE_LIST elif issubclass(origin, Set): # Create self validators get_validators = getattr(self.type_, '__get_validators__', None) if get_validators: self.class_validators.update( {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} ) self.type_ = get_args(self.type_)[0] self.shape = SHAPE_SET elif issubclass(origin, FrozenSet): # Create self validators get_validators = getattr(self.type_, '__get_validators__', None) if get_validators: self.class_validators.update( {f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} ) self.type_ = get_args(self.type_)[0] self.shape = SHAPE_FROZENSET elif issubclass(origin, Deque): self.type_ = get_args(self.type_)[0] self.shape = SHAPE_DEQUE elif issubclass(origin, Sequence): self.type_ = get_args(self.type_)[0] self.shape = SHAPE_SEQUENCE # priority to most common mapping: dict elif origin is dict or origin is Dict: self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) self.type_ = get_args(self.type_)[1] self.shape = SHAPE_DICT elif issubclass(origin, DefaultDict): self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) self.type_ = get_args(self.type_)[1] self.shape = SHAPE_DEFAULTDICT elif issubclass(origin, Counter): self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) self.type_ = int self.shape = SHAPE_COUNTER elif issubclass(origin, Mapping): self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) self.type_ = get_args(self.type_)[1] self.shape = SHAPE_MAPPING # Equality check as almost everything inherits from Iterable, including str # check for Iterable
and CollectionsIterable, as it could receive one even when declared with the other elif origin in {Iterable, CollectionsIterable}: self.type_ = get_args(self.type_)[0] self.shape = SHAPE_ITERABLE self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')] elif issubclass(origin, Type): # type: ignore return elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed: # Is a Pydantic-compatible generic that handles itself # or we have arbitrary_types_allowed = True self.shape = SHAPE_GENERIC self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))] self.type_ = origin return else: raise TypeError(f'Fields of type "{origin}" are not supported.') # type_ has been refined e.g. as the type of a List and sub_fields needs to be populated self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)] def prepare_discriminated_union_sub_fields(self) -> None: """ Prepare the mapping <discriminator value> -> <ModelField> and update `sub_fields` Note that this process can be aborted if a `ForwardRef` is encountered """ assert self.discriminator_key is not None if self.type_.__class__ is DeferredType: return assert self.sub_fields is not None sub_fields_mapping: Dict[str, 'ModelField'] = {} all_aliases: Set[str] = set() for sub_field in self.sub_fields: t = sub_field.type_ if t.__class__ is ForwardRef: # Stopping everything...will need to call `update_forward_refs` return alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key) all_aliases.add(alias) for discriminator_value in discriminator_values: sub_fields_mapping[discriminator_value] = sub_field self.sub_fields_mapping = sub_fields_mapping self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key) def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField': if for_keys: class_validators = None else: # validators for sub items should not have `each_item` as we want to check only the first sublevel class_validators = { k: Validator( func=v.func, pre=v.pre, each_item=False, always=v.always, check_fields=v.check_fields, skip_on_failure=v.skip_on_failure, ) for k, v in self.class_validators.items() if v.each_item } field_info, _ = self._get_field_info(name, type_, None, self.model_config) return self.__class__( type_=type_, name=name, class_validators=class_validators, model_config=self.model_config, field_info=field_info, ) def populate_validators(self) -> None: """ Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__ and class validators. This method should be idempotent, e.g. it should be safe to call multiple times without mis-configuring the field.
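An illustrative sketch (editor's addition, hypothetical model) of how class validators are split around the type's own validators:

    from pydantic import BaseModel, validator

    class M(BaseModel):
        x: int

        @validator('x', pre=True)
        def strip_spaces(cls, v):  # becomes a pre-validator, runs before int coercion
            return str(v).strip()

        @validator('x')
        def double(cls, v):  # becomes a post-validator, runs after int coercion
            return v * 2

    assert M(x=' 21 ').x == 42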
""" self.validate_always = getattr(self.type_, 'validate_always', False) or any( v.always for v in self.class_validators.values() ) class_validators_ = self.class_validators.values() if not self.sub_fields or self.shape == SHAPE_GENERIC: get_validators = getattr(self.type_, '__get_validators__', None) v_funcs = ( *[v.func for v in class_validators_ if v.each_item and v.pre], *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))), *[v.func for v in class_validators_ if v.each_item and not v.pre], ) self.validators = prep_validators(v_funcs) self.pre_validators = [] self.post_validators = [] if self.field_info and self.field_info.const: self.post_validators.append(make_generic_validator(constant_validator)) if class_validators_: self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) if self.parse_json: self.pre_validators.append(make_generic_validator(validate_json)) self.pre_validators = self.pre_validators or None self.post_validators = self.post_validators or None def validate( self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None ) -> 'ValidateReturn': assert self.type_.__class__ is not DeferredType if self.type_.__class__ is ForwardRef: assert cls is not None raise ConfigError( f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' f'you might need to call {cls.__name__}.update_forward_refs().' ) errors: Optional['ErrorList'] if self.pre_validators: v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators) if errors: return v, errors if v is None: if is_none_type(self.type_): # keep validating pass elif self.allow_none: if self.post_validators: return self._apply_validators(v, values, loc, cls, self.post_validators) else: return None, None else: return v, ErrorWrapper(NoneIsNotAllowedError(), loc) if self.shape == SHAPE_SINGLETON: v, errors = self._validate_singleton(v, values, loc, cls) elif self.shape in MAPPING_LIKE_SHAPES: v, errors = self._validate_mapping_like(v, values, loc, cls) elif self.shape == SHAPE_TUPLE: v, errors = self._validate_tuple(v, values, loc, cls) elif self.shape == SHAPE_ITERABLE: v, errors = self._validate_iterable(v, values, loc, cls) elif self.shape == SHAPE_GENERIC: v, errors = self._apply_validators(v, values, loc, cls, self.validators) else: # sequence, list, set, generator, tuple with ellipsis, frozen set v, errors = self._validate_sequence_like(v, values, loc, cls) if not errors and self.post_validators: v, errors = self._apply_validators(v, values, loc, cls, self.post_validators) return v, errors def _validate_sequence_like( # noqa: C901 (ignore complexity) self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': """ Validate sequence-like containers: lists, tuples, sets and generators Note that large if-else blocks are necessary to enable Cython optimization, which is why we disable the complexity check above. 
""" if not sequence_like(v): e: errors_.PydanticTypeError if self.shape == SHAPE_LIST: e = errors_.ListError() elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS): e = errors_.TupleError() elif self.shape == SHAPE_SET: e = errors_.SetError() elif self.shape == SHAPE_FROZENSET: e = errors_.FrozenSetError() else: e = errors_.SequenceError() return v, ErrorWrapper(e, loc) loc = loc if isinstance(loc, tuple) else (loc,) result = [] errors: List[ErrorList] = [] for i, v_ in enumerate(v): v_loc = *loc, i r, ee = self._validate_singleton(v_, values, v_loc, cls) if ee: errors.append(ee) else: result.append(r) if errors: return v, errors converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result if self.shape == SHAPE_SET: converted = set(result) elif self.shape == SHAPE_FROZENSET: converted = frozenset(result) elif self.shape == SHAPE_TUPLE_ELLIPSIS: converted = tuple(result) elif self.shape == SHAPE_DEQUE: converted = deque(result, maxlen=getattr(v, 'maxlen', None)) elif self.shape == SHAPE_SEQUENCE: if isinstance(v, tuple): converted = tuple(result) elif isinstance(v, set): converted = set(result) elif isinstance(v, Generator): converted = iter(result) elif isinstance(v, deque): converted = deque(result, maxlen=getattr(v, 'maxlen', None)) return converted, None def _validate_iterable( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': """ Validate Iterables. This intentionally doesn't validate values to allow infinite generators. """ try: iterable = iter(v) except TypeError: return v, ErrorWrapper(errors_.IterableError(), loc) return iterable, None def _validate_tuple( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': e: Optional[Exception] = None if not sequence_like(v): e = errors_.TupleError() else: actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore if actual_length != expected_length: e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length) if e: return v, ErrorWrapper(e, loc) loc = loc if isinstance(loc, tuple) else (loc,) result = [] errors: List[ErrorList] = [] for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore v_loc = *loc, i r, ee = field.validate(v_, values, loc=v_loc, cls=cls) if ee: errors.append(ee) else: result.append(r) if errors: return v, errors else: return tuple(result), None def _validate_mapping_like( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': try: v_iter = dict_validator(v) except TypeError as exc: return v, ErrorWrapper(exc, loc) loc = loc if isinstance(loc, tuple) else (loc,) result, errors = {}, [] for k, v_ in v_iter.items(): v_loc = *loc, '__key__' key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore if key_errors: errors.append(key_errors) continue v_loc = *loc, k value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls) if value_errors: errors.append(value_errors) continue result[key_result] = value_result if errors: return v, errors elif self.shape == SHAPE_DICT: return result, None elif self.shape == SHAPE_DEFAULTDICT: return defaultdict(self.type_, result), None elif self.shape == SHAPE_COUNTER: return CollectionCounter(result), None else: return self._get_mapping_value(v, result), None def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]: """ When type is 
`Mapping[KT, KV]` (or another unsupported mapping), we try to avoid coercing to `dict` unwillingly. """ original_cls = original.__class__ if original_cls == dict or original_cls == Dict: return converted elif original_cls in {defaultdict, DefaultDict}: return defaultdict(self.type_, converted) else: try: # Counter, OrderedDict, UserDict, ... return original_cls(converted) # type: ignore except TypeError: raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None def _validate_singleton( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': if self.sub_fields: if self.discriminator_key is not None: return self._validate_discriminated_union(v, values, loc, cls) errors = [] if self.model_config.smart_union and is_union(get_origin(self.type_)): # 1st pass: check if the value is an exact instance of one of the Union types # (e.g. to avoid coercing a bool into an int) for field in self.sub_fields: if v.__class__ is field.outer_type_: return v, None # 2nd pass: check if the value is an instance of any subclass of the Union types for field in self.sub_fields: # This whole logic will be improved later on to support more complex `isinstance` checks # It will probably be done once a strict mode is added and be something like: # ``` # value, error = field.validate(v, values, strict=True) # if error is None: # return value, None # ``` try: if isinstance(v, field.outer_type_): return v, None except TypeError: # compound type if lenient_isinstance(v, get_origin(field.outer_type_)): value, error = field.validate(v, values, loc=loc, cls=cls) if not error: return value, None # 1st pass by default or 3rd pass with `smart_union` enabled: # check if the value can be coerced into one of the Union types for field in self.sub_fields: value, error = field.validate(v, values, loc=loc, cls=cls) if error: errors.append(error) else: return value, None return v, errors else: return self._apply_validators(v, values, loc, cls, self.validators) def _validate_discriminated_union( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': assert self.discriminator_key is not None assert self.discriminator_alias is not None try: try: discriminator_value = v[self.discriminator_alias] except KeyError: if self.model_config.allow_population_by_field_name: discriminator_value = v[self.discriminator_key] else: raise except KeyError: return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) except TypeError: try: # BaseModel or dataclass discriminator_value = getattr(v, self.discriminator_key) except (AttributeError, TypeError): return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) if self.sub_fields_mapping is None: assert cls is not None raise ConfigError( f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' f'you might need to call {cls.__name__}.update_forward_refs().' ) try: sub_field = self.sub_fields_mapping[discriminator_value] except (KeyError, TypeError): # KeyError: `discriminator_value` is not in the dictionary. # TypeError: `discriminator_value` is unhashable. 
assert self.sub_fields_mapping is not None return v, ErrorWrapper( InvalidDiscriminator( discriminator_key=self.discriminator_key, discriminator_value=discriminator_value, allowed_values=list(self.sub_fields_mapping), ), loc, ) else: if not isinstance(loc, tuple): loc = (loc,) return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls) def _apply_validators( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList' ) -> 'ValidateReturn': for validator in validators: try: v = validator(cls, v, values, self, self.model_config) except (ValueError, TypeError, AssertionError) as exc: return v, ErrorWrapper(exc, loc) return v, None def is_complex(self) -> bool: """ Whether the field is "complex" e.g. env variables should be parsed as JSON. """ from .main import BaseModel return ( self.shape != SHAPE_SINGLETON or hasattr(self.type_, '__pydantic_model__') or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict)) ) def _type_display(self) -> PyObjectStr: t = display_as_type(self.type_) if self.shape in MAPPING_LIKE_SHAPES: t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]' # type: ignore elif self.shape == SHAPE_TUPLE: t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields)) # type: ignore elif self.shape == SHAPE_GENERIC: assert self.sub_fields t = '{}[{}]'.format( display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields) ) elif self.shape != SHAPE_SINGLETON: t = SHAPE_NAME_LOOKUP[self.shape].format(t) if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields): t = f'Optional[{t}]' return PyObjectStr(t) def __repr_args__(self) -> 'ReprArgs': args = [('name', self.name), ('type', self._type_display()), ('required', self.required)] if not self.required: if self.default_factory is not None: args.append(('default_factory', f'<function {self.default_factory.__name__}>')) else: args.append(('default', self.default)) if self.alt_alias: args.append(('alias', self.alias)) return args class ModelPrivateAttr(Representation): __slots__ = ('default', 'default_factory') def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None: self.default = default self.default_factory = default_factory def get_default(self) -> Any: return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and (self.default, self.default_factory) == ( other.default, other.default_factory, ) def PrivateAttr( default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None, ) -> Any: """ Indicates that an attribute is only used internally and never mixed with regular fields. Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant. Private attrs are stored in model __slots__. :param default: the attribute’s default value :param default_factory: callable that will be called when a default value is needed for this attribute If both `default` and `default_factory` are set, an error is raised. """ if default is not Undefined and default_factory is not None: raise ValueError('cannot specify both default and default_factory') return ModelPrivateAttr( default, default_factory=default_factory, ) class DeferredType: """ Used to postpone field preparation, while creating recursive generic models.
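For instance (editor's illustrative sketch, hypothetical classes), while `Pair[int]` below is being built, its new fields briefly carry `DeferredType` before `_prepare_model_fields` (see generics.py) swaps in the concrete `int` and calls `prepare()`:

    from typing import Generic, TypeVar
    from pydantic.generics import GenericModel

    T = TypeVar('T')

    class Pair(GenericModel, Generic[T]):
        first: T
        second: T

    IntPair = Pair[int]  # fields created with DeferredType, then made concrete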
""" def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool: return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo) pydantic-1.10.14/pydantic/generics.py000066400000000000000000000426151455251250200174660ustar00rootroot00000000000000import sys import types import typing from typing import ( TYPE_CHECKING, Any, ClassVar, Dict, ForwardRef, Generic, Iterator, List, Mapping, Optional, Tuple, Type, TypeVar, Union, cast, ) from weakref import WeakKeyDictionary, WeakValueDictionary from typing_extensions import Annotated, Literal as ExtLiteral from .class_validators import gather_all_validators from .fields import DeferredType from .main import BaseModel, create_model from .types import JsonWrapper from .typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base from .utils import all_identical, lenient_issubclass if sys.version_info >= (3, 10): from typing import _UnionGenericAlias if sys.version_info >= (3, 8): from typing import Literal GenericModelT = TypeVar('GenericModelT', bound='GenericModel') TypeVarType = Any # since mypy doesn't allow the use of TypeVar as a type CacheKey = Tuple[Type[Any], Any, Tuple[Any, ...]] Parametrization = Mapping[TypeVarType, Type[Any]] # weak dictionaries allow the dynamically created parametrized versions of generic models to get collected # once they are no longer referenced by the caller. if sys.version_info >= (3, 9): # Typing for weak dictionaries available at 3.9 GenericTypesCache = WeakValueDictionary[CacheKey, Type[BaseModel]] AssignedParameters = WeakKeyDictionary[Type[BaseModel], Parametrization] else: GenericTypesCache = WeakValueDictionary AssignedParameters = WeakKeyDictionary # _generic_types_cache is a Mapping from __class_getitem__ arguments to the parametrized version of generic models. # This ensures multiple calls of e.g. A[B] return always the same class. _generic_types_cache = GenericTypesCache() # _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations # as captured during construction of the class (not instances). # E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created, # `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`. # (This information is only otherwise available after creation from the class name string). _assigned_parameters = AssignedParameters() class GenericModel(BaseModel): __slots__ = () __concrete__: ClassVar[bool] = False if TYPE_CHECKING: # Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with # `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of # `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below. __parameters__: ClassVar[Tuple[TypeVarType, ...]] # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]: """Instantiates a new class from a generic class `cls` and type variables `params`. :param params: Tuple of types the class . Given a generic class `Model` with 2 type variables and a concrete model `Model[str, int]`, the value `(str, int)` would be passed to `params`. :return: New model class inheriting from `cls` with instantiated types described by `params`. If no parameters are given, `cls` is returned as is. 
""" def _cache_key(_params: Any) -> CacheKey: args = get_args(_params) # python returns a list for Callables, which is not hashable if len(args) == 2 and isinstance(args[0], list): args = (tuple(args[0]), args[1]) return cls, _params, args cached = _generic_types_cache.get(_cache_key(params)) if cached is not None: return cached if cls.__concrete__ and Generic not in cls.__bases__: raise TypeError('Cannot parameterize a concrete instantiation of a generic model') if not isinstance(params, tuple): params = (params,) if cls is GenericModel and any(isinstance(param, TypeVar) for param in params): raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel') if not hasattr(cls, '__parameters__'): raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized') check_parameters_count(cls, params) # Build map from generic typevars to passed params typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params)) if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map: return cls # if arguments are equal to parameters it's the same object # Create new model with original model as parent inserting fields with DeferredType. model_name = cls.__concrete_name__(params) validators = gather_all_validators(cls) type_hints = get_all_type_hints(cls).items() instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar} fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__} model_module, called_globally = get_caller_frame_info() created_model = cast( Type[GenericModel], # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes create_model( model_name, __module__=model_module or cls.__module__, __base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)), __config__=None, __validators__=validators, __cls_kwargs__=None, **fields, ), ) _assigned_parameters[created_model] = typevars_map if called_globally: # create global reference and therefore allow pickling object_by_reference = None reference_name = model_name reference_module_globals = sys.modules[created_model.__module__].__dict__ while object_by_reference is not created_model: object_by_reference = reference_module_globals.setdefault(reference_name, created_model) reference_name += '_' created_model.Config = cls.Config # Find any typevars that are still present in the model. # If none are left, the model is fully "concrete", otherwise the new # class is a generic class as well taking the found typevars as # parameters. new_params = tuple( {param: None for param in iter_contained_typevars(typevars_map.values())} ) # use dict as ordered set created_model.__concrete__ = not new_params if new_params: created_model.__parameters__ = new_params # Save created model in cache so we don't end up creating duplicate # models that should be identical. _generic_types_cache[_cache_key(params)] = created_model if len(params) == 1: _generic_types_cache[_cache_key(params[0])] = created_model # Recursively walk class type hints and replace generic typevars # with concrete types that were passed. _prepare_model_fields(created_model, fields, instance_type_hints, typevars_map) return created_model @classmethod def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: """Compute class name for child classes. :param params: Tuple of types the class . 
Given a generic class `Model` with 2 type variables and a concrete model `Model[str, int]`, the value `(str, int)` would be passed to `params`. :return: String representing the new class where `params` are passed to `cls` as type variables. This method can be overridden to achieve a custom naming scheme for GenericModels. """ param_names = [display_as_type(param) for param in params] params_component = ', '.join(param_names) return f'{cls.__name__}[{params_component}]' @classmethod def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]: """ Returns unbound bases of cls parameterised to given type variables :param typevars_map: Dictionary of type applications for binding subclasses. Given a generic class `Model` with 2 type variables [S, T] and a concrete model `Model[str, int]`, the value `{S: str, T: int}` would be passed to `typevars_map`. :return: an iterator of generic sub classes, parameterised by `typevars_map` and other assigned parameters of `cls` e.g.: ``` class A(GenericModel, Generic[T]): ... class B(A[V], Generic[V]): ... assert A[int] in B.__parameterized_bases__({V: int}) ``` """ def build_base_model( base_model: Type[GenericModel], mapped_types: Parametrization ) -> Iterator[Type[GenericModel]]: base_parameters = tuple(mapped_types[param] for param in base_model.__parameters__) parameterized_base = base_model.__class_getitem__(base_parameters) if parameterized_base is base_model or parameterized_base is cls: # Avoid duplication in MRO return yield parameterized_base for base_model in cls.__bases__: if not issubclass(base_model, GenericModel): # not a class that can be meaningfully parameterized continue elif not getattr(base_model, '__parameters__', None): # base_model is "GenericModel" (and has no __parameters__) # or # base_model is already concrete, and will be included transitively via cls. continue elif cls in _assigned_parameters: if base_model in _assigned_parameters: # cls is partially parameterised but not from base_model # e.g. cls = B[S], base_model = A[S] # B[S][int] should subclass A[int], (and will be transitively via B[int]) # but it's not viable to consistently subclass types with arbitrary construction # So don't attempt to include A[S][int] continue else: # base_model not in _assigned_parameters: # cls is partially parameterized, base_model is original generic # e.g. cls = B[str, T], base_model = B[S, T] # Need to determine the mapping for the base_model parameters mapped_types: Parametrization = { key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items() } yield from build_base_model(base_model, mapped_types) else: # cls is base generic, so base_class has a distinct base # can construct the Parameterised base model using typevars_map directly yield from build_base_model(base_model, typevars_map) def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any: """Return type with all occurrences of `type_map` keys recursively replaced with their values. :param type_: Any type, class or generic alias :param type_map: Mapping from `TypeVar` instance to concrete types. :return: New type representing the basic structure of `type_` with all `typevar_map` keys recursively replaced.
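For example, a bare `TypeVar` key is substituted directly (editor's illustrative addition, assuming `T = TypeVar('T')`):

>>> replace_types(Dict[str, List[T]], {T: int})
Dict[str, List[int]]

and replacements also recurse through nested generic aliases: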
>>> replace_types(Tuple[str, Union[List[str], float]], {str: int}) Tuple[int, Union[List[int], float]] """ if not type_map: return type_ type_args = get_args(type_) origin_type = get_origin(type_) if origin_type is Annotated: annotated_type, *annotations = type_args return Annotated[replace_types(annotated_type, type_map), tuple(annotations)] if (origin_type is ExtLiteral) or (sys.version_info >= (3, 8) and origin_type is Literal): return type_map.get(type_, type_) # Having type args is a good indicator that this is a typing module # class instantiation or a generic alias of some sort. if type_args: resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args) if all_identical(type_args, resolved_type_args): # If all arguments are the same, there is no need to modify the # type or create a new object at all return type_ if ( origin_type is not None and isinstance(type_, typing_base) and not isinstance(origin_type, typing_base) and getattr(type_, '_name', None) is not None ): # In python < 3.9 generic aliases don't exist so any of these like `list`, # `type` or `collections.abc.Callable` need to be translated. # See: https://www.python.org/dev/peps/pep-0585 origin_type = getattr(typing, type_._name) assert origin_type is not None # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__. # We also cannot use isinstance() since we have to compare types. if sys.version_info >= (3, 10) and origin_type is types.UnionType: # noqa: E721 return _UnionGenericAlias(origin_type, resolved_type_args) return origin_type[resolved_type_args] # We handle pydantic generic models separately as they don't have the same # semantics as "typing" classes or generic aliases if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__: type_args = type_.__parameters__ resolved_type_args = tuple(replace_types(t, type_map) for t in type_args) if all_identical(type_args, resolved_type_args): return type_ return type_[resolved_type_args] # Handle special case for typehints that can have lists as arguments. # `typing.Callable[[int, str], int]` is an example for this. if isinstance(type_, (List, list)): resolved_list = list(replace_types(element, type_map) for element in type_) if all_identical(type_, resolved_list): return type_ return resolved_list # For JsonWrapperValue, need to handle its inner type to allow correct parsing # of generic Json arguments like Json[T] if not origin_type and lenient_issubclass(type_, JsonWrapper): type_.inner_type = replace_types(type_.inner_type, type_map) return type_ # If all else fails, we try to resolve the type directly and otherwise just # return the input with no modifications. 
new_type = type_map.get(type_, type_) # Convert string to ForwardRef if isinstance(new_type, str): return ForwardRef(new_type) else: return new_type def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None: actual = len(parameters) expected = len(cls.__parameters__) if actual != expected: description = 'many' if actual > expected else 'few' raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}') DictValues: Type[Any] = {}.values().__class__ def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]: """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.""" if isinstance(v, TypeVar): yield v elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel): yield from v.__parameters__ elif isinstance(v, (DictValues, list)): for var in v: yield from iter_contained_typevars(var) else: args = get_args(v) for arg in args: yield from iter_contained_typevars(arg) def get_caller_frame_info() -> Tuple[Optional[str], bool]: """ Used inside a function to check whether it was called globally Will only work against non-compiled code, therefore used only in pydantic.generics :returns Tuple[module_name, called_globally] """ try: previous_caller_frame = sys._getframe(2) except ValueError as e: raise RuntimeError('This function must be used inside another function') from e except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it return None, False frame_globals = previous_caller_frame.f_globals return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals def _prepare_model_fields( created_model: Type[GenericModel], fields: Mapping[str, Any], instance_type_hints: Mapping[str, type], typevars_map: Mapping[Any, type], ) -> None: """ Replace DeferredType fields with concrete type hints and prepare them. """ for key, field in created_model.__fields__.items(): if key not in fields: assert field.type_.__class__ is not DeferredType # https://github.com/nedbat/coveragepy/issues/198 continue # pragma: no cover assert field.type_.__class__ is DeferredType, field.type_.__class__ field_type_hint = instance_type_hints[key] concrete_type = replace_types(field_type_hint, typevars_map) field.type_ = concrete_type field.outer_type_ = concrete_type field.prepare() created_model.__annotations__[key] = concrete_type pydantic-1.10.14/pydantic/json.py000066400000000000000000000064221455251250200166340ustar00rootroot00000000000000import datetime from collections import deque from decimal import Decimal from enum import Enum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from re import Pattern from types import GeneratorType from typing import Any, Callable, Dict, Type, Union from uuid import UUID from .color import Color from .networks import NameEmail from .types import SecretBytes, SecretStr __all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat' def isoformat(o: Union[datetime.date, datetime.time]) -> str: return o.isoformat() def decimal_encoder(dec_value: Decimal) -> Union[int, float]: """ Encodes a Decimal as int if there's no exponent, otherwise float. This is useful when we use ConstrainedDecimal to represent Numeric(x,0) where an integer (but not int typed) is used. Encoding this as a float results in failed round-tripping between encode and parse.
Our Id type is a prime example of this. >>> decimal_encoder(Decimal("1.0")) 1.0 >>> decimal_encoder(Decimal("1")) 1 """ if dec_value.as_tuple().exponent >= 0: return int(dec_value) else: return float(dec_value) ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { bytes: lambda o: o.decode(), Color: str, datetime.date: isoformat, datetime.datetime: isoformat, datetime.time: isoformat, datetime.timedelta: lambda td: td.total_seconds(), Decimal: decimal_encoder, Enum: lambda o: o.value, frozenset: list, deque: list, GeneratorType: list, IPv4Address: str, IPv4Interface: str, IPv4Network: str, IPv6Address: str, IPv6Interface: str, IPv6Network: str, NameEmail: str, Path: str, Pattern: lambda o: o.pattern, SecretBytes: str, SecretStr: str, set: list, UUID: str, } def pydantic_encoder(obj: Any) -> Any: from dataclasses import asdict, is_dataclass from .main import BaseModel if isinstance(obj, BaseModel): return obj.dict() elif is_dataclass(obj): return asdict(obj) # Check the class type and its superclasses for a matching encoder for base in obj.__class__.__mro__[:-1]: try: encoder = ENCODERS_BY_TYPE[base] except KeyError: continue return encoder(obj) else: # We have exited the for loop without finding a suitable encoder raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: # Check the class type and its superclasses for a matching encoder for base in obj.__class__.__mro__[:-1]: try: encoder = type_encoders[base] except KeyError: continue return encoder(obj) else: # We have exited the for loop without finding a suitable encoder return pydantic_encoder(obj) def timedelta_isoformat(td: datetime.timedelta) -> str: """ ISO 8601 encoding for Python timedelta object. 
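For example (editor's illustrative addition):

>>> timedelta_isoformat(datetime.timedelta(days=1, hours=2, minutes=3, seconds=4))
'P1DT2H3M4.000000S'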
""" minutes, seconds = divmod(td.seconds, 60) hours, minutes = divmod(minutes, 60) return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' pydantic-1.10.14/pydantic/main.py000066400000000000000000001265301455251250200166120ustar00rootroot00000000000000import warnings from abc import ABCMeta from copy import deepcopy from enum import Enum from functools import partial from pathlib import Path from types import FunctionType, prepare_class, resolve_bases from typing import ( TYPE_CHECKING, AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, cast, no_type_check, overload, ) from typing_extensions import dataclass_transform from .class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators from .config import BaseConfig, Extra, inherit_config, prepare_config from .error_wrappers import ErrorWrapper, ValidationError from .errors import ConfigError, DictError, ExtraError, MissingError from .fields import ( MAPPING_LIKE_SHAPES, Field, ModelField, ModelPrivateAttr, PrivateAttr, Undefined, is_finalvar_with_default_val, ) from .json import custom_pydantic_encoder, pydantic_encoder from .parse import Protocol, load_file, load_str_bytes from .schema import default_ref_template, model_schema from .types import PyObject, StrBytes from .typing import ( AnyCallable, get_args, get_origin, is_classvar, is_namedtuple, is_union, resolve_annotations, update_model_forward_refs, ) from .utils import ( DUNDER_ATTRIBUTES, ROOT_KEY, ClassAttribute, GetterDict, Representation, ValueItems, generate_model_signature, is_valid_field, is_valid_private_name, lenient_issubclass, sequence_like, smart_deepcopy, unique_list, validate_field_name, ) if TYPE_CHECKING: from inspect import Signature from .class_validators import ValidatorListDict from .types import ModelOrDc from .typing import ( AbstractSetIntStr, AnyClassMethod, CallableGenerator, DictAny, DictStrAny, MappingIntStrAny, ReprArgs, SetStr, TupleGenerator, ) Model = TypeVar('Model', bound='BaseModel') __all__ = 'BaseModel', 'create_model', 'validate_model' _T = TypeVar('_T') def validate_custom_root_type(fields: Dict[str, ModelField]) -> None: if len(fields) > 1: raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields') def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]: def hash_function(self_: Any) -> int: return hash(self_.__class__) + hash(tuple(self_.__dict__.values())) return hash_function if frozen else None # If a field is of type `Callable`, its default value should be a function and cannot to ignored. ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod) # When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES # Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra # (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's # safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for # the `BaseModel` class, since that's defined immediately after the metaclass. 
_is_base_model_class_defined = False @dataclass_transform(kw_only_default=True, field_specifiers=(Field,)) class ModelMetaclass(ABCMeta): @no_type_check # noqa C901 def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 fields: Dict[str, ModelField] = {} config = BaseConfig validators: 'ValidatorListDict' = {} pre_root_validators, post_root_validators = [], [] private_attributes: Dict[str, ModelPrivateAttr] = {} base_private_attributes: Dict[str, ModelPrivateAttr] = {} slots: SetStr = namespace.get('__slots__', ()) slots = {slots} if isinstance(slots, str) else set(slots) class_vars: SetStr = set() hash_func: Optional[Callable[[Any], int]] = None for base in reversed(bases): if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel: fields.update(smart_deepcopy(base.__fields__)) config = inherit_config(base.__config__, config) validators = inherit_validators(base.__validators__, validators) pre_root_validators += base.__pre_root_validators__ post_root_validators += base.__post_root_validators__ base_private_attributes.update(base.__private_attributes__) class_vars.update(base.__class_vars__) hash_func = base.__hash__ resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True) allowed_config_kwargs: SetStr = { key for key in dir(config) if not (key.startswith('__') and key.endswith('__')) # skip dunder methods and attributes } config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs} config_from_namespace = namespace.get('Config') if config_kwargs and config_from_namespace: raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs') config = inherit_config(config_from_namespace, config, **config_kwargs) validators = inherit_validators(extract_validators(namespace), validators) vg = ValidatorGroup(validators) for f in fields.values(): f.set_config(config) extra_validators = vg.get_validators(f.name) if extra_validators: f.class_validators.update(extra_validators) # re-run prepare to add extra validators f.populate_validators() prepare_config(config, name) untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES def is_untouched(v: Any) -> bool: return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method' if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'): annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None)) # annotation only fields need to come first in fields for ann_name, ann_type in annotations.items(): if is_classvar(ann_type): class_vars.add(ann_name) elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)): class_vars.add(ann_name) elif is_valid_field(ann_name): validate_field_name(bases, ann_name) value = namespace.get(ann_name, Undefined) allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,) if ( is_untouched(value) and ann_type != PyObject and not any( lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types ) ): continue fields[ann_name] = ModelField.infer( name=ann_name, value=value, annotation=ann_type, class_validators=vg.get_validators(ann_name), config=config, ) elif ann_name not in namespace and config.underscore_attrs_are_private: private_attributes[ann_name] = PrivateAttr() untouched_types = UNTOUCHED_TYPES + config.keep_untouched for var_name, value in namespace.items(): can_be_changed = var_name not in class_vars and not is_untouched(value) if 
isinstance(value, ModelPrivateAttr): if not is_valid_private_name(var_name): raise NameError( f'Private attributes "{var_name}" must not be a valid field name; ' f'Use sunder or dunder names, e. g. "_{var_name}" or "__{var_name}__"' ) private_attributes[var_name] = value elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed: private_attributes[var_name] = PrivateAttr(default=value) elif is_valid_field(var_name) and var_name not in annotations and can_be_changed: validate_field_name(bases, var_name) inferred = ModelField.infer( name=var_name, value=value, annotation=annotations.get(var_name, Undefined), class_validators=vg.get_validators(var_name), config=config, ) if var_name in fields: if lenient_issubclass(inferred.type_, fields[var_name].type_): inferred.type_ = fields[var_name].type_ else: raise TypeError( f'The type of {name}.{var_name} differs from the new default value; ' f'if you wish to change the type of this field, please use a type annotation' ) fields[var_name] = inferred _custom_root_type = ROOT_KEY in fields if _custom_root_type: validate_custom_root_type(fields) vg.check_for_unused() if config.json_encoders: json_encoder = partial(custom_pydantic_encoder, config.json_encoders) else: json_encoder = pydantic_encoder pre_rv_new, post_rv_new = extract_root_validators(namespace) if hash_func is None: hash_func = generate_hash_function(config.frozen) exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'} new_namespace = { '__config__': config, '__fields__': fields, '__exclude_fields__': { name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None } or None, '__include_fields__': { name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None } or None, '__validators__': vg.validators, '__pre_root_validators__': unique_list( pre_root_validators + pre_rv_new, name_factory=lambda v: v.__name__, ), '__post_root_validators__': unique_list( post_root_validators + post_rv_new, name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__, ), '__schema_cache__': {}, '__json_encoder__': staticmethod(json_encoder), '__custom_root_type__': _custom_root_type, '__private_attributes__': {**base_private_attributes, **private_attributes}, '__slots__': slots | private_attributes.keys(), '__hash__': hash_func, '__class_vars__': class_vars, **{n: v for n, v in namespace.items() if n not in exclude_from_namespace}, } cls = super().__new__(mcs, name, bases, new_namespace, **kwargs) # set __signature__ attr only for model class, but not for its instances cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config)) if resolve_forward_refs: cls.__try_update_forward_refs__() # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 # for attributes not in `new_namespace` (e.g. private attributes) for name, obj in namespace.items(): if name not in new_namespace: set_name = getattr(obj, '__set_name__', None) if callable(set_name): set_name(cls, name) return cls def __instancecheck__(self, instance: Any) -> bool: """ Avoid calling ABC _abc_subclasscheck unless we're pretty sure. 
See #3829 and python/cpython#92810 """ return hasattr(instance, '__fields__') and super().__instancecheck__(instance) object_setattr = object.__setattr__ class BaseModel(Representation, metaclass=ModelMetaclass): if TYPE_CHECKING: # populated by the metaclass, defined here to help IDEs only __fields__: ClassVar[Dict[str, ModelField]] = {} __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None __validators__: ClassVar[Dict[str, AnyCallable]] = {} __pre_root_validators__: ClassVar[List[AnyCallable]] __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]] __config__: ClassVar[Type[BaseConfig]] = BaseConfig __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x __schema_cache__: ClassVar['DictAny'] = {} __custom_root_type__: ClassVar[bool] = False __signature__: ClassVar['Signature'] __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] __class_vars__: ClassVar[SetStr] __fields_set__: ClassVar[SetStr] = set() Config = BaseConfig __slots__ = ('__dict__', '__fields_set__') __doc__ = '' # Null out the Representation docstring def __init__(__pydantic_self__, **data: Any) -> None: """ Create a new model by parsing and validating input data from keyword arguments. Raises ValidationError if the input data cannot be parsed to form a valid model. """ # Uses something other than `self` as the first arg to allow "self" as a settable attribute values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data) if validation_error: raise validation_error try: object_setattr(__pydantic_self__, '__dict__', values) except TypeError as e: raise TypeError( 'Model values must be a dict; you may not have returned a dictionary from a root validator' ) from e object_setattr(__pydantic_self__, '__fields_set__', fields_set) __pydantic_self__._init_private_attributes() @no_type_check def __setattr__(self, name, value): # noqa: C901 (ignore complexity) if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES: return object_setattr(self, name, value) if self.__config__.extra is not Extra.allow and name not in self.__fields__: raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') elif not self.__config__.allow_mutation or self.__config__.frozen: raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment') elif name in self.__fields__ and self.__fields__[name].final: raise TypeError( f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment' ) elif self.__config__.validate_assignment: new_values = {**self.__dict__, name: value} for validator in self.__pre_root_validators__: try: new_values = validator(self.__class__, new_values) except (ValueError, TypeError, AssertionError) as exc: raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__) known_field = self.__fields__.get(name, None) if known_field: # We want to # - make sure validators are called without the current value for this field inside `values` # - keep other values (e.g.
submodels) untouched (using `BaseModel.dict()` will change them into dicts) # - keep the order of the fields if not known_field.field_info.allow_mutation: raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned') dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name} value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__) if error_: raise ValidationError([error_], self.__class__) else: new_values[name] = value errors = [] for skip_on_failure, validator in self.__post_root_validators__: if skip_on_failure and errors: continue try: new_values = validator(self.__class__, new_values) except (ValueError, TypeError, AssertionError) as exc: errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) if errors: raise ValidationError(errors, self.__class__) # update the whole __dict__ as other values than just `value` # may be changed (e.g. with `root_validator`) object_setattr(self, '__dict__', new_values) else: self.__dict__[name] = value self.__fields_set__.add(name) def __getstate__(self) -> 'DictAny': private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__) return { '__dict__': self.__dict__, '__fields_set__': self.__fields_set__, '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined}, } def __setstate__(self, state: 'DictAny') -> None: object_setattr(self, '__dict__', state['__dict__']) object_setattr(self, '__fields_set__', state['__fields_set__']) for name, value in state.get('__private_attribute_values__', {}).items(): object_setattr(self, name, value) def _init_private_attributes(self) -> None: for name, private_attr in self.__private_attributes__.items(): default = private_attr.get_default() if default is not Undefined: object_setattr(self, name, default) def dict( self, *, include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, by_alias: bool = False, skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, ) -> 'DictStrAny': """ Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. """ if skip_defaults is not None: warnings.warn( f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"', DeprecationWarning, ) exclude_unset = skip_defaults return dict( self._iter( to_dict=True, by_alias=by_alias, include=include, exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, ) ) def json( self, *, include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, by_alias: bool = False, skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, encoder: Optional[Callable[[Any], Any]] = None, models_as_dict: bool = True, **dumps_kwargs: Any, ) -> str: """ Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`. `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`. 
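Illustrative usage (a minimal sketch; the `Point` model is hypothetical):

    >>> from pydantic import BaseModel
    >>> class Point(BaseModel):
    ...     x: int
    ...     y: int
    >>> Point(x=1, y=2).json()
    '{"x": 1, "y": 2}'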
""" if skip_defaults is not None: warnings.warn( f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"', DeprecationWarning, ) exclude_unset = skip_defaults encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__) # We don't directly call `self.dict()`, which does exactly this with `to_dict=True` # because we want to be able to keep raw `BaseModel` instances and not as `dict`. # This allows users to write custom JSON encoders for given `BaseModel` classes. data = dict( self._iter( to_dict=models_as_dict, by_alias=by_alias, include=include, exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, ) ) if self.__custom_root_type__: data = data[ROOT_KEY] return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs) @classmethod def _enforce_dict_if_root(cls, obj: Any) -> Any: if cls.__custom_root_type__ and ( not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY}) and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY}) or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES ): return {ROOT_KEY: obj} else: return obj @classmethod def parse_obj(cls: Type['Model'], obj: Any) -> 'Model': obj = cls._enforce_dict_if_root(obj) if not isinstance(obj, dict): try: obj = dict(obj) except (TypeError, ValueError) as e: exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}') raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e return cls(**obj) @classmethod def parse_raw( cls: Type['Model'], b: StrBytes, *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, ) -> 'Model': try: obj = load_str_bytes( b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=cls.__config__.json_loads, ) except (ValueError, TypeError, UnicodeDecodeError) as e: raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls) return cls.parse_obj(obj) @classmethod def parse_file( cls: Type['Model'], path: Union[str, Path], *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, ) -> 'Model': obj = load_file( path, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=cls.__config__.json_loads, ) return cls.parse_obj(obj) @classmethod def from_orm(cls: Type['Model'], obj: Any) -> 'Model': if not cls.__config__.orm_mode: raise ConfigError('You must have the config attribute orm_mode=True to use from_orm') obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj) m = cls.__new__(cls) values, fields_set, validation_error = validate_model(cls, obj) if validation_error: raise validation_error object_setattr(m, '__dict__', values) object_setattr(m, '__fields_set__', fields_set) m._init_private_attributes() return m @classmethod def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model': """ Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. 
Behaves as if `Config.extra = 'allow'` was set since it adds all passed values """ m = cls.__new__(cls) fields_values: Dict[str, Any] = {} for name, field in cls.__fields__.items(): if field.alt_alias and field.alias in values: fields_values[name] = values[field.alias] elif name in values: fields_values[name] = values[name] elif not field.required: fields_values[name] = field.get_default() fields_values.update(values) object_setattr(m, '__dict__', fields_values) if _fields_set is None: _fields_set = set(values.keys()) object_setattr(m, '__fields_set__', _fields_set) m._init_private_attributes() return m def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model': if deep: # chances of having empty dict here are quite low for using smart_deepcopy values = deepcopy(values) cls = self.__class__ m = cls.__new__(cls) object_setattr(m, '__dict__', values) object_setattr(m, '__fields_set__', fields_set) for name in self.__private_attributes__: value = getattr(self, name, Undefined) if value is not Undefined: if deep: value = deepcopy(value) object_setattr(m, name, value) return m def copy( self: 'Model', *, include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, update: Optional['DictStrAny'] = None, deep: bool = False, ) -> 'Model': """ Duplicate a model, optionally choose which fields to include, exclude and change. :param include: fields to include in new model :param exclude: fields to exclude from new model, as with values this takes precedence over include :param update: values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data :param deep: set to `True` to make a deep copy of the model :return: new model instance """ values = dict( self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False), **(update or {}), ) # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg if update: fields_set = self.__fields_set__ | update.keys() else: fields_set = set(self.__fields_set__) return self._copy_and_set_values(values, fields_set, deep=deep) @classmethod def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny': cached = cls.__schema_cache__.get((by_alias, ref_template)) if cached is not None: return cached s = model_schema(cls, by_alias=by_alias, ref_template=ref_template) cls.__schema_cache__[(by_alias, ref_template)] = s return s @classmethod def schema_json( cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any ) -> str: from .json import pydantic_encoder return cls.__config__.json_dumps( cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs ) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls: Type['Model'], value: Any) -> 'Model': if isinstance(value, cls): copy_on_model_validation = cls.__config__.copy_on_model_validation # whether to deep or shallow copy the model on validation, None means do not copy deep_copy: Optional[bool] = None if copy_on_model_validation not in {'deep', 'shallow', 'none'}: # Warn about deprecated behavior warnings.warn( "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning ) if copy_on_model_validation: deep_copy = False if copy_on_model_validation == 'shallow': # 
shallow copy deep_copy = False elif copy_on_model_validation == 'deep': # deep copy deep_copy = True if deep_copy is None: return value else: return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy) value = cls._enforce_dict_if_root(value) if isinstance(value, dict): return cls(**value) elif cls.__config__.orm_mode: return cls.from_orm(value) else: try: value_as_dict = dict(value) except (TypeError, ValueError) as e: raise DictError() from e return cls(**value_as_dict) @classmethod def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict: if isinstance(obj, GetterDict): return obj return cls.__config__.getter_dict(obj) @classmethod @no_type_check def _get_value( cls, v: Any, to_dict: bool, by_alias: bool, include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], exclude_unset: bool, exclude_defaults: bool, exclude_none: bool, ) -> Any: if isinstance(v, BaseModel): if to_dict: v_dict = v.dict( by_alias=by_alias, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, include=include, exclude=exclude, exclude_none=exclude_none, ) if ROOT_KEY in v_dict: return v_dict[ROOT_KEY] return v_dict else: return v.copy(include=include, exclude=exclude) value_exclude = ValueItems(v, exclude) if exclude else None value_include = ValueItems(v, include) if include else None if isinstance(v, dict): return { k_: cls._get_value( v_, to_dict=to_dict, by_alias=by_alias, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, include=value_include and value_include.for_element(k_), exclude=value_exclude and value_exclude.for_element(k_), exclude_none=exclude_none, ) for k_, v_ in v.items() if (not value_exclude or not value_exclude.is_excluded(k_)) and (not value_include or value_include.is_included(k_)) } elif sequence_like(v): seq_args = ( cls._get_value( v_, to_dict=to_dict, by_alias=by_alias, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, include=value_include and value_include.for_element(i), exclude=value_exclude and value_exclude.for_element(i), exclude_none=exclude_none, ) for i, v_ in enumerate(v) if (not value_exclude or not value_exclude.is_excluded(i)) and (not value_include or value_include.is_included(i)) ) return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args) elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False): return v.value else: return v @classmethod def __try_update_forward_refs__(cls, **localns: Any) -> None: """ Same as update_forward_refs but will not raise exception when forward references are not defined. """ update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,)) @classmethod def update_forward_refs(cls, **localns: Any) -> None: """ Try to update ForwardRefs on fields based on this Model, globalns and localns. """ update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns) def __iter__(self) -> 'TupleGenerator': """ so `dict(model)` works """ yield from self.__dict__.items() def _iter( self, to_dict: bool = False, by_alias: bool = False, include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, ) -> 'TupleGenerator': # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. 
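# Illustrative (hypothetical model): with `a: int = Field(0, exclude=True)` declared on the model,
# `m.dict()` drops `a`, while `m.dict(exclude={'b'})` merges both sources and drops `a` and `b`;
# the call-site mapping wins over the per-field setting wherever the two overlap.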
# The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. if exclude is not None or self.__exclude_fields__ is not None: exclude = ValueItems.merge(self.__exclude_fields__, exclude) if include is not None or self.__include_fields__ is not None: include = ValueItems.merge(self.__include_fields__, include, intersect=True) allowed_keys = self._calculate_keys( include=include, exclude=exclude, exclude_unset=exclude_unset # type: ignore ) if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): # huge boost for plain _iter() yield from self.__dict__.items() return value_exclude = ValueItems(self, exclude) if exclude is not None else None value_include = ValueItems(self, include) if include is not None else None for field_key, v in self.__dict__.items(): if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): continue if exclude_defaults: model_field = self.__fields__.get(field_key) if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v: continue if by_alias and field_key in self.__fields__: dict_key = self.__fields__[field_key].alias else: dict_key = field_key if to_dict or value_include or value_exclude: v = self._get_value( v, to_dict=to_dict, by_alias=by_alias, include=value_include and value_include.for_element(field_key), exclude=value_exclude and value_exclude.for_element(field_key), exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, ) yield dict_key, v def _calculate_keys( self, include: Optional['MappingIntStrAny'], exclude: Optional['MappingIntStrAny'], exclude_unset: bool, update: Optional['DictStrAny'] = None, ) -> Optional[AbstractSet[str]]: if include is None and exclude is None and exclude_unset is False: return None keys: AbstractSet[str] if exclude_unset: keys = self.__fields_set__.copy() else: keys = self.__dict__.keys() if include is not None: keys &= include.keys() if update: keys -= update.keys() if exclude: keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)} return keys def __eq__(self, other: Any) -> bool: if isinstance(other, BaseModel): return self.dict() == other.dict() else: return self.dict() == other def __repr_args__(self) -> 'ReprArgs': return [ (k, v) for k, v in self.__dict__.items() if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr) ] _is_base_model_class_defined = True @overload def create_model( __model_name: str, *, __config__: Optional[Type[BaseConfig]] = None, __base__: None = None, __module__: str = __name__, __validators__: Dict[str, 'AnyClassMethod'] = None, __cls_kwargs__: Dict[str, Any] = None, **field_definitions: Any, ) -> Type['BaseModel']: ... @overload def create_model( __model_name: str, *, __config__: Optional[Type[BaseConfig]] = None, __base__: Union[Type['Model'], Tuple[Type['Model'], ...]], __module__: str = __name__, __validators__: Dict[str, 'AnyClassMethod'] = None, __cls_kwargs__: Dict[str, Any] = None, **field_definitions: Any, ) -> Type['Model']: ... def create_model( __model_name: str, *, __config__: Optional[Type[BaseConfig]] = None, __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None, __module__: str = __name__, __validators__: Dict[str, 'AnyClassMethod'] = None, __cls_kwargs__: Dict[str, Any] = None, __slots__: Optional[Tuple[str, ...]] = None, **field_definitions: Any, ) -> Type['Model']: """ Dynamically create a model. 
:param __model_name: name of the created model :param __config__: config class to use for the new model :param __base__: base class for the new model to inherit from :param __module__: module of the created model :param __validators__: a dict of method names and @validator class methods :param __cls_kwargs__: a dict for class creation :param __slots__: Deprecated, `__slots__` should not be passed to `create_model` :param field_definitions: fields of the model (or extra fields if a base is supplied) in the format `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g. `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format `<name>=<Field>` or `<name>=(<type>, <FieldInfo>)`, e.g. `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or `foo=(str, FieldInfo(title='Foo'))` """ if __slots__ is not None: # __slots__ will be ignored from here on warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning) if __base__ is not None: if __config__ is not None: raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together') if not isinstance(__base__, tuple): __base__ = (__base__,) else: __base__ = (cast(Type['Model'], BaseModel),) __cls_kwargs__ = __cls_kwargs__ or {} fields = {} annotations = {} for f_name, f_def in field_definitions.items(): if not is_valid_field(f_name): warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning) if isinstance(f_def, tuple): try: f_annotation, f_value = f_def except ValueError as e: raise ConfigError( 'field definitions should either be a tuple of (<type>, <default>) or just a ' 'default value, unfortunately this means tuples as ' 'default values are not allowed' ) from e else: f_annotation, f_value = None, f_def if f_annotation: annotations[f_name] = f_annotation fields[f_name] = f_value namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__} if __validators__: namespace.update(__validators__) namespace.update(fields) if __config__: namespace['Config'] = inherit_config(__config__, BaseConfig) resolved_bases = resolve_bases(__base__) meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__) if resolved_bases is not __base__: ns['__orig_bases__'] = __base__ namespace.update(ns) return meta(__model_name, resolved_bases, namespace, **kwds) _missing = object() def validate_model( # noqa: C901 (ignore complexity) model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None ) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]: """ Validate data against a model.
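Unlike `model(**input_data)` this returns errors rather than raising them, as the third
element of a `(values, fields_set, error)` triple. Sketch (hypothetical model):

    >>> from pydantic import BaseModel
    >>> from pydantic.main import validate_model
    >>> class M(BaseModel):
    ...     a: int
    >>> validate_model(M, {'a': '1'})
    ({'a': 1}, {'a'}, None)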
""" values = {} errors = [] # input_data names, possibly alias names_used = set() # field names, never aliases fields_set = set() config = model.__config__ check_extra = config.extra is not Extra.ignore cls_ = cls or model for validator in model.__pre_root_validators__: try: input_data = validator(cls_, input_data) except (ValueError, TypeError, AssertionError) as exc: return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_) for name, field in model.__fields__.items(): value = input_data.get(field.alias, _missing) using_name = False if value is _missing and config.allow_population_by_field_name and field.alt_alias: value = input_data.get(field.name, _missing) using_name = True if value is _missing: if field.required: errors.append(ErrorWrapper(MissingError(), loc=field.alias)) continue value = field.get_default() if not config.validate_all and not field.validate_always: values[name] = value continue else: fields_set.add(name) if check_extra: names_used.add(field.name if using_name else field.alias) v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_) if isinstance(errors_, ErrorWrapper): errors.append(errors_) elif isinstance(errors_, list): errors.extend(errors_) else: values[name] = v_ if check_extra: if isinstance(input_data, GetterDict): extra = input_data.extra_keys() - names_used else: extra = input_data.keys() - names_used if extra: fields_set |= extra if config.extra is Extra.allow: for f in extra: values[f] = input_data[f] else: for f in sorted(extra): errors.append(ErrorWrapper(ExtraError(), loc=f)) for skip_on_failure, validator in model.__post_root_validators__: if skip_on_failure and errors: continue try: values = validator(cls_, values) except (ValueError, TypeError, AssertionError) as exc: errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) if errors: return values, fields_set, ValidationError(errors, cls_) else: return values, fields_set, None pydantic-1.10.14/pydantic/mypy.py000066400000000000000000001135311455251250200166610ustar00rootroot00000000000000import sys from configparser import ConfigParser from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union from mypy.errorcodes import ErrorCode from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, ARG_OPT, ARG_POS, ARG_STAR2, MDEF, Argument, AssignmentStmt, Block, CallExpr, ClassDef, Context, Decorator, EllipsisExpr, FuncBase, FuncDef, JsonDict, MemberExpr, NameExpr, PassStmt, PlaceholderNode, RefExpr, StrExpr, SymbolNode, SymbolTableNode, TempNode, TypeInfo, TypeVarExpr, Var, ) from mypy.options import Options from mypy.plugin import ( CheckerPluginInterface, ClassDefContext, FunctionContext, MethodContext, Plugin, ReportConfigContext, SemanticAnalyzerPluginInterface, ) from mypy.plugins import dataclasses from mypy.semanal import set_callable_name # type: ignore from mypy.server.trigger import make_wildcard_trigger from mypy.types import ( AnyType, CallableType, Instance, NoneType, Overloaded, ProperType, Type, TypeOfAny, TypeType, TypeVarType, UnionType, get_proper_type, ) from mypy.typevars import fill_typevars from mypy.util import get_unique_redefinition_name from mypy.version import __version__ as mypy_version from pydantic.utils import is_valid_field try: from mypy.types import TypeVarDef # type: ignore[attr-defined] except ImportError: # pragma: no cover # Backward-compatible with TypeVarDef from Mypy 0.910. 
from mypy.types import TypeVarType as TypeVarDef CONFIGFILE_KEY = 'pydantic-mypy' METADATA_KEY = 'pydantic-mypy-metadata' _NAMESPACE = __name__[:-5] # 'pydantic' in 1.10.X, 'pydantic.v1' in v2.X BASEMODEL_FULLNAME = f'{_NAMESPACE}.main.BaseModel' BASESETTINGS_FULLNAME = f'{_NAMESPACE}.env_settings.BaseSettings' MODEL_METACLASS_FULLNAME = f'{_NAMESPACE}.main.ModelMetaclass' FIELD_FULLNAME = f'{_NAMESPACE}.fields.Field' DATACLASS_FULLNAME = f'{_NAMESPACE}.dataclasses.dataclass' def parse_mypy_version(version: str) -> Tuple[int, ...]: return tuple(map(int, version.partition('+')[0].split('.'))) MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version) BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__' # Increment version if plugin changes and mypy caches should be invalidated __version__ = 2 def plugin(version: str) -> 'TypingType[Plugin]': """ `version` is the mypy version string We might want to use this to print a warning if the mypy version being used is newer, or especially older, than we expect (or need). """ return PydanticPlugin class PydanticPlugin(Plugin): def __init__(self, options: Options) -> None: self.plugin_config = PydanticPluginConfig(options) self._plugin_data = self.plugin_config.to_data() super().__init__(options) def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]': sym = self.lookup_fully_qualified(fullname) if sym and isinstance(sym.node, TypeInfo): # pragma: no branch # No branching may occur if the mypy cache has not been cleared if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro): return self._pydantic_model_class_maker_callback return None def get_metaclass_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: if fullname == MODEL_METACLASS_FULLNAME: return self._pydantic_model_metaclass_marker_callback return None def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]': sym = self.lookup_fully_qualified(fullname) if sym and sym.fullname == FIELD_FULLNAME: return self._pydantic_field_callback return None def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]: if fullname.endswith('.from_orm'): return from_orm_callback return None def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: """Mark pydantic.dataclasses as dataclass. Mypy version 1.1.1 added support for `@dataclass_transform` decorator. """ if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1): return dataclasses.dataclass_class_maker_callback # type: ignore[return-value] return None def report_config_data(self, ctx: ReportConfigContext) -> Dict[str, Any]: """Return all plugin config data. Used by mypy to determine if cache needs to be discarded. """ return self._plugin_data def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None: transformer = PydanticModelTransformer(ctx, self.plugin_config) transformer.transform() def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None: """Reset dataclass_transform_spec attribute of ModelMetaclass. Let the plugin handle it. This behavior can be disabled if 'debug_dataclass_transform' is set to True', for testing purposes. 
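For example, the option can be enabled in `pyproject.toml` (illustrative snippet,
read via `parse_toml` below):

    [tool.pydantic-mypy]
    debug_dataclass_transform = true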
""" if self.plugin_config.debug_dataclass_transform: return info_metaclass = ctx.cls.info.declared_metaclass assert info_metaclass, "callback not passed from 'get_metaclass_hook'" if getattr(info_metaclass.type, 'dataclass_transform_spec', None): info_metaclass.type.dataclass_transform_spec = None # type: ignore[attr-defined] def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type': """ Extract the type of the `default` argument from the Field function, and use it as the return type. In particular: * Check whether the default and default_factory argument is specified. * Output an error if both are specified. * Retrieve the type of the argument which is specified, and use it as return type for the function. """ default_any_type = ctx.default_return_type assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()' assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()' default_args = ctx.args[0] default_factory_args = ctx.args[1] if default_args and default_factory_args: error_default_and_default_factory_specified(ctx.api, ctx.context) return default_any_type if default_args: default_type = ctx.arg_types[0][0] default_arg = default_args[0] # Fallback to default Any type if the field is required if not isinstance(default_arg, EllipsisExpr): return default_type elif default_factory_args: default_factory_type = ctx.arg_types[1][0] # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter # Pydantic calls the default factory without any argument, so we retrieve the first item if isinstance(default_factory_type, Overloaded): if MYPY_VERSION_TUPLE > (0, 910): default_factory_type = default_factory_type.items[0] else: # Mypy0.910 exposes the items of overloaded types in a function default_factory_type = default_factory_type.items()[0] # type: ignore[operator] if isinstance(default_factory_type, CallableType): ret_type = default_factory_type.ret_type # mypy doesn't think `ret_type` has `args`, you'd think mypy should know, # add this check in case it varies by version args = getattr(ret_type, 'args', None) if args: if all(isinstance(arg, TypeVarType) for arg in args): # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any` ret_type.args = tuple(default_any_type for _ in args) # type: ignore[attr-defined] return ret_type return default_any_type class PydanticPluginConfig: __slots__ = ( 'init_forbid_extra', 'init_typed', 'warn_required_dynamic_aliases', 'warn_untyped_fields', 'debug_dataclass_transform', ) init_forbid_extra: bool init_typed: bool warn_required_dynamic_aliases: bool warn_untyped_fields: bool debug_dataclass_transform: bool # undocumented def __init__(self, options: Options) -> None: if options.config_file is None: # pragma: no cover return toml_config = parse_toml(options.config_file) if toml_config is not None: config = toml_config.get('tool', {}).get('pydantic-mypy', {}) for key in self.__slots__: setting = config.get(key, False) if not isinstance(setting, bool): raise ValueError(f'Configuration value must be a boolean for key: {key}') setattr(self, key, setting) else: plugin_config = ConfigParser() plugin_config.read(options.config_file) for key in self.__slots__: setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False) setattr(self, key, setting) def to_data(self) -> Dict[str, Any]: return {key: getattr(self, key) for key in self.__slots__} def from_orm_callback(ctx: MethodContext) -> Type: 
""" Raise an error if orm_mode is not enabled """ model_type: Instance ctx_type = ctx.type if isinstance(ctx_type, TypeType): ctx_type = ctx_type.item if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance): model_type = ctx_type.ret_type # called on the class elif isinstance(ctx_type, Instance): model_type = ctx_type # called on an instance (unusual, but still valid) else: # pragma: no cover detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})' error_unexpected_behavior(detail, ctx.api, ctx.context) return ctx.default_return_type pydantic_metadata = model_type.type.metadata.get(METADATA_KEY) if pydantic_metadata is None: return ctx.default_return_type orm_mode = pydantic_metadata.get('config', {}).get('orm_mode') if orm_mode is not True: error_from_orm(get_name(model_type.type), ctx.api, ctx.context) return ctx.default_return_type class PydanticModelTransformer: tracked_config_fields: Set[str] = { 'extra', 'allow_mutation', 'frozen', 'orm_mode', 'allow_population_by_field_name', 'alias_generator', } def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None: self._ctx = ctx self.plugin_config = plugin_config def transform(self) -> None: """ Configures the BaseModel subclass according to the plugin settings. In particular: * determines the model config and fields, * adds a fields-aware signature for the initializer and construct methods * freezes the class if allow_mutation = False or frozen = True * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses """ ctx = self._ctx info = ctx.cls.info self.adjust_validator_signatures() config = self.collect_config() fields = self.collect_fields(config) is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1]) self.add_initializer(fields, config, is_settings) self.add_construct_method(fields) self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True) info.metadata[METADATA_KEY] = { 'fields': {field.name: field.serialize() for field in fields}, 'config': config.set_values_dict(), } def adjust_validator_signatures(self) -> None: """When we decorate a function `f` with `pydantic.validator(...), mypy sees `f` as a regular method taking a `self` instance, even though pydantic internally wraps `f` with `classmethod` if necessary. Teach mypy this by marking any function whose outermost decorator is a `validator()` call as a classmethod. """ for name, sym in self._ctx.cls.info.names.items(): if isinstance(sym.node, Decorator): first_dec = sym.node.original_decorators[0] if ( isinstance(first_dec, CallExpr) and isinstance(first_dec.callee, NameExpr) and first_dec.callee.fullname == f'{_NAMESPACE}.class_validators.validator' ): sym.node.func.is_class = True def collect_config(self) -> 'ModelConfigData': """ Collects the values of the config attributes that are used by the plugin, accounting for parent classes. 
""" ctx = self._ctx cls = ctx.cls config = ModelConfigData() for stmt in cls.defs.body: if not isinstance(stmt, ClassDef): continue if stmt.name == 'Config': for substmt in stmt.defs.body: if not isinstance(substmt, AssignmentStmt): continue config.update(self.get_config_update(substmt)) if ( config.has_alias_generator and not config.allow_population_by_field_name and self.plugin_config.warn_required_dynamic_aliases ): error_required_dynamic_aliases(ctx.api, stmt) for info in cls.info.mro[1:]: # 0 is the current class if METADATA_KEY not in info.metadata: continue # Each class depends on the set of fields in its ancestors ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) for name, value in info.metadata[METADATA_KEY]['config'].items(): config.setdefault(name, value) return config def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']: """ Collects the fields for the model, accounting for parent classes """ # First, collect fields belonging to the current class. ctx = self._ctx cls = self._ctx.cls fields = [] # type: List[PydanticModelField] known_fields = set() # type: Set[str] for stmt in cls.defs.body: if not isinstance(stmt, AssignmentStmt): # `and stmt.new_syntax` to require annotation continue lhs = stmt.lvalues[0] if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name): continue if not stmt.new_syntax and self.plugin_config.warn_untyped_fields: error_untyped_fields(ctx.api, stmt) # if lhs.name == '__config__': # BaseConfig not well handled; I'm not sure why yet # continue sym = cls.info.names.get(lhs.name) if sym is None: # pragma: no cover # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) # This is the same logic used in the dataclasses plugin continue node = sym.node if isinstance(node, PlaceholderNode): # pragma: no cover # See the PlaceholderNode docstring for more detail about how this can occur # Basically, it is an edge case when dealing with complex import logic # This is the same logic used in the dataclasses plugin continue if not isinstance(node, Var): # pragma: no cover # Don't know if this edge case still happens with the `is_valid_field` check above # but better safe than sorry continue # x: ClassVar[int] is ignored by dataclasses. 
if node.is_classvar: continue is_required = self.get_is_required(cls, stmt, lhs) alias, has_dynamic_alias = self.get_alias_info(stmt) if ( has_dynamic_alias and not model_config.allow_population_by_field_name and self.plugin_config.warn_required_dynamic_aliases ): error_required_dynamic_aliases(ctx.api, stmt) fields.append( PydanticModelField( name=lhs.name, is_required=is_required, alias=alias, has_dynamic_alias=has_dynamic_alias, line=stmt.line, column=stmt.column, ) ) known_fields.add(lhs.name) all_fields = fields.copy() for info in cls.info.mro[1:]: # 0 is the current class, -2 is BaseModel, -1 is object if METADATA_KEY not in info.metadata: continue superclass_fields = [] # Each class depends on the set of fields in its ancestors ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) for name, data in info.metadata[METADATA_KEY]['fields'].items(): if name not in known_fields: field = PydanticModelField.deserialize(info, data) known_fields.add(name) superclass_fields.append(field) else: (field,) = (a for a in all_fields if a.name == name) all_fields.remove(field) superclass_fields.append(field) all_fields = superclass_fields + all_fields return all_fields def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None: """ Adds a fields-aware `__init__` method to the class. The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. """ ctx = self._ctx typed = self.plugin_config.init_typed use_alias = config.allow_population_by_field_name is not True force_all_optional = is_settings or bool( config.has_alias_generator and not config.allow_population_by_field_name ) init_arguments = self.get_field_arguments( fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias ) if not self.should_init_forbid_extra(fields, config): var = Var('kwargs') init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) if '__init__' not in ctx.cls.info.names: add_method(ctx, '__init__', init_arguments, NoneType()) def add_construct_method(self, fields: List['PydanticModelField']) -> None: """ Adds a fully typed `construct` classmethod to the class. Similar to the fields-aware __init__ method, but always uses the field names (not aliases), and does not treat settings fields as optional. """ ctx = self._ctx set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')]) optional_set_str = UnionType([set_str, NoneType()]) fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False) construct_arguments = [fields_set_argument] + construct_arguments obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object') self_tvar_name = '_PydanticBaseModel' # Make sure it does not conflict with other names in the class tvar_fullname = ctx.cls.fullname + '.' 
+ self_tvar_name if MYPY_VERSION_TUPLE >= (1, 4): tvd = TypeVarType( self_tvar_name, tvar_fullname, -1, [], obj_type, AnyType(TypeOfAny.from_omitted_generics), # type: ignore[arg-type] ) self_tvar_expr = TypeVarExpr( self_tvar_name, tvar_fullname, [], obj_type, AnyType(TypeOfAny.from_omitted_generics), # type: ignore[arg-type] ) else: tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type) self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type) ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr) # Backward-compatible with TypeVarDef from Mypy 0.910. if isinstance(tvd, TypeVarType): self_type = tvd else: self_type = TypeVarType(tvd) add_method( ctx, 'construct', construct_arguments, return_type=self_type, self_type=self_type, tvar_def=tvd, is_classmethod=True, ) def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None: """ Marks all fields as properties so that attempts to set them trigger mypy errors. This is the same approach used by the attrs and dataclasses plugins. """ ctx = self._ctx info = ctx.cls.info for field in fields: sym_node = info.names.get(field.name) if sym_node is not None: var = sym_node.node if isinstance(var, Var): var.is_property = frozen elif isinstance(var, PlaceholderNode) and not ctx.api.final_iteration: # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage ctx.api.defer() else: # pragma: no cover # I don't know whether it's possible to hit this branch, but I've added it for safety try: var_str = str(var) except TypeError: # This happens for PlaceholderNode; perhaps it will happen for other types in the future.. var_str = repr(var) detail = f'sym_node.node: {var_str} (of type {var.__class__})' error_unexpected_behavior(detail, ctx.api, ctx.cls) else: var = field.to_var(info, use_alias=False) var.info = info var.is_property = frozen var._fullname = get_fullname(info) + '.' + get_name(var) info.names[get_name(var)] = SymbolTableNode(MDEF, var) def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']: """ Determines the config update due to a single statement in the Config class definition. Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) """ lhs = substmt.lvalues[0] if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields): return None if lhs.name == 'extra': if isinstance(substmt.rvalue, StrExpr): forbid_extra = substmt.rvalue.value == 'forbid' elif isinstance(substmt.rvalue, MemberExpr): forbid_extra = substmt.rvalue.name == 'forbid' else: error_invalid_config_value(lhs.name, self._ctx.api, substmt) return None return ModelConfigData(forbid_extra=forbid_extra) if lhs.name == 'alias_generator': has_alias_generator = True if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None': has_alias_generator = False return ModelConfigData(has_alias_generator=has_alias_generator) if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'): return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'}) error_invalid_config_value(lhs.name, self._ctx.api, substmt) return None @staticmethod def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool: """ Returns a boolean indicating whether the field defined in `stmt` is a required field. 
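Sketch of the cases handled below (hypothetical model):

    class M(BaseModel):
        a: int                               # required: annotation only, no implicit default
        b: Optional[int]                     # not required: implicit default of None
        c: int = Field(...)                  # required: default is Ellipsis
        d: int = Field(default_factory=int)  # not required: factory supplies the default
        e: int = 3                           # not required: plain default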
""" expr = stmt.rvalue if isinstance(expr, TempNode): # TempNode means annotation-only, so only non-required if Optional value_type = get_proper_type(cls.info[lhs.name].type) return not PydanticModelTransformer.type_has_implicit_default(value_type) if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: # The "default value" is a call to `Field`; at this point, the field is # only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`) or if default_factory # is specified. for arg, name in zip(expr.args, expr.arg_names): # If name is None, then this arg is the default because it is the only positional argument. if name is None or name == 'default': return arg.__class__ is EllipsisExpr if name == 'default_factory': return False # In this case, default and default_factory are not specified, so we need to look at the annotation value_type = get_proper_type(cls.info[lhs.name].type) return not PydanticModelTransformer.type_has_implicit_default(value_type) # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) return isinstance(expr, EllipsisExpr) @staticmethod def type_has_implicit_default(type_: Optional[ProperType]) -> bool: """ Returns True if the passed type will be given an implicit default value. In pydantic v1, this is the case for Optional types and Any (with default value None). """ if isinstance(type_, AnyType): # Annotated as Any return True if isinstance(type_, UnionType) and any( isinstance(item, NoneType) or isinstance(item, AnyType) for item in type_.items ): # Annotated as Optional, or otherwise having NoneType or AnyType in the union return True return False @staticmethod def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]: """ Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. If `has_dynamic_alias` is True, `alias` will be None. """ expr = stmt.rvalue if isinstance(expr, TempNode): # TempNode means annotation-only return None, False if not ( isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME ): # Assigned value is not a call to pydantic.fields.Field return None, False for i, arg_name in enumerate(expr.arg_names): if arg_name != 'alias': continue arg = expr.args[i] if isinstance(arg, StrExpr): return arg.value, False else: return None, True return None, False def get_field_arguments( self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool ) -> List[Argument]: """ Helper function used during the construction of the `__init__` and `construct` method signatures. Returns a list of mypy Argument instances for use in the generated signatures. """ info = self._ctx.cls.info arguments = [ field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias) for field in fields if not (use_alias and field.has_dynamic_alias) ] return arguments def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool: """ Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, *unless* a required dynamic alias is present (since then we can't determine a valid signature). 
""" if not config.allow_population_by_field_name: if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): return False if config.forbid_extra: return True return self.plugin_config.init_forbid_extra @staticmethod def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool: """ Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be determined during static analysis. """ for field in fields: if field.has_dynamic_alias: return True if has_alias_generator: for field in fields: if field.alias is None: return True return False class PydanticModelField: def __init__( self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int ): self.name = name self.is_required = is_required self.alias = alias self.has_dynamic_alias = has_dynamic_alias self.line = line self.column = column def to_var(self, info: TypeInfo, use_alias: bool) -> Var: name = self.name if use_alias and self.alias is not None: name = self.alias return Var(name, info[self.name].type) def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument: if typed and info[self.name].type is not None: type_annotation = info[self.name].type else: type_annotation = AnyType(TypeOfAny.explicit) return Argument( variable=self.to_var(info, use_alias), type_annotation=type_annotation, initializer=None, kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED, ) def serialize(self) -> JsonDict: return self.__dict__ @classmethod def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField': return cls(**data) class ModelConfigData: def __init__( self, forbid_extra: Optional[bool] = None, allow_mutation: Optional[bool] = None, frozen: Optional[bool] = None, orm_mode: Optional[bool] = None, allow_population_by_field_name: Optional[bool] = None, has_alias_generator: Optional[bool] = None, ): self.forbid_extra = forbid_extra self.allow_mutation = allow_mutation self.frozen = frozen self.orm_mode = orm_mode self.allow_population_by_field_name = allow_population_by_field_name self.has_alias_generator = has_alias_generator def set_values_dict(self) -> Dict[str, Any]: return {k: v for k, v in self.__dict__.items() if v is not None} def update(self, config: Optional['ModelConfigData']) -> None: if config is None: return for k, v in config.set_values_dict().items(): setattr(self, k, v) def setdefault(self, key: str, value: Any) -> None: if getattr(self, key) is None: setattr(self, key, value) ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic') ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic') def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None: api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM) def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: api.fail('Required 
dynamic aliases disallowed', context, code=ERROR_ALIAS) def error_unexpected_behavior( detail: str, api: Union[CheckerPluginInterface, SemanticAnalyzerPluginInterface], context: Context ) -> None: # pragma: no cover # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path link = 'https://github.com/pydantic/pydantic/issues/new/choose' full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' full_message += f'Please consider reporting this bug at {link} so we can try to fix it!' api.fail(full_message, context, code=ERROR_UNEXPECTED) def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None: api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS) def add_method( ctx: ClassDefContext, name: str, args: List[Argument], return_type: Type, self_type: Optional[Type] = None, tvar_def: Optional[TypeVarDef] = None, is_classmethod: bool = False, is_new: bool = False, # is_staticmethod: bool = False, ) -> None: """ Adds a new method to a class. This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged """ info = ctx.cls.info # First remove any previously generated methods with the same name # to avoid clashes and problems in the semantic analyzer. if name in info.names: sym = info.names[name] if sym.plugin_generated and isinstance(sym.node, FuncDef): ctx.cls.defs.body.remove(sym.node) # pragma: no cover self_type = self_type or fill_typevars(info) if is_classmethod or is_new: first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)] # elif is_staticmethod: # first = [] else: self_type = self_type or fill_typevars(info) first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)] args = first + args arg_types, arg_names, arg_kinds = [], [], [] for arg in args: assert arg.type_annotation, 'All arguments must be fully typed.' arg_types.append(arg.type_annotation) arg_names.append(get_name(arg.variable)) arg_kinds.append(arg.kind) function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function') signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) if tvar_def: signature.variables = [tvar_def] func = FuncDef(name, args, Block([PassStmt()])) func.info = info func.type = set_callable_name(signature, func) func.is_class = is_classmethod # func.is_static = is_staticmethod func._fullname = get_fullname(info) + '.' + name func.line = info.line # NOTE: we would like the plugin generated node to dominate, but we still # need to keep any existing definitions so they get semantically analyzed. if name in info.names: # Get a nice unique name instead. 
r_name = get_unique_redefinition_name(name, info.names) info.names[r_name] = info.names[name] if is_classmethod: # or is_staticmethod: func.is_decorated = True v = Var(name, func.type) v.info = info v._fullname = func._fullname # if is_classmethod: v.is_classmethod = True dec = Decorator(func, [NameExpr('classmethod')], v) # else: # v.is_staticmethod = True # dec = Decorator(func, [NameExpr('staticmethod')], v) dec.line = info.line sym = SymbolTableNode(MDEF, dec) else: sym = SymbolTableNode(MDEF, func) sym.plugin_generated = True info.names[name] = sym info.defn.defs.body.append(func) def get_fullname(x: Union[FuncBase, SymbolNode]) -> str: """ Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. """ fn = x.fullname if callable(fn): # pragma: no cover return fn() return fn def get_name(x: Union[FuncBase, SymbolNode]) -> str: """ Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. """ fn = x.name if callable(fn): # pragma: no cover return fn() return fn def parse_toml(config_file: str) -> Optional[Dict[str, Any]]: if not config_file.endswith('.toml'): return None read_mode = 'rb' if sys.version_info >= (3, 11): import tomllib as toml_ else: try: import tomli as toml_ except ImportError: # older versions of mypy have toml as a dependency, not tomli read_mode = 'r' try: import toml as toml_ # type: ignore[no-redef] except ImportError: # pragma: no cover import warnings warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') return None with open(config_file, read_mode) as rf: return toml_.load(rf) # type: ignore[arg-type] pydantic-1.10.14/pydantic/networks.py000066400000000000000000000530531455251250200175400ustar00rootroot00000000000000import re from ipaddress import ( IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network, _BaseAddress, _BaseNetwork, ) from typing import ( TYPE_CHECKING, Any, Collection, Dict, Generator, List, Match, Optional, Pattern, Set, Tuple, Type, Union, cast, no_type_check, ) from . import errors from .utils import Representation, update_not_none from .validators import constr_length_validator, str_validator if TYPE_CHECKING: import email_validator from typing_extensions import TypedDict from .config import BaseConfig from .fields import ModelField from .typing import AnyCallable CallableGenerator = Generator[AnyCallable, None, None] class Parts(TypedDict, total=False): scheme: str user: Optional[str] password: Optional[str] ipv4: Optional[str] ipv6: Optional[str] domain: Optional[str] port: Optional[str] path: Optional[str] query: Optional[str] fragment: Optional[str] class HostParts(TypedDict, total=False): host: str tld: Optional[str] host_type: Optional[str] port: Optional[str] rebuild: bool else: email_validator = None class Parts(dict): pass NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]] __all__ = [ 'AnyUrl', 'AnyHttpUrl', 'FileUrl', 'HttpUrl', 'stricturl', 'EmailStr', 'NameEmail', 'IPvAnyAddress', 'IPvAnyInterface', 'IPvAnyNetwork', 'PostgresDsn', 'CockroachDsn', 'AmqpDsn', 'RedisDsn', 'MongoDsn', 'KafkaDsn', 'validate_email', ] _url_regex_cache = None _multi_host_url_regex_cache = None _ascii_domain_regex_cache = None _int_domain_regex_cache = None _host_regex_cache = None _host_regex = ( r'(?:' r'(?P<ipv4>(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|' # ipv4 r'(?P<ipv6>\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|' # ipv6 r'(?P<domain>[^\s/:?#]+)' # domain, validation occurs later r')?' r'(?::(?P<port>\d+))?'
# port ) _scheme_regex = r'(?:(?P<scheme>[a-z][a-z0-9+\-.]+)://)?' # scheme https://tools.ietf.org/html/rfc3986#appendix-A _user_info_regex = r'(?:(?P<user>[^\s:/]*)(?::(?P<password>[^\s/]*))?@)?' _path_regex = r'(?P<path>/[^\s?#]*)?' _query_regex = r'(?:\?(?P<query>[^\s#]*))?' _fragment_regex = r'(?:#(?P<fragment>[^\s#]*))?' def url_regex() -> Pattern[str]: global _url_regex_cache if _url_regex_cache is None: _url_regex_cache = re.compile( rf'{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}', re.IGNORECASE, ) return _url_regex_cache def multi_host_url_regex() -> Pattern[str]: """ Compiled multi host url regex. Additionally to `url_regex` it allows to match multiple hosts. E.g. host1.db.net,host2.db.net """ global _multi_host_url_regex_cache if _multi_host_url_regex_cache is None: _multi_host_url_regex_cache = re.compile( rf'{_scheme_regex}{_user_info_regex}' r'(?P<hosts>([^/]*))' # validation occurs later rf'{_path_regex}{_query_regex}{_fragment_regex}', re.IGNORECASE, ) return _multi_host_url_regex_cache def ascii_domain_regex() -> Pattern[str]: global _ascii_domain_regex_cache if _ascii_domain_regex_cache is None: ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?' ascii_domain_ending = r'(?P<tld>\.[a-z]{2,63})?\.?' _ascii_domain_regex_cache = re.compile( fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE ) return _ascii_domain_regex_cache def int_domain_regex() -> Pattern[str]: global _int_domain_regex_cache if _int_domain_regex_cache is None: int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?' int_domain_ending = r'(?P<tld>(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?' _int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE) return _int_domain_regex_cache def host_regex() -> Pattern[str]: global _host_regex_cache if _host_regex_cache is None: _host_regex_cache = re.compile( _host_regex, re.IGNORECASE, ) return _host_regex_cache class AnyUrl(str): strip_whitespace = True min_length = 1 max_length = 2**16 allowed_schemes: Optional[Collection[str]] = None tld_required: bool = False user_required: bool = False host_required: bool = True hidden_parts: Set[str] = set() __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment') @no_type_check def __new__(cls, url: Optional[str], **kwargs) -> object: return str.__new__(cls, cls.build(**kwargs) if url is None else url) def __init__( self, url: str, *, scheme: str, user: Optional[str] = None, password: Optional[str] = None, host: Optional[str] = None, tld: Optional[str] = None, host_type: str = 'domain', port: Optional[str] = None, path: Optional[str] = None, query: Optional[str] = None, fragment: Optional[str] = None, ) -> None: str.__init__(url) self.scheme = scheme self.user = user self.password = password self.host = host self.tld = tld self.host_type = host_type self.port = port self.path = path self.query = query self.fragment = fragment @classmethod def build( cls, *, scheme: str, user: Optional[str] = None, password: Optional[str] = None, host: str, port: Optional[str] = None, path: Optional[str] = None, query: Optional[str] = None, fragment: Optional[str] = None, **_kwargs: str, ) -> str: parts = Parts( scheme=scheme, user=user, password=password, host=host, port=port, path=path, query=query, fragment=fragment, **_kwargs, # type: ignore[misc] ) url = scheme + '://' if user: url += user if password: url += ':' + password if user or password: url += '@' url += host if port and ('port' not in
cls.hidden_parts or cls.get_default_parts(parts).get('port') != port): url += ':' + port if path: url += path if query: url += '?' + query if fragment: url += '#' + fragment return url @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri') @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl': if value.__class__ == cls: return value value = str_validator(value) if cls.strip_whitespace: value = value.strip() url: str = cast(str, constr_length_validator(value, field, config)) m = cls._match_url(url) # the regex should always match, if it doesn't please report with details of the URL tried assert m, 'URL regex failed unexpectedly' original_parts = cast('Parts', m.groupdict()) parts = cls.apply_default_parts(original_parts) parts = cls.validate_parts(parts) if m.end() != len(url): raise errors.UrlExtraError(extra=url[m.end() :]) return cls._build_url(m, url, parts) @classmethod def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'AnyUrl': """ Validate hosts and build the AnyUrl object. Split from `validate` so this method can be altered in `MultiHostDsn`. """ host, tld, host_type, rebuild = cls.validate_host(parts) return cls( None if rebuild else url, scheme=parts['scheme'], user=parts['user'], password=parts['password'], host=host, tld=tld, host_type=host_type, port=parts['port'], path=parts['path'], query=parts['query'], fragment=parts['fragment'], ) @staticmethod def _match_url(url: str) -> Optional[Match[str]]: return url_regex().match(url) @staticmethod def _validate_port(port: Optional[str]) -> None: if port is not None and int(port) > 65_535: raise errors.UrlPortError() @classmethod def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': """ A method used to validate parts of a URL. 
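# --- Added illustration (not part of the original pydantic source) ---
# Sketch of `AnyUrl.build` (defined above) assembling a URL string from parts;
# all sample values are invented.
def _demo_anyurl_build() -> None:
    from pydantic import AnyUrl

    url = AnyUrl.build(
        scheme='https', user='u', password='p', host='example.com', port='8080', path='/x', query='q=1'
    )
    assert url == 'https://u:p@example.com:8080/x?q=1'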
Could be overridden to set default values for parts if missing """ scheme = parts['scheme'] if scheme is None: raise errors.UrlSchemeError() if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes: raise errors.UrlSchemePermittedError(set(cls.allowed_schemes)) if validate_port: cls._validate_port(parts['port']) user = parts['user'] if cls.user_required and user is None: raise errors.UrlUserInfoError() return parts @classmethod def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]: tld, host_type, rebuild = None, None, False for f in ('domain', 'ipv4', 'ipv6'): host = parts[f] # type: ignore[literal-required] if host: host_type = f break if host is None: if cls.host_required: raise errors.UrlHostError() elif host_type == 'domain': is_international = False d = ascii_domain_regex().fullmatch(host) if d is None: d = int_domain_regex().fullmatch(host) if d is None: raise errors.UrlHostError() is_international = True tld = d.group('tld') if tld is None and not is_international: d = int_domain_regex().fullmatch(host) assert d is not None tld = d.group('tld') is_international = True if tld is not None: tld = tld[1:] elif cls.tld_required: raise errors.UrlHostTldError() if is_international: host_type = 'int_domain' rebuild = True host = host.encode('idna').decode('ascii') if tld is not None: tld = tld.encode('idna').decode('ascii') return host, tld, host_type, rebuild # type: ignore @staticmethod def get_default_parts(parts: 'Parts') -> 'Parts': return {} @classmethod def apply_default_parts(cls, parts: 'Parts') -> 'Parts': for key, value in cls.get_default_parts(parts).items(): if not parts[key]: # type: ignore[literal-required] parts[key] = value # type: ignore[literal-required] return parts def __repr__(self) -> str: extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None) return f'{self.__class__.__name__}({super().__repr__()}, {extra})' class AnyHttpUrl(AnyUrl): allowed_schemes = {'http', 'https'} __slots__ = () class HttpUrl(AnyHttpUrl): tld_required = True # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers max_length = 2083 hidden_parts = {'port'} @staticmethod def get_default_parts(parts: 'Parts') -> 'Parts': return {'port': '80' if parts['scheme'] == 'http' else '443'} class FileUrl(AnyUrl): allowed_schemes = {'file'} host_required = False __slots__ = () class MultiHostDsn(AnyUrl): __slots__ = AnyUrl.__slots__ + ('hosts',) def __init__(self, *args: Any, hosts: Optional[List['HostParts']] = None, **kwargs: Any): super().__init__(*args, **kwargs) self.hosts = hosts @staticmethod def _match_url(url: str) -> Optional[Match[str]]: return multi_host_url_regex().match(url) @classmethod def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': return super().validate_parts(parts, validate_port=False) @classmethod def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'MultiHostDsn': hosts_parts: List['HostParts'] = [] host_re = host_regex() for host in m.groupdict()['hosts'].split(','): d: Parts = host_re.match(host).groupdict() # type: ignore host, tld, host_type, rebuild = cls.validate_host(d) port = d.get('port') cls._validate_port(port) hosts_parts.append( { 'host': host, 'host_type': host_type, 'tld': tld, 'rebuild': rebuild, 'port': port, } ) if len(hosts_parts) > 1: return cls( None if any([hp['rebuild'] for hp in hosts_parts]) else url, scheme=parts['scheme'], user=parts['user'], password=parts['password'], 
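# --- Added illustration (not part of the original pydantic source) ---
# Sketch of how `HttpUrl` combines `get_default_parts` and `hidden_parts`
# (see above): the scheme's default port is applied as an attribute but not
# re-added to the rebuilt string. Model and field names are invented.
def _demo_http_url_defaults() -> None:
    from pydantic import BaseModel, HttpUrl

    class Site(BaseModel):
        url: HttpUrl

    site = Site(url='https://example.com')
    assert site.url.port == '443'                   # default inferred from the scheme
    assert str(site.url) == 'https://example.com'   # hidden default port not re-added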
path=parts['path'], query=parts['query'], fragment=parts['fragment'], host_type=None, hosts=hosts_parts, ) else: # backwards compatibility with single host host_part = hosts_parts[0] return cls( None if host_part['rebuild'] else url, scheme=parts['scheme'], user=parts['user'], password=parts['password'], host=host_part['host'], tld=host_part['tld'], host_type=host_part['host_type'], port=host_part.get('port'), path=parts['path'], query=parts['query'], fragment=parts['fragment'], ) class PostgresDsn(MultiHostDsn): allowed_schemes = { 'postgres', 'postgresql', 'postgresql+asyncpg', 'postgresql+pg8000', 'postgresql+psycopg', 'postgresql+psycopg2', 'postgresql+psycopg2cffi', 'postgresql+py-postgresql', 'postgresql+pygresql', } user_required = True __slots__ = () class CockroachDsn(AnyUrl): allowed_schemes = { 'cockroachdb', 'cockroachdb+psycopg2', 'cockroachdb+asyncpg', } user_required = True class AmqpDsn(AnyUrl): allowed_schemes = {'amqp', 'amqps'} host_required = False class RedisDsn(AnyUrl): __slots__ = () allowed_schemes = {'redis', 'rediss'} host_required = False @staticmethod def get_default_parts(parts: 'Parts') -> 'Parts': return { 'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '', 'port': '6379', 'path': '/0', } class MongoDsn(AnyUrl): allowed_schemes = {'mongodb'} # TODO: Needed to generic "Parts" for "Replica Set", "Sharded Cluster", and other mongodb deployment modes @staticmethod def get_default_parts(parts: 'Parts') -> 'Parts': return { 'port': '27017', } class KafkaDsn(AnyUrl): allowed_schemes = {'kafka'} @staticmethod def get_default_parts(parts: 'Parts') -> 'Parts': return { 'domain': 'localhost', 'port': '9092', } def stricturl( *, strip_whitespace: bool = True, min_length: int = 1, max_length: int = 2**16, tld_required: bool = True, host_required: bool = True, allowed_schemes: Optional[Collection[str]] = None, ) -> Type[AnyUrl]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict( strip_whitespace=strip_whitespace, min_length=min_length, max_length=max_length, tld_required=tld_required, host_required=host_required, allowed_schemes=allowed_schemes, ) return type('UrlValue', (AnyUrl,), namespace) def import_email_validator() -> None: global email_validator try: import email_validator except ImportError as e: raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e class EmailStr(str): @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='email') @classmethod def __get_validators__(cls) -> 'CallableGenerator': # included here and below so the error happens straight away import_email_validator() yield str_validator yield cls.validate @classmethod def validate(cls, value: Union[str]) -> str: return validate_email(value)[1] class NameEmail(Representation): __slots__ = 'name', 'email' def __init__(self, name: str, email: str): self.name = name self.email = email def __eq__(self, other: Any) -> bool: return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email) @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='name-email') @classmethod def __get_validators__(cls) -> 'CallableGenerator': import_email_validator() yield cls.validate @classmethod def validate(cls, value: Any) -> 'NameEmail': if value.__class__ == cls: return value value = str_validator(value) return cls(*validate_email(value)) def __str__(self) -> str: return 
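# --- Added illustration (not part of the original pydantic source) ---
# Sketch of the multi-host handling above: `PostgresDsn` (a `MultiHostDsn`)
# accepts a comma-separated host list and exposes the parsed entries on
# `.hosts`. The DSN and names are invented.
def _demo_multi_host_dsn() -> None:
    from pydantic import BaseModel, PostgresDsn

    class Db(BaseModel):
        dsn: PostgresDsn

    db = Db(dsn='postgresql://user:pass@host1.db.net:5432,host2.db.net:5432/app')
    # Each entry carries host, host_type, tld, port and rebuild flags.
    print(db.dsn.hosts)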
f'{self.name} <{self.email}>' class IPvAnyAddress(_BaseAddress): __slots__ = () @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='ipvanyaddress') @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]: try: return IPv4Address(value) except ValueError: pass try: return IPv6Address(value) except ValueError: raise errors.IPvAnyAddressError() class IPvAnyInterface(_BaseAddress): __slots__ = () @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='ipvanyinterface') @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]: try: return IPv4Interface(value) except ValueError: pass try: return IPv6Interface(value) except ValueError: raise errors.IPvAnyInterfaceError() class IPvAnyNetwork(_BaseNetwork): # type: ignore @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='ipvanynetwork') @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]: # Assume IP Network is defined with a default value for ``strict`` argument. # Define your own class if you want to specify network address check strictness. try: return IPv4Network(value) except ValueError: pass try: return IPv6Network(value) except ValueError: raise errors.IPvAnyNetworkError() pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *') MAX_EMAIL_LENGTH = 2048 """Maximum length for an email. A somewhat arbitrary but very generous number compared to what is allowed by most implementations. """ def validate_email(value: Union[str]) -> Tuple[str, str]: """ Email address validation using https://pypi.org/project/email-validator/ Notes: * raw ip address (literal) domain parts are not allowed. * "John Doe <local_part@domain.com>" style "pretty" email addresses are processed * spaces are stripped from the beginning and end of addresses but no error is raised """ if email_validator is None: import_email_validator() if len(value) > MAX_EMAIL_LENGTH: raise errors.EmailError() m = pretty_email_regex.fullmatch(value) name: Union[str, None] = None if m: name, value = m.groups() email = value.strip() try: parts = email_validator.validate_email(email, check_deliverability=False) except email_validator.EmailNotValidError as e: raise errors.EmailError from e if hasattr(parts, 'normalized'): # email-validator >= 2 email = parts.normalized assert email is not None name = name or parts.local_part return name, email else: # email-validator >1, <2 at_index = email.index('@') local_part = email[:at_index] # RFC 5321, local part must be case-sensitive.
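# --- Added illustration (not part of the original pydantic source) ---
# Sketch of `validate_email` (above), assuming email-validator is installed.
# Exact normalization can differ slightly between email-validator 1.x and 2.x.
def _demo_validate_email() -> None:
    from pydantic.networks import validate_email

    name, addr = validate_email('John Doe <JDoe@Example.com>')
    # Expected roughly: ('John Doe', 'JDoe@example.com'), i.e. the display
    # name is split off, the domain is lowercased and local-part case is kept.
    print(name, addr)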
global_part = email[at_index:].lower() return name or local_part, local_part + global_part pydantic-1.10.14/pydantic/parse.py000066400000000000000000000034221455251250200167720ustar00rootroot00000000000000import json import pickle from enum import Enum from pathlib import Path from typing import Any, Callable, Union from .types import StrBytes class Protocol(str, Enum): json = 'json' pickle = 'pickle' def load_str_bytes( b: StrBytes, *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, json_loads: Callable[[str], Any] = json.loads, ) -> Any: if proto is None and content_type: if content_type.endswith(('json', 'javascript')): pass elif allow_pickle and content_type.endswith('pickle'): proto = Protocol.pickle else: raise TypeError(f'Unknown content-type: {content_type}') proto = proto or Protocol.json if proto == Protocol.json: if isinstance(b, bytes): b = b.decode(encoding) return json_loads(b) elif proto == Protocol.pickle: if not allow_pickle: raise RuntimeError('Trying to decode with pickle with allow_pickle=False') bb = b if isinstance(b, bytes) else b.encode() return pickle.loads(bb) else: raise TypeError(f'Unknown protocol: {proto}') def load_file( path: Union[str, Path], *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, json_loads: Callable[[str], Any] = json.loads, ) -> Any: path = Path(path) b = path.read_bytes() if content_type is None: if path.suffix in ('.js', '.json'): proto = Protocol.json elif path.suffix == '.pkl': proto = Protocol.pickle return load_str_bytes( b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads ) pydantic-1.10.14/pydantic/py.typed000066400000000000000000000000001455251250200167720ustar00rootroot00000000000000pydantic-1.10.14/pydantic/schema.py000066400000000000000000001347761455251250200171410ustar00rootroot00000000000000import re import warnings from collections import defaultdict from dataclasses import is_dataclass from datetime import date, datetime, time, timedelta from decimal import Decimal from enum import Enum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import ( TYPE_CHECKING, Any, Callable, Dict, ForwardRef, FrozenSet, Generic, Iterable, List, Optional, Pattern, Sequence, Set, Tuple, Type, TypeVar, Union, cast, ) from uuid import UUID from typing_extensions import Annotated, Literal from .fields import ( MAPPING_LIKE_SHAPES, SHAPE_DEQUE, SHAPE_FROZENSET, SHAPE_GENERIC, SHAPE_ITERABLE, SHAPE_LIST, SHAPE_SEQUENCE, SHAPE_SET, SHAPE_SINGLETON, SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS, FieldInfo, ModelField, ) from .json import pydantic_encoder from .networks import AnyUrl, EmailStr from .types import ( ConstrainedDecimal, ConstrainedFloat, ConstrainedFrozenSet, ConstrainedInt, ConstrainedList, ConstrainedSet, ConstrainedStr, SecretBytes, SecretStr, StrictBytes, StrictStr, conbytes, condecimal, confloat, confrozenset, conint, conlist, conset, constr, ) from .typing import ( all_literal_values, get_args, get_origin, get_sub_types, is_callable_type, is_literal_type, is_namedtuple, is_none_type, is_union, ) from .utils import ROOT_KEY, get_model, lenient_issubclass if TYPE_CHECKING: from .dataclasses import Dataclass from .main import BaseModel default_prefix = '#/definitions/' default_ref_template = '#/definitions/{model}' TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]] TypeModelSet = 
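# --- Added illustration (not part of the original pydantic source) ---
# Sketch of protocol selection in `load_str_bytes` from parse.py above:
# JSON is the default, pickle must be explicitly allowed. Sample payloads
# are invented.
def _demo_load_str_bytes() -> None:
    import pickle

    from pydantic.parse import Protocol, load_str_bytes

    assert load_str_bytes('{"a": 1}') == {'a': 1}  # defaults to the JSON protocol
    payload = pickle.dumps({'a': 1})
    assert load_str_bytes(payload, proto=Protocol.pickle, allow_pickle=True) == {'a': 1}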
Set[TypeModelOrEnum] def _apply_modify_schema( modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any] ) -> None: from inspect import signature sig = signature(modify_schema) args = set(sig.parameters.keys()) if 'field' in args or 'kwargs' in args: modify_schema(field_schema, field=field) else: modify_schema(field_schema) def schema( models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]], *, by_alias: bool = True, title: Optional[str] = None, description: Optional[str] = None, ref_prefix: Optional[str] = None, ref_template: str = default_ref_template, ) -> Dict[str, Any]: """ Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions`` top-level JSON key, including their sub-models. :param models: a list of models to include in the generated JSON Schema :param by_alias: generate the schemas using the aliases defined, if any :param title: title for the generated schema that includes the definitions :param description: description for the generated schema :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the top-level key ``definitions``, so you can extract them from there. But all the references will have the set prefix. :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for the models and sub-models passed in ``models``. """ clean_models = [get_model(model) for model in models] flat_models = get_flat_models_from_models(clean_models) model_name_map = get_model_name_map(flat_models) definitions = {} output_schema: Dict[str, Any] = {} if title: output_schema['title'] = title if description: output_schema['description'] = description for model in clean_models: m_schema, m_definitions, m_nested_models = model_process_schema( model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, ) definitions.update(m_definitions) model_name = model_name_map[model] definitions[model_name] = m_schema if definitions: output_schema['definitions'] = definitions return output_schema def model_schema( model: Union[Type['BaseModel'], Type['Dataclass']], by_alias: bool = True, ref_prefix: Optional[str] = None, ref_template: str = default_ref_template, ) -> Dict[str, Any]: """ Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level JSON key. :param model: a Pydantic model (a class that inherits from BaseModel) :param by_alias: generate the schemas using the aliases defined, if any :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the top-level key ``definitions``, so you can extract them from there. But all the references will have the set prefix. 
:param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. :return: dict with the JSON Schema for the passed ``model`` """ model = get_model(model) flat_models = get_flat_models_from_model(model) model_name_map = get_model_name_map(flat_models) model_name = model_name_map[model] m_schema, m_definitions, nested_models = model_process_schema( model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template ) if model_name in nested_models: # model_name is in Nested models, it has circular references m_definitions[model_name] = m_schema m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False) if m_definitions: m_schema.update({'definitions': m_definitions}) return m_schema def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]: # If no title is explicitly set, we don't set title in the schema for enums. # The behaviour is the same as `BaseModel` reference, where the default title # is in the definitions part of the schema. schema_: Dict[str, Any] = {} if field.field_info.title or not lenient_issubclass(field.type_, Enum): schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ') if field.field_info.title: schema_overrides = True if field.field_info.description: schema_['description'] = field.field_info.description schema_overrides = True if not field.required and field.default is not None and not is_callable_type(field.outer_type_): schema_['default'] = encode_default(field.default) schema_overrides = True return schema_, schema_overrides def field_schema( field: ModelField, *, by_alias: bool = True, model_name_map: Dict[TypeModelOrEnum, str], ref_prefix: Optional[str] = None, ref_template: str = default_ref_template, known_models: Optional[TypeModelSet] = None, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ Process a Pydantic field and return a tuple with a JSON Schema for it as the first item. Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field is a model and has sub-models, and those sub-models don't have overrides (as ``title``, ``default``, etc), they will be included in the definitions and referenced in the schema instead of included recursively. :param field: a Pydantic ``ModelField`` :param by_alias: use the defined alias (if any) in the returned schema :param model_name_map: used to generate the JSON Schema references to other models included in the definitions :param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of #/definitions/ will be used :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. 
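# --- Added illustration (not part of the original pydantic source) ---
# Sketch of the top-level `schema()` helper documented above, using a custom
# `ref_template` as one would for OpenAPI. The models are invented.
def _demo_schema_refs() -> None:
    from pydantic import BaseModel
    from pydantic.schema import schema

    class Pet(BaseModel):
        name: str

    class Person(BaseModel):
        pet: Pet

    top = schema([Person], ref_template='#/components/schemas/{model}')
    # Sub-model fields are emitted as references using the template:
    # {'$ref': '#/components/schemas/Pet'}
    print(top['definitions']['Person']['properties']['pet'])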
:param known_models: used to solve circular references :return: tuple of the schema for this field and additional definitions """ s, schema_overrides = get_field_info_schema(field) validation_schema = get_field_schema_validations(field) if validation_schema: s.update(validation_schema) schema_overrides = True f_schema, f_definitions, f_nested_models = field_type_schema( field, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models or set(), ) # $ref will only be returned when there are no schema_overrides if '$ref' in f_schema: return f_schema, f_definitions, f_nested_models else: s.update(f_schema) return s, f_definitions, f_nested_models numeric_types = (int, float, Decimal) _str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( ('max_length', numeric_types, 'maxLength'), ('min_length', numeric_types, 'minLength'), ('regex', str, 'pattern'), ) _numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( ('gt', numeric_types, 'exclusiveMinimum'), ('lt', numeric_types, 'exclusiveMaximum'), ('ge', numeric_types, 'minimum'), ('le', numeric_types, 'maximum'), ('multiple_of', numeric_types, 'multipleOf'), ) def get_field_schema_validations(field: ModelField) -> Dict[str, Any]: """ Get the JSON Schema validation keywords for a ``field`` with an annotation of a Pydantic ``FieldInfo`` with validation arguments. """ f_schema: Dict[str, Any] = {} if lenient_issubclass(field.type_, Enum): # schema is already updated by `enum_process_schema`; just update with field extra if field.field_info.extra: f_schema.update(field.field_info.extra) return f_schema if lenient_issubclass(field.type_, (str, bytes)): for attr_name, t, keyword in _str_types_attrs: attr = getattr(field.field_info, attr_name, None) if isinstance(attr, t): f_schema[keyword] = attr if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool): for attr_name, t, keyword in _numeric_types_attrs: attr = getattr(field.field_info, attr_name, None) if isinstance(attr, t): f_schema[keyword] = attr if field.field_info is not None and field.field_info.const: f_schema['const'] = field.default if field.field_info.extra: f_schema.update(field.field_info.extra) modify_schema = getattr(field.outer_type_, '__modify_schema__', None) if modify_schema: _apply_modify_schema(modify_schema, field, f_schema) return f_schema def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]: """ Process a set of models and generate unique names for them to be used as keys in the JSON Schema definitions. By default the names are the same as the class name. But if two models in different Python modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be based on the Python module path for those conflicting models to prevent name collisions. 
:param unique_models: a Python set of models :return: dict mapping models to names """ name_model_map = {} conflicting_names: Set[str] = set() for model in unique_models: model_name = normalize_name(model.__name__) if model_name in conflicting_names: model_name = get_long_model_name(model) name_model_map[model_name] = model elif model_name in name_model_map: conflicting_names.add(model_name) conflicting_model = name_model_map.pop(model_name) name_model_map[get_long_model_name(conflicting_model)] = conflicting_model name_model_map[get_long_model_name(model)] = model else: name_model_map[model_name] = model return {v: k for k, v in name_model_map.items()} def get_flat_models_from_model(model: Type['BaseModel'], known_models: Optional[TypeModelSet] = None) -> TypeModelSet: """ Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. :param model: a Pydantic ``BaseModel`` subclass :param known_models: used to solve circular references :return: a set with the initial model and all its sub-models """ known_models = known_models or set() flat_models: TypeModelSet = set() flat_models.add(model) known_models |= flat_models fields = cast(Sequence[ModelField], model.__fields__.values()) flat_models |= get_flat_models_from_fields(fields, known_models=known_models) return flat_models def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet: """ Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree. I.e. if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. :param field: a Pydantic ``ModelField`` :param known_models: used to solve circular references :return: a set with the model used in the declaration for this field, if any, and all its sub-models """ from .main import BaseModel flat_models: TypeModelSet = set() field_type = field.type_ if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): field_type = field_type.__pydantic_model__ if field.sub_fields and not lenient_issubclass(field_type, BaseModel): flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models) elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models: flat_models |= get_flat_models_from_model(field_type, known_models=known_models) elif lenient_issubclass(field_type, Enum): flat_models.add(field_type) return flat_models def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet: """ Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel`` (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree. I.e. 
if you pass the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and one of them has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. :param fields: a list of Pydantic ``ModelField``s :param known_models: used to solve circular references :return: a set with any model declared in the fields, and all their sub-models """ flat_models: TypeModelSet = set() for field in fields: flat_models |= get_flat_models_from_field(field, known_models=known_models) return flat_models def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet: """ Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. """ flat_models: TypeModelSet = set() for model in models: flat_models |= get_flat_models_from_model(model) return flat_models def get_long_model_name(model: TypeModelOrEnum) -> str: return f'{model.__module__}__{model.__qualname__}'.replace('.', '__') def field_type_schema( field: ModelField, *, by_alias: bool, model_name_map: Dict[TypeModelOrEnum, str], ref_template: str, schema_overrides: bool = False, ref_prefix: Optional[str] = None, known_models: TypeModelSet, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ Used by ``field_schema()``, you probably should be using that function. Take a single ``field`` and generate the schema for its type only, not including additional information such as title, etc. Also return additional schema definitions, from sub-models.
""" from .main import BaseModel # noqa: F811 definitions = {} nested_models: Set[str] = set() f_schema: Dict[str, Any] if field.shape in { SHAPE_LIST, SHAPE_TUPLE_ELLIPSIS, SHAPE_SEQUENCE, SHAPE_SET, SHAPE_FROZENSET, SHAPE_ITERABLE, SHAPE_DEQUE, }: items_schema, f_definitions, f_nested_models = field_singleton_schema( field, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) definitions.update(f_definitions) nested_models.update(f_nested_models) f_schema = {'type': 'array', 'items': items_schema} if field.shape in {SHAPE_SET, SHAPE_FROZENSET}: f_schema['uniqueItems'] = True elif field.shape in MAPPING_LIKE_SHAPES: f_schema = {'type': 'object'} key_field = cast(ModelField, field.key_field) regex = getattr(key_field.type_, 'regex', None) items_schema, f_definitions, f_nested_models = field_singleton_schema( field, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) definitions.update(f_definitions) nested_models.update(f_nested_models) if regex: # Dict keys have a regex pattern # items_schema might be a schema or empty dict, add it either way f_schema['patternProperties'] = {ConstrainedStr._get_pattern(regex): items_schema} if items_schema: # The dict values are not simply Any, so they need a schema f_schema['additionalProperties'] = items_schema elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)): sub_schema = [] sub_fields = cast(List[ModelField], field.sub_fields) for sf in sub_fields: sf_schema, sf_definitions, sf_nested_models = field_type_schema( sf, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) definitions.update(sf_definitions) nested_models.update(sf_nested_models) sub_schema.append(sf_schema) sub_fields_len = len(sub_fields) if field.shape == SHAPE_GENERIC: all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema} f_schema = {'allOf': [all_of_schemas]} else: f_schema = { 'type': 'array', 'minItems': sub_fields_len, 'maxItems': sub_fields_len, } if sub_fields_len >= 1: f_schema['items'] = sub_schema else: assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape f_schema, f_definitions, f_nested_models = field_singleton_schema( field, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) definitions.update(f_definitions) nested_models.update(f_nested_models) # check field type to avoid repeated calls to the same __modify_schema__ method if field.type_ != field.outer_type_: if field.shape == SHAPE_GENERIC: field_type = field.type_ else: field_type = field.outer_type_ modify_schema = getattr(field_type, '__modify_schema__', None) if modify_schema: _apply_modify_schema(modify_schema, field, f_schema) return f_schema, definitions, nested_models def model_process_schema( model: TypeModelOrEnum, *, by_alias: bool = True, model_name_map: Dict[TypeModelOrEnum, str], ref_prefix: Optional[str] = None, ref_template: str = default_ref_template, known_models: Optional[TypeModelSet] = None, field: Optional[ModelField] = None, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ Used by ``model_schema()``, you probably should be using that function. Take a single ``model`` and generate its schema. 
Also return additional schema definitions, from sub-models. The sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All the definitions are returned as the second value. """ from inspect import getdoc, signature known_models = known_models or set() if lenient_issubclass(model, Enum): model = cast(Type[Enum], model) s = enum_process_schema(model, field=field) return s, {}, set() model = cast(Type['BaseModel'], model) s = {'title': model.__config__.title or model.__name__} doc = getdoc(model) if doc: s['description'] = doc known_models.add(model) m_schema, m_definitions, nested_models = model_type_schema( model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) s.update(m_schema) schema_extra = model.__config__.schema_extra if callable(schema_extra): if len(signature(schema_extra).parameters) == 1: schema_extra(s) else: schema_extra(s, model) else: s.update(schema_extra) return s, m_definitions, nested_models def model_type_schema( model: Type['BaseModel'], *, by_alias: bool, model_name_map: Dict[TypeModelOrEnum, str], ref_template: str, ref_prefix: Optional[str] = None, known_models: TypeModelSet, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ You probably should be using ``model_schema()``, this function is indirectly used by that function. Take a single ``model`` and generate the schema for its type only, not including additional information as title, etc. Also return additional schema definitions, from sub-models. """ properties = {} required = [] definitions: Dict[str, Any] = {} nested_models: Set[str] = set() for k, f in model.__fields__.items(): try: f_schema, f_definitions, f_nested_models = field_schema( f, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) except SkipField as skip: warnings.warn(skip.message, UserWarning) continue definitions.update(f_definitions) nested_models.update(f_nested_models) if by_alias: properties[f.alias] = f_schema if f.required: required.append(f.alias) else: properties[k] = f_schema if f.required: required.append(k) if ROOT_KEY in properties: out_schema = properties[ROOT_KEY] out_schema['title'] = model.__config__.title or model.__name__ else: out_schema = {'type': 'object', 'properties': properties} if required: out_schema['required'] = required if model.__config__.extra == 'forbid': out_schema['additionalProperties'] = False return out_schema, definitions, nested_models def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]: """ Take a single `enum` and generate its schema. This is similar to the `model_process_schema` function, but applies to ``Enum`` objects. """ import inspect schema_: Dict[str, Any] = { 'title': enum.__name__, # Python assigns all enums a default docstring value of 'An enumeration', so # all enums will have a description field even if not explicitly provided. 'description': inspect.cleandoc(enum.__doc__ or 'An enumeration.'), # Add enum values and the enum field type to the schema. 
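# --- Added illustration (not part of the original pydantic source) ---
# Sketch of `enum_process_schema` output for a plain enum; the fallback
# docstring described above becomes the `description`. The enum is invented.
def _demo_enum_schema() -> None:
    from enum import Enum

    from pydantic.schema import enum_process_schema

    class Color(Enum):
        RED = 'red'
        BLUE = 'blue'

    # Expected roughly: {'title': 'Color', 'description': 'An enumeration.',
    #                    'enum': ['red', 'blue']}
    print(enum_process_schema(Color))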
'enum': [item.value for item in cast(Iterable[Enum], enum)], } add_field_type_to_schema(enum, schema_) modify_schema = getattr(enum, '__modify_schema__', None) if modify_schema: _apply_modify_schema(modify_schema, field, schema_) return schema_ def field_singleton_sub_fields_schema( field: ModelField, *, by_alias: bool, model_name_map: Dict[TypeModelOrEnum, str], ref_template: str, schema_overrides: bool = False, ref_prefix: Optional[str] = None, known_models: TypeModelSet, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ This function is indirectly used by ``field_schema()``, you probably should be using that function. Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``. """ sub_fields = cast(List[ModelField], field.sub_fields) definitions = {} nested_models: Set[str] = set() if len(sub_fields) == 1: return field_type_schema( sub_fields[0], by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) else: s: Dict[str, Any] = {} # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object field_has_discriminator: bool = field.discriminator_key is not None if field_has_discriminator: assert field.sub_fields_mapping is not None discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {} for discriminator_value, sub_field in field.sub_fields_mapping.items(): if isinstance(discriminator_value, Enum): discriminator_value = str(discriminator_value.value) # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many if is_union(get_origin(sub_field.type_)): sub_models = get_sub_types(sub_field.type_) discriminator_models_refs[discriminator_value] = { model_name_map[sub_model]: get_schema_ref( model_name_map[sub_model], ref_prefix, ref_template, False ) for sub_model in sub_models } else: sub_field_type = sub_field.type_ if hasattr(sub_field_type, '__pydantic_model__'): sub_field_type = sub_field_type.__pydantic_model__ discriminator_model_name = model_name_map[sub_field_type] discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False) discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref'] s['discriminator'] = { 'propertyName': field.discriminator_alias, 'mapping': discriminator_models_refs, } sub_field_schemas = [] for sf in sub_fields: sub_schema, sub_definitions, sub_nested_models = field_type_schema( sf, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) definitions.update(sub_definitions) if schema_overrides and 'allOf' in sub_schema: # if the sub_field is a referenced schema we only need the referenced # object. Otherwise we will end up with several allOf inside anyOf/oneOf. # See https://github.com/pydantic/pydantic/issues/1209 sub_schema = sub_schema['allOf'][0] if sub_schema.keys() == {'discriminator', 'oneOf'}: # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere sub_schema.pop('discriminator') sub_field_schemas.append(sub_schema) nested_models.update(sub_nested_models) s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas return s, definitions, nested_models # Order is important, e.g. 
subclasses of str must go before str # this is used only for standard library types, custom types should use __modify_schema__ instead field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = ( (Path, {'type': 'string', 'format': 'path'}), (datetime, {'type': 'string', 'format': 'date-time'}), (date, {'type': 'string', 'format': 'date'}), (time, {'type': 'string', 'format': 'time'}), (timedelta, {'type': 'number', 'format': 'time-delta'}), (IPv4Network, {'type': 'string', 'format': 'ipv4network'}), (IPv6Network, {'type': 'string', 'format': 'ipv6network'}), (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}), (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}), (IPv4Address, {'type': 'string', 'format': 'ipv4'}), (IPv6Address, {'type': 'string', 'format': 'ipv6'}), (Pattern, {'type': 'string', 'format': 'regex'}), (str, {'type': 'string'}), (bytes, {'type': 'string', 'format': 'binary'}), (bool, {'type': 'boolean'}), (int, {'type': 'integer'}), (float, {'type': 'number'}), (Decimal, {'type': 'number'}), (UUID, {'type': 'string', 'format': 'uuid'}), (dict, {'type': 'object'}), (list, {'type': 'array', 'items': {}}), (tuple, {'type': 'array', 'items': {}}), (set, {'type': 'array', 'items': {}, 'uniqueItems': True}), (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}), ) json_scheme = {'type': 'string', 'format': 'json-string'} def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None: """ Update the given `schema` with the type-specific metadata for the given `field_type`. This function looks through `field_class_to_schema` for a class that matches the given `field_type`, and then modifies the given `schema` with the information from that type. """ for type_, t_schema in field_class_to_schema: # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern: schema_.update(t_schema) break def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]: if ref_prefix: schema_ref = {'$ref': ref_prefix + name} else: schema_ref = {'$ref': ref_template.format(model=name)} return {'allOf': [schema_ref]} if schema_overrides else schema_ref def field_singleton_schema( # noqa: C901 (ignore complexity) field: ModelField, *, by_alias: bool, model_name_map: Dict[TypeModelOrEnum, str], ref_template: str, schema_overrides: bool = False, ref_prefix: Optional[str] = None, known_models: TypeModelSet, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ This function is indirectly used by ``field_schema()``, you should probably be using that function. Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models. 
""" from .main import BaseModel definitions: Dict[str, Any] = {} nested_models: Set[str] = set() field_type = field.type_ # Recurse into this field if it contains sub_fields and is NOT a # BaseModel OR that BaseModel is a const if field.sub_fields and ( (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel) ): return field_singleton_sub_fields_schema( field, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type: return {}, definitions, nested_models # no restrictions if is_none_type(field_type): return {'type': 'null'}, definitions, nested_models if is_callable_type(field_type): raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.') f_schema: Dict[str, Any] = {} if field.field_info is not None and field.field_info.const: f_schema['const'] = field.default if is_literal_type(field_type): values = tuple(x.value if isinstance(x, Enum) else x for x in all_literal_values(field_type)) if len({v.__class__ for v in values}) > 1: return field_schema( multitypes_literal_field_for_schema(values, field), by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, ) # All values have the same type field_type = values[0].__class__ f_schema['enum'] = list(values) add_field_type_to_schema(field_type, f_schema) elif lenient_issubclass(field_type, Enum): enum_name = model_name_map[field_type] f_schema, schema_overrides = get_field_info_schema(field, schema_overrides) f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides)) definitions[enum_name] = enum_process_schema(field_type, field=field) elif is_namedtuple(field_type): sub_schema, *_ = model_process_schema( field_type.__pydantic_model__, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, field=field, ) items_schemas = list(sub_schema['properties'].values()) f_schema.update( { 'type': 'array', 'items': items_schemas, 'minItems': len(items_schemas), 'maxItems': len(items_schemas), } ) elif not hasattr(field_type, '__pydantic_model__'): add_field_type_to_schema(field_type, f_schema) modify_schema = getattr(field_type, '__modify_schema__', None) if modify_schema: _apply_modify_schema(modify_schema, field, f_schema) if f_schema: return f_schema, definitions, nested_models # Handle dataclass-based models if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): field_type = field_type.__pydantic_model__ if issubclass(field_type, BaseModel): model_name = model_name_map[field_type] if field_type not in known_models: sub_schema, sub_definitions, sub_nested_models = model_process_schema( field_type, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template, known_models=known_models, field=field, ) definitions.update(sub_definitions) definitions[model_name] = sub_schema nested_models.update(sub_nested_models) else: nested_models.add(model_name) schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides) return schema_ref, definitions, nested_models # For generics with no args args = get_args(field_type) if args is not None and not args and Generic in field_type.__bases__: return f_schema, 
definitions, nested_models raise ValueError(f'Value not declarable with JSON Schema, field: {field}') def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField: """ To support `Literal` with values of different types, we split it into multiple `Literal` with same type e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]` """ literal_distinct_types = defaultdict(list) for v in values: literal_distinct_types[v.__class__].append(v) distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values()) return ModelField( name=field.name, type_=Union[tuple(distinct_literals)], # type: ignore class_validators=field.class_validators, model_config=field.model_config, default=field.default, required=field.required, alias=field.alias, field_info=field.field_info, ) def encode_default(dft: Any) -> Any: from .main import BaseModel if isinstance(dft, BaseModel) or is_dataclass(dft): dft = cast('dict[str, Any]', pydantic_encoder(dft)) if isinstance(dft, dict): return {encode_default(k): encode_default(v) for k, v in dft.items()} elif isinstance(dft, Enum): return dft.value elif isinstance(dft, (int, float, str)): return dft elif isinstance(dft, (list, tuple)): t = dft.__class__ seq_args = (encode_default(v) for v in dft) return t(*seq_args) if is_namedtuple(t) else t(seq_args) elif dft is None: return None else: return pydantic_encoder(dft) _map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal} def get_annotation_from_field_info( annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False ) -> Type[Any]: """ Get an annotation with validation implemented for numbers and strings based on the field_info. :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema :param field_name: name of the field for use in error messages :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment :return: the same ``annotation`` if unmodified or a new annotation with validation in place """ constraints = field_info.get_constraints() used_constraints: Set[str] = set() if constraints: annotation, used_constraints = get_annotation_with_constraints(annotation, field_info) if validate_assignment: used_constraints.add('allow_mutation') unused_constraints = constraints - used_constraints if unused_constraints: raise ValueError( f'On field "{field_name}" the following field constraints are set but not enforced: ' f'{", ".join(unused_constraints)}. ' f'\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints' ) return annotation def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901 """ Get an annotation with used constraints implemented for numbers and strings based on the field_info. :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints. 
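# --- Added illustration (not part of the original pydantic source) ---
# Sketch of the constraint mapping implemented above: numeric keywords given
# to `Field` are turned into a constrained type and surface in the JSON
# schema. Model and field names are invented.
def _demo_constraint_mapping() -> None:
    from pydantic import BaseModel, Field

    class Order(BaseModel):
        quantity: int = Field(gt=0, le=100)

    # Expected roughly: {'title': 'Quantity', 'exclusiveMinimum': 0,
    #                    'maximum': 100, 'type': 'integer'}
    print(Order.schema()['properties']['quantity'])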
""" used_constraints: Set[str] = set() def go(type_: Any) -> Type[Any]: if ( is_literal_type(type_) or isinstance(type_, ForwardRef) or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet)) ): return type_ origin = get_origin(type_) if origin is not None: args: Tuple[Any, ...] = get_args(type_) if any(isinstance(a, ForwardRef) for a in args): # forward refs cause infinite recursion below return type_ if origin is Annotated: return go(args[0]) if is_union(origin): return Union[tuple(go(a) for a in args)] # type: ignore if issubclass(origin, List) and ( field_info.min_items is not None or field_info.max_items is not None or field_info.unique_items is not None ): used_constraints.update({'min_items', 'max_items', 'unique_items'}) return conlist( go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items, unique_items=field_info.unique_items, ) if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None): used_constraints.update({'min_items', 'max_items'}) return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None): used_constraints.update({'min_items', 'max_items'}) return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) for t in (Tuple, List, Set, FrozenSet, Sequence): if issubclass(origin, t): # type: ignore return t[tuple(go(a) for a in args)] # type: ignore if issubclass(origin, Dict): return Dict[args[0], go(args[1])] # type: ignore attrs: Optional[Tuple[str, ...]] = None constraint_func: Optional[Callable[..., type]] = None if isinstance(type_, type): if issubclass(type_, (SecretStr, SecretBytes)): attrs = ('max_length', 'min_length') def constraint_func(**kw: Any) -> Type[Any]: return type(type_.__name__, (type_,), kw) elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)): attrs = ('max_length', 'min_length', 'regex') if issubclass(type_, StrictStr): def constraint_func(**kw: Any) -> Type[Any]: return type(type_.__name__, (type_,), kw) else: constraint_func = constr elif issubclass(type_, bytes): attrs = ('max_length', 'min_length', 'regex') if issubclass(type_, StrictBytes): def constraint_func(**kw: Any) -> Type[Any]: return type(type_.__name__, (type_,), kw) else: constraint_func = conbytes elif issubclass(type_, numeric_types) and not issubclass( type_, ( ConstrainedInt, ConstrainedFloat, ConstrainedDecimal, ConstrainedList, ConstrainedSet, ConstrainedFrozenSet, bool, ), ): # Is numeric type attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of') if issubclass(type_, float): attrs += ('allow_inf_nan',) if issubclass(type_, Decimal): attrs += ('max_digits', 'decimal_places') numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch constraint_func = _map_types_constraint[numeric_type] if attrs: used_constraints.update(set(attrs)) kwargs = { attr_name: attr for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs) if attr is not None } if kwargs: constraint_func = cast(Callable[..., type], constraint_func) return constraint_func(**kwargs) return type_ return go(annotation), used_constraints def normalize_name(name: str) -> str: """ Normalizes the given name. This can be applied to either a model *or* enum. """ return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name) class SkipField(Exception): """ Utility exception used to exclude fields from schema. 
""" def __init__(self, message: str) -> None: self.message = message pydantic-1.10.14/pydantic/tools.py000066400000000000000000000054121455251250200170210ustar00rootroot00000000000000import json from functools import lru_cache from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union from .parse import Protocol, load_file, load_str_bytes from .types import StrBytes from .typing import display_as_type __all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of') NameFactory = Union[str, Callable[[Type[Any]], str]] if TYPE_CHECKING: from .typing import DictStrAny def _generate_parsing_type_name(type_: Any) -> str: return f'ParsingModel[{display_as_type(type_)}]' @lru_cache(maxsize=2048) def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any: from .main import create_model if type_name is None: type_name = _generate_parsing_type_name if not isinstance(type_name, str): type_name = type_name(type_) return create_model(type_name, __root__=(type_, ...)) T = TypeVar('T') def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T: model_type = _get_parsing_type(type_, type_name=type_name) # type: ignore[arg-type] return model_type(__root__=obj).__root__ def parse_file_as( type_: Type[T], path: Union[str, Path], *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, json_loads: Callable[[str], Any] = json.loads, type_name: Optional[NameFactory] = None, ) -> T: obj = load_file( path, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads, ) return parse_obj_as(type_, obj, type_name=type_name) def parse_raw_as( type_: Type[T], b: StrBytes, *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, json_loads: Callable[[str], Any] = json.loads, type_name: Optional[NameFactory] = None, ) -> T: obj = load_str_bytes( b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads, ) return parse_obj_as(type_, obj, type_name=type_name) def schema_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_kwargs: Any) -> 'DictStrAny': """Generate a JSON schema (as dict) for the passed model or dynamically generated one""" return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs) def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str: """Generate a JSON schema (as JSON) for the passed model or dynamically generated one""" return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs) pydantic-1.10.14/pydantic/types.py000066400000000000000000001050631455251250200170300ustar00rootroot00000000000000import abc import math import re import warnings from datetime import date from decimal import Decimal, InvalidOperation from enum import Enum from pathlib import Path from types import new_class from typing import ( TYPE_CHECKING, Any, Callable, ClassVar, Dict, FrozenSet, List, Optional, Pattern, Set, Tuple, Type, TypeVar, Union, cast, overload, ) from uuid import UUID from weakref import WeakSet from . 
import errors from .datetime_parse import parse_date from .utils import import_string, update_not_none from .validators import ( bytes_validator, constr_length_validator, constr_lower, constr_strip_whitespace, constr_upper, decimal_validator, float_finite_validator, float_validator, frozenset_validator, int_validator, list_validator, number_multiple_validator, number_size_validator, path_exists_validator, path_validator, set_validator, str_validator, strict_bytes_validator, strict_float_validator, strict_int_validator, strict_str_validator, ) __all__ = [ 'NoneStr', 'NoneBytes', 'StrBytes', 'NoneStrBytes', 'StrictStr', 'ConstrainedBytes', 'conbytes', 'ConstrainedList', 'conlist', 'ConstrainedSet', 'conset', 'ConstrainedFrozenSet', 'confrozenset', 'ConstrainedStr', 'constr', 'PyObject', 'ConstrainedInt', 'conint', 'PositiveInt', 'NegativeInt', 'NonNegativeInt', 'NonPositiveInt', 'ConstrainedFloat', 'confloat', 'PositiveFloat', 'NegativeFloat', 'NonNegativeFloat', 'NonPositiveFloat', 'FiniteFloat', 'ConstrainedDecimal', 'condecimal', 'UUID1', 'UUID3', 'UUID4', 'UUID5', 'FilePath', 'DirectoryPath', 'Json', 'JsonWrapper', 'SecretField', 'SecretStr', 'SecretBytes', 'StrictBool', 'StrictBytes', 'StrictInt', 'StrictFloat', 'PaymentCardNumber', 'ByteSize', 'PastDate', 'FutureDate', 'ConstrainedDate', 'condate', ] NoneStr = Optional[str] NoneBytes = Optional[bytes] StrBytes = Union[str, bytes] NoneStrBytes = Optional[StrBytes] OptionalInt = Optional[int] OptionalIntFloat = Union[OptionalInt, float] OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal] OptionalDate = Optional[date] StrIntFloat = Union[str, int, float] if TYPE_CHECKING: from typing_extensions import Annotated from .dataclasses import Dataclass from .main import BaseModel from .typing import CallableGenerator ModelOrDc = Type[Union[BaseModel, Dataclass]] T = TypeVar('T') _DEFINED_TYPES: 'WeakSet[type]' = WeakSet() @overload def _registered(typ: Type[T]) -> Type[T]: pass @overload def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta': pass def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']: # In order to generate valid examples of constrained types, Hypothesis needs # to inspect the type object - so we keep a weakref to each contype object # until it can be registered. When (or if) our Hypothesis plugin is loaded, # it monkeypatches this function. # If Hypothesis is never used, the total effect is to keep a weak reference # which has minimal memory usage and doesn't even affect garbage collection. _DEFINED_TYPES.add(typ) return typ class ConstrainedNumberMeta(type): def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) if new_cls.gt is not None and new_cls.ge is not None: raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') if new_cls.lt is not None and new_cls.le is not None: raise errors.ConfigError('bounds lt and le cannot be specified at the same time') return _registered(new_cls) # type: ignore # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if TYPE_CHECKING: StrictBool = bool else: class StrictBool(int): """ StrictBool to allow for bools which are not type-coerced. 
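    Illustrative example (a sketch, not part of the original docstring)::

        from pydantic import BaseModel, StrictBool

        class Flag(BaseModel):
            enabled: StrictBool

        Flag(enabled=True)  # OK
        Flag(enabled=1)     # ValidationError - ints are not coerced to bool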
""" @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='boolean') @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Any) -> bool: """ Ensure that we only allow bools. """ if isinstance(value, bool): return value raise errors.StrictBoolError() # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class ConstrainedInt(int, metaclass=ConstrainedNumberMeta): strict: bool = False gt: OptionalInt = None ge: OptionalInt = None lt: OptionalInt = None le: OptionalInt = None multiple_of: OptionalInt = None @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none( field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le, multipleOf=cls.multiple_of, ) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield strict_int_validator if cls.strict else int_validator yield number_size_validator yield number_multiple_validator def conint( *, strict: bool = False, gt: Optional[int] = None, ge: Optional[int] = None, lt: Optional[int] = None, le: Optional[int] = None, multiple_of: Optional[int] = None, ) -> Type[int]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) return type('ConstrainedIntValue', (ConstrainedInt,), namespace) if TYPE_CHECKING: PositiveInt = int NegativeInt = int NonPositiveInt = int NonNegativeInt = int StrictInt = int else: class PositiveInt(ConstrainedInt): gt = 0 class NegativeInt(ConstrainedInt): lt = 0 class NonPositiveInt(ConstrainedInt): le = 0 class NonNegativeInt(ConstrainedInt): ge = 0 class StrictInt(ConstrainedInt): strict = True # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta): strict: bool = False gt: OptionalIntFloat = None ge: OptionalIntFloat = None lt: OptionalIntFloat = None le: OptionalIntFloat = None multiple_of: OptionalIntFloat = None allow_inf_nan: Optional[bool] = None @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none( field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le, multipleOf=cls.multiple_of, ) # Modify constraints to account for differences between IEEE floats and JSON if field_schema.get('exclusiveMinimum') == -math.inf: del field_schema['exclusiveMinimum'] if field_schema.get('minimum') == -math.inf: del field_schema['minimum'] if field_schema.get('exclusiveMaximum') == math.inf: del field_schema['exclusiveMaximum'] if field_schema.get('maximum') == math.inf: del field_schema['maximum'] @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield strict_float_validator if cls.strict else float_validator yield number_size_validator yield number_multiple_validator yield float_finite_validator def confloat( *, strict: bool = False, gt: float = None, ge: float = None, lt: float = None, le: float = None, multiple_of: float = None, allow_inf_nan: Optional[bool] = None, ) -> Type[float]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan) return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace) if TYPE_CHECKING: PositiveFloat = float NegativeFloat = float NonPositiveFloat = float NonNegativeFloat 
= float StrictFloat = float FiniteFloat = float else: class PositiveFloat(ConstrainedFloat): gt = 0 class NegativeFloat(ConstrainedFloat): lt = 0 class NonPositiveFloat(ConstrainedFloat): le = 0 class NonNegativeFloat(ConstrainedFloat): ge = 0 class StrictFloat(ConstrainedFloat): strict = True class FiniteFloat(ConstrainedFloat): allow_inf_nan = False # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class ConstrainedBytes(bytes): strip_whitespace = False to_upper = False to_lower = False min_length: OptionalInt = None max_length: OptionalInt = None strict: bool = False @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield strict_bytes_validator if cls.strict else bytes_validator yield constr_strip_whitespace yield constr_upper yield constr_lower yield constr_length_validator def conbytes( *, strip_whitespace: bool = False, to_upper: bool = False, to_lower: bool = False, min_length: Optional[int] = None, max_length: Optional[int] = None, strict: bool = False, ) -> Type[bytes]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict( strip_whitespace=strip_whitespace, to_upper=to_upper, to_lower=to_lower, min_length=min_length, max_length=max_length, strict=strict, ) return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace)) if TYPE_CHECKING: StrictBytes = bytes else: class StrictBytes(ConstrainedBytes): strict = True # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class ConstrainedStr(str): strip_whitespace = False to_upper = False to_lower = False min_length: OptionalInt = None max_length: OptionalInt = None curtail_length: OptionalInt = None regex: Optional[Union[str, Pattern[str]]] = None strict = False @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none( field_schema, minLength=cls.min_length, maxLength=cls.max_length, pattern=cls.regex and cls._get_pattern(cls.regex), ) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield strict_str_validator if cls.strict else str_validator yield constr_strip_whitespace yield constr_upper yield constr_lower yield constr_length_validator yield cls.validate @classmethod def validate(cls, value: Union[str]) -> Union[str]: if cls.curtail_length and len(value) > cls.curtail_length: value = value[: cls.curtail_length] if cls.regex: if not re.match(cls.regex, value): raise errors.StrRegexError(pattern=cls._get_pattern(cls.regex)) return value @staticmethod def _get_pattern(regex: Union[str, Pattern[str]]) -> str: return regex if isinstance(regex, str) else regex.pattern def constr( *, strip_whitespace: bool = False, to_upper: bool = False, to_lower: bool = False, strict: bool = False, min_length: Optional[int] = None, max_length: Optional[int] = None, curtail_length: Optional[int] = None, regex: Optional[str] = None, ) -> Type[str]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict( strip_whitespace=strip_whitespace, to_upper=to_upper, to_lower=to_lower, strict=strict, min_length=min_length, max_length=max_length, curtail_length=curtail_length, regex=regex and re.compile(regex), ) return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace)) if TYPE_CHECKING: StrictStr = str else: class StrictStr(ConstrainedStr): strict = True # 
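# Illustrative sketch (not part of the source above): conint/confloat/constr
# build one-off constrained subclasses; `Product` and its fields are
# hypothetical names used only for this example.
from pydantic import BaseModel, confloat, conint, constr

class Product(BaseModel):
    quantity: conint(gt=0, le=100)
    price: confloat(ge=0, allow_inf_nan=False)
    sku: constr(strip_whitespace=True, regex=r'^[A-Z]{3}-\d{4}$')

Product(quantity=3, price=9.99, sku='ABC-1234')  # OK
Product(quantity=0, price=9.99, sku='ABC-1234')  # ValidationError: quantity must be > 0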
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # This type's superclass should be Set[T], but cython chokes on that... class ConstrainedSet(set): # type: ignore # Needed for pydantic to detect that this is a set __origin__ = set __args__: Set[Type[T]] # type: ignore min_items: Optional[int] = None max_items: Optional[int] = None item_type: Type[T] # type: ignore @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.set_length_validator @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) @classmethod def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]': if v is None: return None v = set_validator(v) v_len = len(v) if cls.min_items is not None and v_len < cls.min_items: raise errors.SetMinLengthError(limit_value=cls.min_items) if cls.max_items is not None and v_len > cls.max_items: raise errors.SetMaxLengthError(limit_value=cls.max_items) return v def conset(item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None) -> Type[Set[T]]: # __args__ is needed to conform to typing generics api namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} # We use new_class to be able to deal with Generic types return new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace)) # This type's superclass should be FrozenSet[T], but cython chokes on that... class ConstrainedFrozenSet(frozenset): # type: ignore # Needed for pydantic to detect that this is a set __origin__ = frozenset __args__: FrozenSet[Type[T]] # type: ignore min_items: Optional[int] = None max_items: Optional[int] = None item_type: Type[T] # type: ignore @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.frozenset_length_validator @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) @classmethod def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]': if v is None: return None v = frozenset_validator(v) v_len = len(v) if cls.min_items is not None and v_len < cls.min_items: raise errors.FrozenSetMinLengthError(limit_value=cls.min_items) if cls.max_items is not None and v_len > cls.max_items: raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items) return v def confrozenset( item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None ) -> Type[FrozenSet[T]]: # __args__ is needed to conform to typing generics api namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} # We use new_class to be able to deal with Generic types return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace)) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # This type's superclass should be List[T], but cython chokes on that... class ConstrainedList(list): # type: ignore # Needed for pydantic to detect that this is a list __origin__ = list __args__: Tuple[Type[T], ...]
# type: ignore min_items: Optional[int] = None max_items: Optional[int] = None unique_items: Optional[bool] = None item_type: Type[T] # type: ignore @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.list_length_validator if cls.unique_items: yield cls.unique_items_validator @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items) @classmethod def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]': if v is None: return None v = list_validator(v) v_len = len(v) if cls.min_items is not None and v_len < cls.min_items: raise errors.ListMinLengthError(limit_value=cls.min_items) if cls.max_items is not None and v_len > cls.max_items: raise errors.ListMaxLengthError(limit_value=cls.max_items) return v @classmethod def unique_items_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]': if v is None: return None for i, value in enumerate(v, start=1): if value in v[i:]: raise errors.ListUniqueItemsError() return v def conlist( item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: bool = None ) -> Type[List[T]]: # __args__ is needed to conform to typing generics api namespace = dict( min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,) ) # We use new_class to be able to deal with Generic types return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace)) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if TYPE_CHECKING: PyObject = Callable[..., Any] else: class PyObject: validate_always = True @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Any) -> Any: if isinstance(value, Callable): return value try: value = str_validator(value) except errors.StrError: raise errors.PyObjectError(error_message='value is neither a valid import path not a valid callable') try: return import_string(value) except ImportError as e: raise errors.PyObjectError(error_message=str(e)) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta): gt: OptionalIntFloatDecimal = None ge: OptionalIntFloatDecimal = None lt: OptionalIntFloatDecimal = None le: OptionalIntFloatDecimal = None max_digits: OptionalInt = None decimal_places: OptionalInt = None multiple_of: OptionalIntFloatDecimal = None @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none( field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le, multipleOf=cls.multiple_of, ) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield decimal_validator yield number_size_validator yield number_multiple_validator yield cls.validate @classmethod def validate(cls, value: Decimal) -> Decimal: try: normalized_value = value.normalize() except InvalidOperation: normalized_value = value digit_tuple, exponent = normalized_value.as_tuple()[1:] if exponent in {'F', 'n', 'N'}: raise errors.DecimalIsNotFiniteError() if exponent >= 0: # A positive exponent adds that many trailing zeros. 
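# Illustrative sketch (not part of the source above): constrained collections
# built with conlist/conset; `Basket` is a hypothetical model.
from pydantic import BaseModel, conlist, conset

class Basket(BaseModel):
    fruit: conlist(str, min_items=1, max_items=3, unique_items=True)
    tags: conset(str, max_items=2)

Basket(fruit=['apple'], tags={'new'})           # OK
Basket(fruit=['apple', 'apple'], tags={'new'})  # ValidationError: items are not unique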
digits = len(digit_tuple) + exponent decimals = 0 else: # If the absolute value of the negative exponent is larger than the # number of digits, then it's the same as the number of digits, # because it'll consume all of the digits in digit_tuple and then # add abs(exponent) - len(digit_tuple) leading zeros after the # decimal point. if abs(exponent) > len(digit_tuple): digits = decimals = abs(exponent) else: digits = len(digit_tuple) decimals = abs(exponent) whole_digits = digits - decimals if cls.max_digits is not None and digits > cls.max_digits: raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits) if cls.decimal_places is not None and decimals > cls.decimal_places: raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places) if cls.max_digits is not None and cls.decimal_places is not None: expected = cls.max_digits - cls.decimal_places if whole_digits > expected: raise errors.DecimalWholeDigitsError(whole_digits=expected) return value def condecimal( *, gt: Decimal = None, ge: Decimal = None, lt: Decimal = None, le: Decimal = None, max_digits: Optional[int] = None, decimal_places: Optional[int] = None, multiple_of: Decimal = None, ) -> Type[Decimal]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict( gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of ) return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if TYPE_CHECKING: UUID1 = UUID UUID3 = UUID UUID4 = UUID UUID5 = UUID else: class UUID1(UUID): _required_version = 1 @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format=f'uuid{cls._required_version}') class UUID3(UUID1): _required_version = 3 class UUID4(UUID1): _required_version = 4 class UUID5(UUID1): _required_version = 5 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if TYPE_CHECKING: FilePath = Path DirectoryPath = Path else: class FilePath(Path): @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(format='file-path') @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield path_validator yield path_exists_validator yield cls.validate @classmethod def validate(cls, value: Path) -> Path: if not value.is_file(): raise errors.PathNotAFileError(path=value) return value class DirectoryPath(Path): @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(format='directory-path') @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield path_validator yield path_exists_validator yield cls.validate @classmethod def validate(cls, value: Path) -> Path: if not value.is_dir(): raise errors.PathNotADirectoryError(path=value) return value # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class JsonWrapper: pass class JsonMeta(type): def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]: if t is Any: return Json # allow Json[Any] to replecate plain Json return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t})) if TYPE_CHECKING: Json = Annotated[T, ...] 
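# Illustrative sketch (not part of the source above) of the digit bookkeeping
# in ConstrainedDecimal.validate: max_digits bounds the total number of
# significant digits, decimal_places the digits after the point. `Invoice`
# is a hypothetical model.
from decimal import Decimal
from pydantic import BaseModel, condecimal

class Invoice(BaseModel):
    amount: condecimal(max_digits=6, decimal_places=2)

Invoice(amount=Decimal('1234.56'))   # OK: 6 digits in total, 2 decimal places
Invoice(amount=Decimal('12345.67'))  # ValidationError: 7 digits > max_digits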
# Json[list[str]] will be recognized by type checkers as list[str] else: class Json(metaclass=JsonMeta): @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: field_schema.update(type='string', format='json-string') # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class SecretField(abc.ABC): """ Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`, the `__init__()` should be part of the abstract class and the `get_secret_value()` method should use the generic `T` type. However Cython doesn't support very well generics at the moment and the generated code fails to be imported (see https://github.com/cython/cython/issues/2753). """ def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value() def __str__(self) -> str: return '**********' if self.get_secret_value() else '' def __hash__(self) -> int: return hash(self.get_secret_value()) @abc.abstractmethod def get_secret_value(self) -> Any: # pragma: no cover ... class SecretStr(SecretField): min_length: OptionalInt = None max_length: OptionalInt = None @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none( field_schema, type='string', writeOnly=True, format='password', minLength=cls.min_length, maxLength=cls.max_length, ) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate yield constr_length_validator @classmethod def validate(cls, value: Any) -> 'SecretStr': if isinstance(value, cls): return value value = str_validator(value) return cls(value) def __init__(self, value: str): self._secret_value = value def __repr__(self) -> str: return f"SecretStr('{self}')" def __len__(self) -> int: return len(self._secret_value) def display(self) -> str: warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning) return str(self) def get_secret_value(self) -> str: return self._secret_value class SecretBytes(SecretField): min_length: OptionalInt = None max_length: OptionalInt = None @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none( field_schema, type='string', writeOnly=True, format='password', minLength=cls.min_length, maxLength=cls.max_length, ) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate yield constr_length_validator @classmethod def validate(cls, value: Any) -> 'SecretBytes': if isinstance(value, cls): return value value = bytes_validator(value) return cls(value) def __init__(self, value: bytes): self._secret_value = value def __repr__(self) -> str: return f"SecretBytes(b'{self}')" def __len__(self) -> int: return len(self._secret_value) def display(self) -> str: warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning) return str(self) def get_secret_value(self) -> bytes: return self._secret_value # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ class PaymentCardBrand(str, Enum): # If you add another card type, please also add it to the # Hypothesis strategy in `pydantic._hypothesis_plugin`. 
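# Illustrative sketch (not part of the source above): Json[...] parses a JSON
# string into the inner type, and secret values are masked everywhere except
# get_secret_value(). `Credentials` is a hypothetical model.
from typing import List
from pydantic import BaseModel, Json, SecretStr

class Credentials(BaseModel):
    password: SecretStr
    scopes: Json[List[str]]

c = Credentials(password='hunter2', scopes='["read", "write"]')
str(c.password)                # '**********'
c.password.get_secret_value()  # 'hunter2'
c.scopes                       # ['read', 'write']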
amex = 'American Express' mastercard = 'Mastercard' visa = 'Visa' other = 'other' def __str__(self) -> str: return self.value class PaymentCardNumber(str): """ Based on: https://en.wikipedia.org/wiki/Payment_card_number """ strip_whitespace: ClassVar[bool] = True min_length: ClassVar[int] = 12 max_length: ClassVar[int] = 19 bin: str last4: str brand: PaymentCardBrand def __init__(self, card_number: str): self.bin = card_number[:6] self.last4 = card_number[-4:] self.brand = self._get_brand(card_number) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield str_validator yield constr_strip_whitespace yield constr_length_validator yield cls.validate_digits yield cls.validate_luhn_check_digit yield cls yield cls.validate_length_for_brand @property def masked(self) -> str: num_masked = len(self) - 10 # len(bin) + len(last4) == 10 return f'{self.bin}{"*" * num_masked}{self.last4}' @classmethod def validate_digits(cls, card_number: str) -> str: if not card_number.isdigit(): raise errors.NotDigitError return card_number @classmethod def validate_luhn_check_digit(cls, card_number: str) -> str: """ Based on: https://en.wikipedia.org/wiki/Luhn_algorithm """ sum_ = int(card_number[-1]) length = len(card_number) parity = length % 2 for i in range(length - 1): digit = int(card_number[i]) if i % 2 == parity: digit *= 2 if digit > 9: digit -= 9 sum_ += digit valid = sum_ % 10 == 0 if not valid: raise errors.LuhnValidationError return card_number @classmethod def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber': """ Validate length based on BIN for major brands: https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN) """ required_length: Union[None, int, str] = None if card_number.brand in PaymentCardBrand.mastercard: required_length = 16 valid = len(card_number) == required_length elif card_number.brand == PaymentCardBrand.visa: required_length = '13, 16 or 19' valid = len(card_number) in {13, 16, 19} elif card_number.brand == PaymentCardBrand.amex: required_length = 15 valid = len(card_number) == required_length else: valid = True if not valid: raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length) return card_number @staticmethod def _get_brand(card_number: str) -> PaymentCardBrand: if card_number[0] == '4': brand = PaymentCardBrand.visa elif 51 <= int(card_number[:2]) <= 55: brand = PaymentCardBrand.mastercard elif card_number[:2] in {'34', '37'}: brand = PaymentCardBrand.amex else: brand = PaymentCardBrand.other return brand # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE_SIZES = { 'b': 1, 'kb': 10**3, 'mb': 10**6, 'gb': 10**9, 'tb': 10**12, 'pb': 10**15, 'eb': 10**18, 'kib': 2**10, 'mib': 2**20, 'gib': 2**30, 'tib': 2**40, 'pib': 2**50, 'eib': 2**60, } BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) class ByteSize(int): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, v: StrIntFloat) -> 'ByteSize': try: return cls(int(v)) except ValueError: pass str_match = byte_string_re.match(str(v)) if str_match is None: raise errors.InvalidByteSize() scalar, unit = str_match.groups() if unit is None: unit = 'b' try: unit_mult = BYTE_SIZES[unit.lower()] except KeyError: raise errors.InvalidByteSizeUnit(unit=unit) return cls(int(float(scalar) * unit_mult)) def human_readable(self, 
decimal: bool = False) -> str: if decimal: divisor = 1000 units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] final_unit = 'EB' else: divisor = 1024 units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] final_unit = 'EiB' num = float(self) for unit in units: if abs(num) < divisor: return f'{num:0.1f}{unit}' num /= divisor return f'{num:0.1f}{final_unit}' def to(self, unit: str) -> float: try: unit_div = BYTE_SIZES[unit.lower()] except KeyError: raise errors.InvalidByteSizeUnit(unit=unit) return self / unit_div # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if TYPE_CHECKING: PastDate = date FutureDate = date else: class PastDate(date): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield parse_date yield cls.validate @classmethod def validate(cls, value: date) -> date: if value >= date.today(): raise errors.DateNotInThePastError() return value class FutureDate(date): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield parse_date yield cls.validate @classmethod def validate(cls, value: date) -> date: if value <= date.today(): raise errors.DateNotInTheFutureError() return value class ConstrainedDate(date, metaclass=ConstrainedNumberMeta): gt: OptionalDate = None ge: OptionalDate = None lt: OptionalDate = None le: OptionalDate = None @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield parse_date yield number_size_validator def condate( *, gt: date = None, ge: date = None, lt: date = None, le: date = None, ) -> Type[date]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict(gt=gt, ge=ge, lt=lt, le=le) return type('ConstrainedDateValue', (ConstrainedDate,), namespace) pydantic-1.10.14/pydantic/typing.py000066400000000000000000000450641455251250200172020ustar00rootroot00000000000000import sys import typing from collections.abc import Callable from os import PathLike from typing import ( # type: ignore TYPE_CHECKING, AbstractSet, Any, Callable as TypingCallable, ClassVar, Dict, ForwardRef, Generator, Iterable, List, Mapping, NewType, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, _eval_type, cast, get_type_hints, ) from typing_extensions import ( Annotated, Final, Literal, NotRequired as TypedDictNotRequired, Required as TypedDictRequired, ) try: from typing import _TypingBase as typing_base # type: ignore except ImportError: from typing import _Final as typing_base # type: ignore try: from typing import GenericAlias as TypingGenericAlias # type: ignore except ImportError: # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on) TypingGenericAlias = () try: from types import UnionType as TypesUnionType # type: ignore except ImportError: # python < 3.10 does not have UnionType (str | int, byte | bool and so on) TypesUnionType = () if sys.version_info < (3, 9): def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: return type_._evaluate(globalns, localns) else: def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: # Even though it is the right signature for python 3.9, mypy complains with # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast... 
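# Illustrative sketch (not part of the source above) covering the
# PaymentCardNumber and ByteSize types defined here; `Order` is a hypothetical
# model and '4000000000000002' is a Luhn-valid Visa test number.
from pydantic import BaseModel, ByteSize, PaymentCardNumber

class Order(BaseModel):
    card: PaymentCardNumber
    attachment_limit: ByteSize

o = Order(card='4000000000000002', attachment_limit='1.5 GiB')
o.card.brand                         # PaymentCardBrand.visa (first digit is '4')
o.card.masked                        # '400000******0002' (bin + masked middle + last4)
int(o.attachment_limit)              # 1610612736, i.e. 1.5 * 2**30
o.attachment_limit.human_readable()  # '1.5GiB'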
return cast(Any, type_)._evaluate(globalns, localns, set()) if sys.version_info < (3, 9): # Ensure we always get all the whole `Annotated` hint, not just the annotated type. # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`, # so it already returns the full annotation get_all_type_hints = get_type_hints else: def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any: return get_type_hints(obj, globalns, localns, include_extras=True) _T = TypeVar('_T') AnyCallable = TypingCallable[..., Any] NoArgAnyCallable = TypingCallable[[], Any] # workaround for https://github.com/python/mypy/issues/9496 AnyArgTCallable = TypingCallable[..., _T] # Annotated[...] is implemented by returning an instance of one of these classes, depending on # python/typing_extensions version. AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'} LITERAL_TYPES: Set[Any] = {Literal} if hasattr(typing, 'Literal'): LITERAL_TYPES.add(typing.Literal) if sys.version_info < (3, 8): def get_origin(t: Type[Any]) -> Optional[Type[Any]]: if type(t).__name__ in AnnotatedTypeNames: # weirdly this is a runtime requirement, as well as for mypy return cast(Type[Any], Annotated) return getattr(t, '__origin__', None) else: from typing import get_origin as _typing_get_origin def get_origin(tp: Type[Any]) -> Optional[Type[Any]]: """ We can't directly use `typing.get_origin` since we need a fallback to support custom generic classes like `ConstrainedList` It should be useless once https://github.com/cython/cython/issues/3537 is solved and https://github.com/pydantic/pydantic/pull/1753 is merged. """ if type(tp).__name__ in AnnotatedTypeNames: return cast(Type[Any], Annotated) # mypy complains about _SpecialForm return _typing_get_origin(tp) or getattr(tp, '__origin__', None) if sys.version_info < (3, 8): from typing import _GenericAlias def get_args(t: Type[Any]) -> Tuple[Any, ...]: """Compatibility version of get_args for python 3.7. Mostly compatible with the python 3.8 `typing` module version and able to handle almost all use cases. """ if type(t).__name__ in AnnotatedTypeNames: return t.__args__ + t.__metadata__ if isinstance(t, _GenericAlias): res = t.__args__ if t.__origin__ is Callable and res and res[0] is not Ellipsis: res = (list(res[:-1]), res[-1]) return res return getattr(t, '__args__', ()) else: from typing import get_args as _typing_get_args def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]: """ In python 3.9, `typing.Dict`, `typing.List`, ... do have an empty `__args__` by default (instead of the generic ~T for example). In order to still support `Dict` for example and consider it as `Dict[Any, Any]`, we retrieve the `_nparams` value that tells us how many parameters it needs. """ if hasattr(tp, '_nparams'): return (Any,) * tp._nparams # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple` # in python 3.10- but now returns () for `tuple` and `Tuple`. # This will probably be clarified in pydantic v2 try: if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]: # type: ignore[misc] return ((),) # there is a TypeError when compiled with cython except TypeError: # pragma: no cover pass return () def get_args(tp: Type[Any]) -> Tuple[Any, ...]: """Get type arguments with all substitutions performed. For unions, basic simplifications used by Union constructor are performed. 
Examples:: get_args(Dict[str, int]) == (str, int) get_args(int) == () get_args(Union[int, Union[T, int], str][int]) == (int, str) get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) get_args(Callable[[], T][int]) == ([], int) """ if type(tp).__name__ in AnnotatedTypeNames: return tp.__args__ + tp.__metadata__ # the fallback is needed for the same reasons as `get_origin` (see above) return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp) if sys.version_info < (3, 9): def convert_generics(tp: Type[Any]) -> Type[Any]: """Python 3.9 and older only supports generics from `typing` module. They convert strings to ForwardRef automatically. Examples:: typing.List['Hero'] == typing.List[ForwardRef('Hero')] """ return tp else: from typing import _UnionGenericAlias # type: ignore from typing_extensions import _AnnotatedAlias def convert_generics(tp: Type[Any]) -> Type[Any]: """ Recursively searches for `str` type hints and replaces them with ForwardRef. Examples:: convert_generics(list['Hero']) == list[ForwardRef('Hero')] convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')] convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')] convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int """ origin = get_origin(tp) if not origin or not hasattr(tp, '__args__'): return tp args = get_args(tp) # typing.Annotated needs special treatment if origin is Annotated: return _AnnotatedAlias(convert_generics(args[0]), args[1:]) # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)` converted = tuple( ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg) for arg in args ) if converted == args: return tp elif isinstance(tp, TypingGenericAlias): return TypingGenericAlias(origin, converted) elif isinstance(tp, TypesUnionType): # recreate types.UnionType (PEP604, Python >= 3.10) return _UnionGenericAlias(origin, converted) else: try: setattr(tp, '__args__', converted) except AttributeError: pass return tp if sys.version_info < (3, 10): def is_union(tp: Optional[Type[Any]]) -> bool: return tp is Union WithArgsTypes = (TypingGenericAlias,) else: import types import typing def is_union(tp: Optional[Type[Any]]) -> bool: return tp is Union or tp is types.UnionType # noqa: E721 WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType) StrPath = Union[str, PathLike] if TYPE_CHECKING: from .fields import ModelField TupleGenerator = Generator[Tuple[str, Any], None, None] DictStrAny = Dict[str, Any] DictAny = Dict[Any, Any] SetStr = Set[str] ListStr = List[str] IntStr = Union[int, str] AbstractSetIntStr = AbstractSet[IntStr] DictIntStrAny = Dict[IntStr, Any] MappingIntStrAny = Mapping[IntStr, Any] CallableGenerator = Generator[AnyCallable, None, None] ReprArgs = Sequence[Tuple[Optional[str], Any]] MYPY = False if MYPY: AnyClassMethod = classmethod[Any] else: # classmethod[TargetType, CallableParamSpecType, CallableReturnType] AnyClassMethod = classmethod[Any, Any, Any] __all__ = ( 'AnyCallable', 'NoArgAnyCallable', 'NoneType', 'is_none_type', 'display_as_type', 'resolve_annotations', 'is_callable_type', 'is_literal_type', 'all_literal_values', 'is_namedtuple', 'is_typeddict', 'is_typeddict_special', 'is_new_type', 'new_type_supertype', 'is_classvar', 'is_finalvar', 'update_field_forward_refs', 'update_model_forward_refs', 'TupleGenerator', 'DictStrAny', 'DictAny', 'SetStr', 'ListStr', 
'IntStr', 'AbstractSetIntStr', 'DictIntStrAny', 'CallableGenerator', 'ReprArgs', 'AnyClassMethod', 'WithArgsTypes', 'get_args', 'get_origin', 'get_sub_types', 'typing_base', 'get_all_type_hints', 'is_union', 'StrPath', 'MappingIntStrAny', ) NoneType = None.__class__ NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None]) if sys.version_info < (3, 8): # Even though this implementation is slower, we need it for python 3.7: # In python 3.7 "Literal" is not a builtin type and uses a different # mechanism. # For this reason `Literal[None] is Literal[None]` evaluates to `False`, # breaking the faster implementation used for the other python versions. def is_none_type(type_: Any) -> bool: return type_ in NONE_TYPES elif sys.version_info[:2] == (3, 8): def is_none_type(type_: Any) -> bool: for none_type in NONE_TYPES: if type_ is none_type: return True # With python 3.8, specifically 3.8.10, Literal "is" checks are very flaky: # they can change with very subtle changes like the use of types in other modules, # hopefully this check avoids that issue. if is_literal_type(type_): # pragma: no cover return all_literal_values(type_) == (None,) return False else: def is_none_type(type_: Any) -> bool: return type_ in NONE_TYPES def display_as_type(v: Type[Any]) -> str: if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type): v = v.__class__ if is_union(get_origin(v)): return f'Union[{", ".join(map(display_as_type, get_args(v)))}]' if isinstance(v, WithArgsTypes): # Generic aliases are constructs like `list[int]` return str(v).replace('typing.', '') try: return v.__name__ except AttributeError: # happens with typing objects return str(v).replace('typing.', '') def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]: """ Partially taken from typing.get_type_hints. Resolve string or ForwardRef annotations into type objects if possible. """ base_globals: Optional[Dict[str, Any]] = None if module_name: try: module = sys.modules[module_name] except KeyError: # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 pass else: base_globals = module.__dict__ annotations = {} for name, value in raw_annotations.items(): if isinstance(value, str): if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1): value = ForwardRef(value, is_argument=False, is_class=True) else: value = ForwardRef(value, is_argument=False) try: value = _eval_type(value, base_globals, None) except NameError: # this is ok, it can be fixed with update_forward_refs pass annotations[name] = value return annotations def is_callable_type(type_: Type[Any]) -> bool: return type_ is Callable or get_origin(type_) is Callable def is_literal_type(type_: Type[Any]) -> bool: return Literal is not None and get_origin(type_) in LITERAL_TYPES def literal_values(type_: Type[Any]) -> Tuple[Any, ...]: return get_args(type_) def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]: """ This method is used to retrieve all Literal values as Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586) e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]` """ if not is_literal_type(type_): return (type_,) values = literal_values(type_) return tuple(x for value in values for x in all_literal_values(value)) def is_namedtuple(type_: Type[Any]) -> bool: """ Check if a given class is a named tuple.
It can be either a `typing.NamedTuple` or `collections.namedtuple` """ from .utils import lenient_issubclass return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields') def is_typeddict(type_: Type[Any]) -> bool: """ Check if a given class is a typed dict (from `typing` or `typing_extensions`) In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict) """ from .utils import lenient_issubclass return lenient_issubclass(type_, dict) and hasattr(type_, '__total__') def _check_typeddict_special(type_: Any) -> bool: return type_ is TypedDictRequired or type_ is TypedDictNotRequired def is_typeddict_special(type_: Any) -> bool: """ Check if type is a TypedDict special form (Required or NotRequired). """ return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_)) test_type = NewType('test_type', str) def is_new_type(type_: Type[Any]) -> bool: """ Check whether type_ was created using typing.NewType """ return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__') # type: ignore def new_type_supertype(type_: Type[Any]) -> Type[Any]: while hasattr(type_, '__supertype__'): type_ = type_.__supertype__ return type_ def _check_classvar(v: Optional[Type[Any]]) -> bool: if v is None: return False return v.__class__ == ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar' def _check_finalvar(v: Optional[Type[Any]]) -> bool: """ Check if a given type is a `typing.Final` type. """ if v is None: return False return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final') def is_classvar(ann_type: Type[Any]) -> bool: if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)): return True # this is an ugly workaround for class vars that contain forward references and are therefore themselves # forward references, see #3679 if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): return True return False def is_finalvar(ann_type: Type[Any]) -> bool: return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type)) def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None: """ Try to update ForwardRefs on fields based on this ModelField, globalns and localns. """ prepare = False if field.type_.__class__ == ForwardRef: prepare = True field.type_ = evaluate_forwardref(field.type_, globalns, localns or None) if field.outer_type_.__class__ == ForwardRef: prepare = True field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None) if prepare: field.prepare() if field.sub_fields: for sub_f in field.sub_fields: update_field_forward_refs(sub_f, globalns=globalns, localns=localns) if field.discriminator_key is not None: field.prepare_discriminated_union_sub_fields() def update_model_forward_refs( model: Type[Any], fields: Iterable['ModelField'], json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable], localns: 'DictStrAny', exc_to_suppress: Tuple[Type[BaseException], ...] = (), ) -> None: """ Try to update model fields ForwardRefs based on model and localns. 
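    Illustrative sketch (not part of the original docstring) of the public
    entry point that drives this helper::

        from typing import List, Optional
        from pydantic import BaseModel

        class Node(BaseModel):
            value: int
            children: Optional[List['Node']] = None

        Node.update_forward_refs()  # resolves ForwardRef('Node') in the field types
        Node(value=1, children=[{'value': 2}])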
""" if model.__module__ in sys.modules: globalns = sys.modules[model.__module__].__dict__.copy() else: globalns = {} globalns.setdefault(model.__name__, model) for f in fields: try: update_field_forward_refs(f, globalns=globalns, localns=localns) except exc_to_suppress: pass for key in set(json_encoders.keys()): if isinstance(key, str): fr: ForwardRef = ForwardRef(key) elif isinstance(key, ForwardRef): fr = key else: continue try: new_key = evaluate_forwardref(fr, globalns, localns or None) except exc_to_suppress: # pragma: no cover continue json_encoders[new_key] = json_encoders.pop(key) def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]: """ Tries to get the class of a Type[T] annotation. Returns True if Type is used without brackets. Otherwise returns None. """ if type_ is type: return True if get_origin(type_) is None: return None args = get_args(type_) if not args or not isinstance(args[0], type): return True else: return args[0] def get_sub_types(tp: Any) -> List[Any]: """ Return all the types that are allowed by type `tp` `tp` can be a `Union` of allowed types or an `Annotated` type """ origin = get_origin(tp) if origin is Annotated: return get_sub_types(get_args(tp)[0]) elif is_union(origin): return [x for t in get_args(tp) for x in get_sub_types(t)] else: return [tp] pydantic-1.10.14/pydantic/utils.py000066400000000000000000000623211455251250200170230ustar00rootroot00000000000000import keyword import warnings import weakref from collections import OrderedDict, defaultdict, deque from copy import deepcopy from itertools import islice, zip_longest from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType from typing import ( TYPE_CHECKING, AbstractSet, Any, Callable, Collection, Dict, Generator, Iterable, Iterator, List, Mapping, NoReturn, Optional, Set, Tuple, Type, TypeVar, Union, ) from typing_extensions import Annotated from .errors import ConfigError from .typing import ( NoneType, WithArgsTypes, all_literal_values, display_as_type, get_args, get_origin, is_literal_type, is_union, ) from .version import version_info if TYPE_CHECKING: from inspect import Signature from pathlib import Path from .config import BaseConfig from .dataclasses import Dataclass from .fields import ModelField from .main import BaseModel from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] __all__ = ( 'import_string', 'sequence_like', 'validate_field_name', 'lenient_isinstance', 'lenient_issubclass', 'in_ipython', 'is_valid_identifier', 'deep_update', 'update_not_none', 'almost_equal_floats', 'get_model', 'to_camel', 'is_valid_field', 'smart_deepcopy', 'PyObjectStr', 'Representation', 'GetterDict', 'ValueItems', 'version_info', # required here to match behaviour in v1.3 'ClassAttribute', 'path_type', 'ROOT_KEY', 'get_unique_discriminator_alias', 'get_discriminator_alias_and_values', 'DUNDER_ATTRIBUTES', ) ROOT_KEY = '__root__' # these are types that are returned unchanged by deepcopy IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = { int, float, complex, str, bool, bytes, type, NoneType, FunctionType, BuiltinFunctionType, LambdaType, weakref.ref, CodeType, # note: including ModuleType will differ from behaviour of deepcopy by not producing error. 
# It might not be a good idea in general, but considering that this function is used only internally # against default values of fields, this allows a field to actually have a module as its default value ModuleType, NotImplemented.__class__, Ellipsis.__class__, } # these are types that, if empty, might be copied with simple copy() instead of deepcopy() BUILTIN_COLLECTIONS: Set[Type[Any]] = { list, set, tuple, frozenset, dict, OrderedDict, defaultdict, deque, } def import_string(dotted_path: str) -> Any: """ Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the last name in the path. Raise ImportError if the import fails. """ from importlib import import_module try: module_path, class_name = dotted_path.strip(' ').rsplit('.', 1) except ValueError as e: raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e module = import_module(module_path) try: return getattr(module, class_name) except AttributeError as e: raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e def truncate(v: Union[str], *, max_len: int = 80) -> str: """ Truncate a value and add a unicode ellipsis (three dots) to the end if it is too long """ warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning) if isinstance(v, str) and len(v) > (max_len - 2): # -3 so quote + string + … + quote has correct length return (v[: (max_len - 3)] + '…').__repr__() try: v = v.__repr__() except TypeError: v = v.__class__.__repr__(v) # in case v is a type if len(v) > max_len: v = v[: max_len - 1] + '…' return v def sequence_like(v: Any) -> bool: return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None: """ Ensure that the field's name does not shadow an existing attribute of the model. """ for base in bases: if getattr(base, field_name, None): raise NameError( f'Field name "{field_name}" shadows a BaseModel attribute; ' f'use a different field name with "alias=\'{field_name}\'".' ) def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: try: return isinstance(o, class_or_tuple) # type: ignore[arg-type] except TypeError: return False def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: try: return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type] except TypeError: if isinstance(cls, WithArgsTypes): return False raise # pragma: no cover def in_ipython() -> bool: """ Check whether we're in an ipython environment, including jupyter notebooks. """ try: eval('__IPYTHON__') except NameError: return False else: # pragma: no cover return True def is_valid_identifier(identifier: str) -> bool: """ Checks that a string is a valid identifier and not a Python keyword. :param identifier: The identifier to test. :return: True if the identifier is valid.
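    Illustrative examples (a sketch, not part of the original docstring)::

        is_valid_identifier('name')     # True
        is_valid_identifier('class')    # False - a Python keyword
        is_valid_identifier('my-name')  # False - not a valid identifier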
""" return identifier.isidentifier() and not keyword.iskeyword(identifier) KeyType = TypeVar('KeyType') def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]: updated_mapping = mapping.copy() for updating_mapping in updating_mappings: for k, v in updating_mapping.items(): if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): updated_mapping[k] = deep_update(updated_mapping[k], v) else: updated_mapping[k] = v return updated_mapping def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None: mapping.update({k: v for k, v in update.items() if v is not None}) def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool: """ Return True if two floats are almost equal """ return abs(value_1 - value_2) <= delta def generate_model_signature( init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig'] ) -> 'Signature': """ Generate signature for model based on its fields """ from inspect import Parameter, Signature, signature from .config import Extra present_params = signature(init).parameters.values() merged_params: Dict[str, Parameter] = {} var_kw = None use_var_kw = False for param in islice(present_params, 1, None): # skip self arg if param.kind is param.VAR_KEYWORD: var_kw = param continue merged_params[param.name] = param if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through allow_names = config.allow_population_by_field_name for field_name, field in fields.items(): param_name = field.alias if field_name in merged_params or param_name in merged_params: continue elif not is_valid_identifier(param_name): if allow_names and is_valid_identifier(field_name): param_name = field_name else: use_var_kw = True continue # TODO: replace annotation with actual expected types once #1055 solved kwargs = {'default': field.default} if not field.required else {} merged_params[param_name] = Parameter( param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs ) if config.extra is Extra.allow: use_var_kw = True if var_kw and use_var_kw: # Make sure the parameter for extra kwargs # does not have the same name as a field default_model_signature = [ ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD), ('data', Parameter.VAR_KEYWORD), ] if [(p.name, p.kind) for p in present_params] == default_model_signature: # if this is the standard model signature, use extra_data as the extra args name var_kw_name = 'extra_data' else: # else start from var_kw var_kw_name = var_kw.name # generate a name that's definitely unique while var_kw_name in fields: var_kw_name += '_' merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) return Signature(parameters=list(merged_params.values()), return_annotation=None) def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']: from .main import BaseModel try: model_cls = obj.__pydantic_model__ # type: ignore except AttributeError: model_cls = obj if not issubclass(model_cls, BaseModel): raise TypeError('Unsupported type, must be either BaseModel or dataclass') return model_cls def to_camel(string: str) -> str: return ''.join(word.capitalize() for word in string.split('_')) def to_lower_camel(string: str) -> str: if len(string) >= 1: pascal_string = to_camel(string) return pascal_string[0].lower() + pascal_string[1:] return string.lower() T = TypeVar('T') def unique_list( input_list: Union[List[T], Tuple[T, ...]], *, name_factory: 
Callable[[T], str] = str, ) -> List[T]: """ Make a list unique while maintaining order. We update the list if another one with the same name is set (e.g. root validator overridden in subclass) """ result: List[T] = [] result_names: List[str] = [] for v in input_list: v_name = name_factory(v) if v_name not in result_names: result_names.append(v_name) result.append(v) else: result[result_names.index(v_name)] = v return result class PyObjectStr(str): """ String class where repr doesn't include quotes. Useful with Representation when you want to return a string representation of something that valid (or pseudo-valid) python. """ def __repr__(self) -> str: return str(self) class Representation: """ Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations of objects. """ __slots__: Tuple[str, ...] = tuple() def __repr_args__(self) -> 'ReprArgs': """ Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. Can either return: * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` """ attrs = ((s, getattr(self, s)) for s in self.__slots__) return [(a, v) for a, v in attrs if v is not None] def __repr_name__(self) -> str: """ Name of the instance's class, used in __repr__. """ return self.__class__.__name__ def __repr_str__(self, join_str: str) -> str: return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: """ Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects """ yield self.__repr_name__() + '(' yield 1 for name, value in self.__repr_args__(): if name is not None: yield name + '=' yield fmt(value) yield ',' yield 0 yield -1 yield ')' def __str__(self) -> str: return self.__repr_str__(' ') def __repr__(self) -> str: return f'{self.__repr_name__()}({self.__repr_str__(", ")})' def __rich_repr__(self) -> 'RichReprResult': """Get fields for Rich library""" for name, field_repr in self.__repr_args__(): if name is None: yield field_repr else: yield name, field_repr class GetterDict(Representation): """ Hack to make object's smell just enough like dicts for validate_model. We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves. """ __slots__ = ('_obj',) def __init__(self, obj: Any): self._obj = obj def __getitem__(self, key: str) -> Any: try: return getattr(self._obj, key) except AttributeError as e: raise KeyError(key) from e def get(self, key: Any, default: Any = None) -> Any: return getattr(self._obj, key, default) def extra_keys(self) -> Set[Any]: """ We don't want to get any other attributes of obj if the model didn't explicitly ask for them """ return set() def keys(self) -> List[Any]: """ Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python dictionaries. 
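    Illustrative sketch (not part of the original docstring)::

        class User:
            def __init__(self) -> None:
                self.id = 1
                self.name = 'spam'

        gd = GetterDict(User())
        gd['name']  # 'spam'
        gd.keys()   # ['id', 'name']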
""" return list(self) def values(self) -> List[Any]: return [self[k] for k in self] def items(self) -> Iterator[Tuple[str, Any]]: for k in self: yield k, self.get(k) def __iter__(self) -> Iterator[str]: for name in dir(self._obj): if not name.startswith('_'): yield name def __len__(self) -> int: return sum(1 for _ in self) def __contains__(self, item: Any) -> bool: return item in self.keys() def __eq__(self, other: Any) -> bool: return dict(self) == dict(other.items()) def __repr_args__(self) -> 'ReprArgs': return [(None, dict(self))] def __repr_name__(self) -> str: return f'GetterDict[{display_as_type(self._obj)}]' class ValueItems(Representation): """ Class for more convenient calculation of excluded or included fields on values. """ __slots__ = ('_items', '_type') def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None: items = self._coerce_items(items) if isinstance(value, (list, tuple)): items = self._normalize_indexes(items, len(value)) self._items: 'MappingIntStrAny' = items def is_excluded(self, item: Any) -> bool: """ Check if item is fully excluded. :param item: key or index of a value """ return self.is_true(self._items.get(item)) def is_included(self, item: Any) -> bool: """ Check if value is contained in self._items :param item: key or index of value """ return item in self._items def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]: """ :param e: key or index of element on value :return: raw values for element if self._items is dict and contain needed element """ item = self._items.get(e) return item if not self.is_true(item) else None def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny': """ :param items: dict or set of indexes which will be normalized :param v_length: length of sequence indexes of which will be >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) {0: True, 2: True, 3: True} >>> self._normalize_indexes({'__all__': True}, 4) {0: True, 1: True, 2: True, 3: True} """ normalized_items: 'DictIntStrAny' = {} all_items = None for i, v in items.items(): if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)): raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') if i == '__all__': all_items = self._coerce_value(v) continue if not isinstance(i, int): raise TypeError( 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: ' 'expected integer keys or keyword "__all__"' ) normalized_i = v_length + i if i < 0 else i normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) if not all_items: return normalized_items if self.is_true(all_items): for i in range(v_length): normalized_items.setdefault(i, ...) return normalized_items for i in range(v_length): normalized_item = normalized_items.setdefault(i, {}) if not self.is_true(normalized_item): normalized_items[i] = self.merge(all_items, normalized_item) return normalized_items @classmethod def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: """ Merge a ``base`` item with an ``override`` item. Both ``base`` and ``override`` are converted to dictionaries if possible. Sets are converted to dictionaries with the sets entries as keys and Ellipsis as values. Each key-value pair existing in ``base`` is merged with ``override``, while the rest of the key-value pairs are updated recursively with this function. 
Merging takes place based on the "union" of keys if ``intersect`` is set to ``False`` (default) and on the intersection of keys if ``intersect`` is set to ``True``. """ override = cls._coerce_value(override) base = cls._coerce_value(base) if override is None: return base if cls.is_true(base) or base is None: return override if cls.is_true(override): return base if intersect else override # intersection or union of keys while preserving ordering: if intersect: merge_keys = [k for k in base if k in override] + [k for k in override if k in base] else: merge_keys = list(base) + [k for k in override if k not in base] merged: 'DictIntStrAny' = {} for k in merge_keys: merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) if merged_item is not None: merged[k] = merged_item return merged @staticmethod def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny': if isinstance(items, Mapping): pass elif isinstance(items, AbstractSet): items = dict.fromkeys(items, ...) else: class_name = getattr(items, '__class__', '???') assert_never( items, f'Unexpected type of exclude value {class_name}', ) return items @classmethod def _coerce_value(cls, value: Any) -> Any: if value is None or cls.is_true(value): return value return cls._coerce_items(value) @staticmethod def is_true(v: Any) -> bool: return v is True or v is ... def __repr_args__(self) -> 'ReprArgs': return [(None, self._items)] class ClassAttribute: """ Hide class attribute from its instances """ __slots__ = ( 'name', 'value', ) def __init__(self, name: str, value: Any) -> None: self.name = name self.value = value def __get__(self, instance: Any, owner: Type[Any]) -> None: if instance is None: return self.value raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') path_types = { 'is_dir': 'directory', 'is_file': 'file', 'is_mount': 'mount point', 'is_symlink': 'symlink', 'is_block_device': 'block device', 'is_char_device': 'char device', 'is_fifo': 'FIFO', 'is_socket': 'socket', } def path_type(p: 'Path') -> str: """ Find out what sort of thing a path is. """ assert p.exists(), 'path does not exist' for method, name in path_types.items(): if getattr(p, method)(): return name return 'unknown' Obj = TypeVar('Obj') def smart_deepcopy(obj: Obj) -> Obj: """ Return type as is for immutable built-in types Use obj.copy() for built-in empty collections Use copy.deepcopy() for non-empty collections and unknown objects """ obj_type = obj.__class__ if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway try: if not obj and obj_type in BUILTIN_COLLECTIONS: # faster way for empty collections, no need to copy its members return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method except (TypeError, ValueError, RuntimeError): # do we really dare to catch ALL errors? 
Seems a bit risky pass return deepcopy(obj) # slowest way when we actually might need a deepcopy def is_valid_field(name: str) -> bool: if not name.startswith('_'): return True return ROOT_KEY == name DUNDER_ATTRIBUTES = { '__annotations__', '__classcell__', '__doc__', '__module__', '__orig_bases__', '__orig_class__', '__qualname__', } def is_valid_private_name(name: str) -> bool: return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES _EMPTY = object() def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool: """ Check that the items of `left` are the same objects as those in `right`. >>> a, b = object(), object() >>> all_identical([a, b, a], [a, b, a]) True >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" False """ for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY): if left_item is not right_item: return False return True def assert_never(obj: NoReturn, msg: str) -> NoReturn: """ Helper to make sure that we have covered all possible types. This is mostly useful for ``mypy``, docs: https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks """ raise TypeError(msg) def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str: """Validate that all aliases are the same and if that's the case return the alias""" unique_aliases = set(all_aliases) if len(unique_aliases) > 1: raise ConfigError( f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})' ) return unique_aliases.pop() def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]: """ Get alias and all valid values in the `Literal` type of the discriminator field `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many. 
""" is_root_model = getattr(tp, '__custom_root_type__', False) if get_origin(tp) is Annotated: tp = get_args(tp)[0] if hasattr(tp, '__pydantic_model__'): tp = tp.__pydantic_model__ if is_union(get_origin(tp)): alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key) return alias, tuple(v for values in all_values for v in values) elif is_root_model: union_type = tp.__fields__[ROOT_KEY].type_ alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key) if len(set(all_values)) > 1: raise ConfigError( f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}' ) return alias, all_values[0] else: try: t_discriminator_type = tp.__fields__[discriminator_key].type_ except AttributeError as e: raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e except KeyError as e: raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e if not is_literal_type(t_discriminator_type): raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`') return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type) def _get_union_alias_and_all_values( union_type: Type[Any], discriminator_key: str ) -> Tuple[str, Tuple[Tuple[str, ...], ...]]: zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)] # unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))] all_aliases, all_values = zip(*zipped_aliases_values) return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values pydantic-1.10.14/pydantic/validators.py000066400000000000000000000525771455251250200200470ustar00rootroot00000000000000import math import re from collections import OrderedDict, deque from collections.abc import Hashable as CollectionsHashable from datetime import date, datetime, time, timedelta from decimal import Decimal, DecimalException from enum import Enum, IntEnum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import ( TYPE_CHECKING, Any, Callable, Deque, Dict, ForwardRef, FrozenSet, Generator, Hashable, List, NamedTuple, Pattern, Set, Tuple, Type, TypeVar, Union, ) from uuid import UUID from . import errors from .datetime_parse import parse_date, parse_datetime, parse_duration, parse_time from .typing import ( AnyCallable, all_literal_values, display_as_type, get_class, is_callable_type, is_literal_type, is_namedtuple, is_none_type, is_typeddict, ) from .utils import almost_equal_floats, lenient_issubclass, sequence_like if TYPE_CHECKING: from typing_extensions import Literal, TypedDict from .config import BaseConfig from .fields import ModelField from .types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt] AnyOrderedDict = OrderedDict[Any, Any] Number = Union[int, float, Decimal] StrBytes = Union[str, bytes] def str_validator(v: Any) -> Union[str]: if isinstance(v, str): if isinstance(v, Enum): return v.value else: return v elif isinstance(v, (float, int, Decimal)): # is there anything else we want to add here? If you think so, create an issue. 
return str(v) elif isinstance(v, (bytes, bytearray)): return v.decode() else: raise errors.StrError() def strict_str_validator(v: Any) -> Union[str]: if isinstance(v, str) and not isinstance(v, Enum): return v raise errors.StrError() def bytes_validator(v: Any) -> Union[bytes]: if isinstance(v, bytes): return v elif isinstance(v, bytearray): return bytes(v) elif isinstance(v, str): return v.encode() elif isinstance(v, (float, int, Decimal)): return str(v).encode() else: raise errors.BytesError() def strict_bytes_validator(v: Any) -> Union[bytes]: if isinstance(v, bytes): return v elif isinstance(v, bytearray): return bytes(v) else: raise errors.BytesError() BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'} BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'} def bool_validator(v: Any) -> bool: if v is True or v is False: return v if isinstance(v, bytes): v = v.decode() if isinstance(v, str): v = v.lower() try: if v in BOOL_TRUE: return True if v in BOOL_FALSE: return False except TypeError: raise errors.BoolError() raise errors.BoolError() # matches the default limit cpython, see https://github.com/python/cpython/pull/96500 max_str_int = 4_300 def int_validator(v: Any) -> int: if isinstance(v, int) and not (v is True or v is False): return v # see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778 # this check should be unnecessary once patch releases are out for 3.7, 3.8, 3.9 and 3.10 # but better to check here until then. # NOTICE: this does not fully protect user from the DOS risk since the standard library JSON implementation # (and other std lib modules like xml) use `int()` and are likely called before this, the best workaround is to # 1. update to the latest patch release of python once released, 2. 
use a different JSON library like ujson if isinstance(v, (str, bytes, bytearray)) and len(v) > max_str_int: raise errors.IntegerError() try: return int(v) except (TypeError, ValueError, OverflowError): raise errors.IntegerError() def strict_int_validator(v: Any) -> int: if isinstance(v, int) and not (v is True or v is False): return v raise errors.IntegerError() def float_validator(v: Any) -> float: if isinstance(v, float): return v try: return float(v) except (TypeError, ValueError): raise errors.FloatError() def strict_float_validator(v: Any) -> float: if isinstance(v, float): return v raise errors.FloatError() def float_finite_validator(v: 'Number', field: 'ModelField', config: 'BaseConfig') -> 'Number': allow_inf_nan = getattr(field.type_, 'allow_inf_nan', None) if allow_inf_nan is None: allow_inf_nan = config.allow_inf_nan if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)): raise errors.NumberNotFiniteError() return v def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number': field_type: ConstrainedNumber = field.type_ if field_type.multiple_of is not None: mod = float(v) / float(field_type.multiple_of) % 1 if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0): raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of) return v def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number': field_type: ConstrainedNumber = field.type_ if field_type.gt is not None and not v > field_type.gt: raise errors.NumberNotGtError(limit_value=field_type.gt) elif field_type.ge is not None and not v >= field_type.ge: raise errors.NumberNotGeError(limit_value=field_type.ge) if field_type.lt is not None and not v < field_type.lt: raise errors.NumberNotLtError(limit_value=field_type.lt) if field_type.le is not None and not v <= field_type.le: raise errors.NumberNotLeError(limit_value=field_type.le) return v def constant_validator(v: 'Any', field: 'ModelField') -> 'Any': """Validate ``const`` fields. The value provided for a ``const`` field must be equal to the default value of the field. This is to support the keyword of the same name in JSON Schema. 
""" if v != field.default: raise errors.WrongConstantError(given=v, permitted=[field.default]) return v def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes': v_len = len(v) min_length = config.min_anystr_length if v_len < min_length: raise errors.AnyStrMinLengthError(limit_value=min_length) max_length = config.max_anystr_length if max_length is not None and v_len > max_length: raise errors.AnyStrMaxLengthError(limit_value=max_length) return v def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes': return v.strip() def anystr_upper(v: 'StrBytes') -> 'StrBytes': return v.upper() def anystr_lower(v: 'StrBytes') -> 'StrBytes': return v.lower() def ordered_dict_validator(v: Any) -> 'AnyOrderedDict': if isinstance(v, OrderedDict): return v try: return OrderedDict(v) except (TypeError, ValueError): raise errors.DictError() def dict_validator(v: Any) -> Dict[Any, Any]: if isinstance(v, dict): return v try: return dict(v) except (TypeError, ValueError): raise errors.DictError() def list_validator(v: Any) -> List[Any]: if isinstance(v, list): return v elif sequence_like(v): return list(v) else: raise errors.ListError() def tuple_validator(v: Any) -> Tuple[Any, ...]: if isinstance(v, tuple): return v elif sequence_like(v): return tuple(v) else: raise errors.TupleError() def set_validator(v: Any) -> Set[Any]: if isinstance(v, set): return v elif sequence_like(v): return set(v) else: raise errors.SetError() def frozenset_validator(v: Any) -> FrozenSet[Any]: if isinstance(v, frozenset): return v elif sequence_like(v): return frozenset(v) else: raise errors.FrozenSetError() def deque_validator(v: Any) -> Deque[Any]: if isinstance(v, deque): return v elif sequence_like(v): return deque(v) else: raise errors.DequeError() def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum: try: enum_v = field.type_(v) except ValueError: # field.type_ should be an enum, so will be iterable raise errors.EnumMemberError(enum_values=list(field.type_)) return enum_v.value if config.use_enum_values else enum_v def uuid_validator(v: Any, field: 'ModelField') -> UUID: try: if isinstance(v, str): v = UUID(v) elif isinstance(v, (bytes, bytearray)): try: v = UUID(v.decode()) except ValueError: # 16 bytes in big-endian order as the bytes argument fail # the above check v = UUID(bytes=v) except ValueError: raise errors.UUIDError() if not isinstance(v, UUID): raise errors.UUIDError() required_version = getattr(field.type_, '_required_version', None) if required_version and v.version != required_version: raise errors.UUIDVersionError(required_version=required_version) return v def decimal_validator(v: Any) -> Decimal: if isinstance(v, Decimal): return v elif isinstance(v, (bytes, bytearray)): v = v.decode() v = str(v).strip() try: v = Decimal(v) except DecimalException: raise errors.DecimalError() if not v.is_finite(): raise errors.DecimalIsNotFiniteError() return v def hashable_validator(v: Any) -> Hashable: if isinstance(v, Hashable): return v raise errors.HashableError() def ip_v4_address_validator(v: Any) -> IPv4Address: if isinstance(v, IPv4Address): return v try: return IPv4Address(v) except ValueError: raise errors.IPv4AddressError() def ip_v6_address_validator(v: Any) -> IPv6Address: if isinstance(v, IPv6Address): return v try: return IPv6Address(v) except ValueError: raise errors.IPv6AddressError() def ip_v4_network_validator(v: Any) -> IPv4Network: """ Assume IPv4Network initialised with a default ``strict`` argument See more: 
https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network """ if isinstance(v, IPv4Network): return v try: return IPv4Network(v) except ValueError: raise errors.IPv4NetworkError() def ip_v6_network_validator(v: Any) -> IPv6Network: """ Assume IPv6Network initialised with a default ``strict`` argument See more: https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network """ if isinstance(v, IPv6Network): return v try: return IPv6Network(v) except ValueError: raise errors.IPv6NetworkError() def ip_v4_interface_validator(v: Any) -> IPv4Interface: if isinstance(v, IPv4Interface): return v try: return IPv4Interface(v) except ValueError: raise errors.IPv4InterfaceError() def ip_v6_interface_validator(v: Any) -> IPv6Interface: if isinstance(v, IPv6Interface): return v try: return IPv6Interface(v) except ValueError: raise errors.IPv6InterfaceError() def path_validator(v: Any) -> Path: if isinstance(v, Path): return v try: return Path(v) except TypeError: raise errors.PathError() def path_exists_validator(v: Any) -> Path: if not v.exists(): raise errors.PathNotExistsError(path=v) return v def callable_validator(v: Any) -> AnyCallable: """ Perform a simple check if the value is callable. Note: complete matching of argument type hints and return types is not performed """ if callable(v): return v raise errors.CallableError(value=v) def enum_validator(v: Any) -> Enum: if isinstance(v, Enum): return v raise errors.EnumError(value=v) def int_enum_validator(v: Any) -> IntEnum: if isinstance(v, IntEnum): return v raise errors.IntEnumError(value=v) def make_literal_validator(type_: Any) -> Callable[[Any], Any]: permitted_choices = all_literal_values(type_) # To have a O(1) complexity and still return one of the values set inside the `Literal`, # we create a dict with the set values (a set causes some problems with the way intersection works). 
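# A minimal sketch of the lookup this sets up (hypothetical values; the real
# subtlety involves str-based Enum members that compare equal to plain strings,
# see the note below):
#
#     allowed_choices = {v: v for v in ('a', 'b')}   # built once per Literal
#     allowed_choices['a']  # -> 'a' in O(1), returning the value stored in the
#                           #    Literal itself rather than the checked input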
# In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`) allowed_choices = {v: v for v in permitted_choices} def literal_validator(v: Any) -> Any: try: return allowed_choices[v] except (KeyError, TypeError): raise errors.WrongConstantError(given=v, permitted=permitted_choices) return literal_validator def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': v_len = len(v) min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length if v_len < min_length: raise errors.AnyStrMinLengthError(limit_value=min_length) max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length if max_length is not None and v_len > max_length: raise errors.AnyStrMaxLengthError(limit_value=max_length) return v def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace if strip_whitespace: v = v.strip() return v def constr_upper(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': upper = field.type_.to_upper or config.anystr_upper if upper: v = v.upper() return v def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': lower = field.type_.to_lower or config.anystr_lower if lower: v = v.lower() return v def validate_json(v: Any, config: 'BaseConfig') -> Any: if v is None: # pass None through to other validators return v try: return config.json_loads(v) # type: ignore except ValueError: raise errors.JsonError() except TypeError: raise errors.JsonTypeError() T = TypeVar('T') def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]: def arbitrary_type_validator(v: Any) -> T: if isinstance(v, type_): return v raise errors.ArbitraryTypeError(expected_arbitrary_type=type_) return arbitrary_type_validator def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]: def class_validator(v: Any) -> Type[T]: if lenient_issubclass(v, type_): return v raise errors.SubclassError(expected_class=type_) return class_validator def any_class_validator(v: Any) -> Type[T]: if isinstance(v, type): return v raise errors.ClassError() def none_validator(v: Any) -> 'Literal[None]': if v is None: return v raise errors.NotNoneError() def pattern_validator(v: Any) -> Pattern[str]: if isinstance(v, Pattern): return v str_value = str_validator(v) try: return re.compile(str_value) except re.error: raise errors.PatternError() NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple) def make_namedtuple_validator( namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig'] ) -> Callable[[Tuple[Any, ...]], NamedTupleT]: from .annotated_types import create_model_from_namedtuple NamedTupleModel = create_model_from_namedtuple( namedtuple_cls, __config__=config, __module__=namedtuple_cls.__module__, ) namedtuple_cls.__pydantic_model__ = NamedTupleModel # type: ignore[attr-defined] def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT: annotations = NamedTupleModel.__annotations__ if len(values) > len(annotations): raise errors.ListMaxLengthError(limit_value=len(annotations)) dict_values: Dict[str, Any] = dict(zip(annotations, values)) validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values)) return namedtuple_cls(**validated_dict_values) return namedtuple_validator def make_typeddict_validator( typeddict_cls: Type['TypedDict'], config: 
Type['BaseConfig'] # type: ignore[valid-type] ) -> Callable[[Any], Dict[str, Any]]: from .annotated_types import create_model_from_typeddict TypedDictModel = create_model_from_typeddict( typeddict_cls, __config__=config, __module__=typeddict_cls.__module__, ) typeddict_cls.__pydantic_model__ = TypedDictModel # type: ignore[attr-defined] def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]: # type: ignore[valid-type] return TypedDictModel.parse_obj(values).dict(exclude_unset=True) return typeddict_validator class IfConfig: def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None: self.validator = validator self.config_attr_names = config_attr_names self.ignored_value = ignored_value def check(self, config: Type['BaseConfig']) -> bool: return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names) # order is important here, for example: bool is a subclass of int so has to come first, datetime before date same, # IPv4Interface before IPv4Address, etc _VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [ (IntEnum, [int_validator, enum_member_validator]), (Enum, [enum_member_validator]), ( str, [ str_validator, IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), IfConfig(anystr_upper, 'anystr_upper'), IfConfig(anystr_lower, 'anystr_lower'), IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), ], ), ( bytes, [ bytes_validator, IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), IfConfig(anystr_upper, 'anystr_upper'), IfConfig(anystr_lower, 'anystr_lower'), IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), ], ), (bool, [bool_validator]), (int, [int_validator]), (float, [float_validator, IfConfig(float_finite_validator, 'allow_inf_nan', ignored_value=True)]), (Path, [path_validator]), (datetime, [parse_datetime]), (date, [parse_date]), (time, [parse_time]), (timedelta, [parse_duration]), (OrderedDict, [ordered_dict_validator]), (dict, [dict_validator]), (list, [list_validator]), (tuple, [tuple_validator]), (set, [set_validator]), (frozenset, [frozenset_validator]), (deque, [deque_validator]), (UUID, [uuid_validator]), (Decimal, [decimal_validator]), (IPv4Interface, [ip_v4_interface_validator]), (IPv6Interface, [ip_v6_interface_validator]), (IPv4Address, [ip_v4_address_validator]), (IPv6Address, [ip_v6_address_validator]), (IPv4Network, [ip_v4_network_validator]), (IPv6Network, [ip_v6_network_validator]), ] def find_validators( # noqa: C901 (ignore complexity) type_: Type[Any], config: Type['BaseConfig'] ) -> Generator[AnyCallable, None, None]: from .dataclasses import is_builtin_dataclass, make_dataclass_validator if type_ is Any or type_ is object: return type_type = type_.__class__ if type_type == ForwardRef or type_type == TypeVar: return if is_none_type(type_): yield none_validator return if type_ is Pattern or type_ is re.Pattern: yield pattern_validator return if type_ is Hashable or type_ is CollectionsHashable: yield hashable_validator return if is_callable_type(type_): yield callable_validator return if is_literal_type(type_): yield make_literal_validator(type_) return if is_builtin_dataclass(type_): yield from make_dataclass_validator(type_, config) return if type_ is Enum: yield enum_validator return if type_ is IntEnum: yield int_enum_validator return if is_namedtuple(type_): yield tuple_validator yield make_namedtuple_validator(type_, config) return if is_typeddict(type_): yield make_typeddict_validator(type_, config) 
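# Sketch of what the TypedDict branch above produces (hypothetical class; in
# pydantic v1 the generated model coerces values, so the str '1979' becomes an int):
#
#     class Movie(TypedDict):
#         name: str
#         year: int
#
#     validate = make_typeddict_validator(Movie, BaseConfig)
#     validate({'name': 'Life of Brian', 'year': '1979'})
#     # -> {'name': 'Life of Brian', 'year': 1979}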
return class_ = get_class(type_) if class_ is not None: if class_ is not Any and isinstance(class_, type): yield make_class_validator(class_) else: yield any_class_validator return for val_type, validators in _VALIDATORS: try: if issubclass(type_, val_type): for v in validators: if isinstance(v, IfConfig): if v.check(config): yield v.validator else: yield v return except TypeError: raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})') if config.arbitrary_types_allowed: yield make_arbitrary_type_validator(type_) else: raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config') pydantic-1.10.14/pydantic/version.py000066400000000000000000000020171455251250200173440ustar00rootroot00000000000000__all__ = 'compiled', 'VERSION', 'version_info' VERSION = '1.10.14' try: import cython # type: ignore except ImportError: compiled: bool = False else: # pragma: no cover try: compiled = cython.compiled except AttributeError: compiled = False def version_info() -> str: import platform import sys from importlib import import_module from pathlib import Path optional_deps = [] for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'): try: import_module(p.replace('-', '_')) except ImportError: continue optional_deps.append(p) info = { 'pydantic version': VERSION, 'pydantic compiled': compiled, 'install path': Path(__file__).resolve().parent, 'python version': sys.version, 'platform': platform.platform(), 'optional deps. installed': optional_deps, } return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items()) pydantic-1.10.14/requirements.txt000066400000000000000000000004441455251250200167600ustar00rootroot00000000000000# requirements for compilation and from setup.py so dependabot prompts us to test with latest version of these packages Cython==0.29.32;sys_platform!='win32' devtools==0.9.0 email-validator==2.0.0.post2 dataclasses==0.6; python_version < '3.7' typing-extensions==4.3.0 python-dotenv==0.20.0 pydantic-1.10.14/setup.cfg000066400000000000000000000045361455251250200153230ustar00rootroot00000000000000[tool:pytest] testpaths = tests addopts = -p no:hypothesispytest filterwarnings = error ignore::DeprecationWarning:distutils ignore::DeprecationWarning:Cython # for Python 3.10+: mypy still relies on distutils on windows. 
We hence ignore those warnings ignore:The distutils package is deprecated and slated for removal in Python 3.12:DeprecationWarning ignore:The distutils.sysconfig module is deprecated, use sysconfig instead:DeprecationWarning # for Python 3.11 ignore:path is deprecated.*:DeprecationWarning:certifi ignore:module 'sre_constants' is deprecated:DeprecationWarning:pkg_resources [flake8] max-line-length = 120 max-complexity = 14 inline-quotes = single multiline-quotes = double ignore = E203, W503 per-file-ignores = docs/examples/schema_unenforced_constraints.py: F811 docs/examples/validation_decorator_async.py: E402 docs/examples/types_constrained.py: F722 [coverage:run] source = pydantic branch = True context = ${CONTEXT} [coverage:report] precision = 2 exclude_lines = pragma: no cover raise NotImplementedError raise NotImplemented if TYPE_CHECKING: @overload [coverage:paths] source = pydantic/ /Users/runner/work/pydantic/pydantic/pydantic/ D:\a\pydantic\pydantic\pydantic [isort] line_length=120 known_first_party=pydantic multi_line_output=3 include_trailing_comma=True force_grid_wrap=0 combine_as_imports=True [mypy] python_version = 3.9 show_error_codes = True follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True warn_unused_configs = True disallow_subclassing_any = True disallow_incomplete_defs = True disallow_untyped_decorators = True disallow_untyped_calls = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True # remaining arguments from `mypy --strict` which cause errors ;no_implicit_optional = True ;warn_return_any = True [mypy-email_validator] ignore_missing_imports = true [mypy-dotenv] ignore_missing_imports = true [mypy-toml] ignore_missing_imports = true # ansi2html and devtools are required to avoid the need to install these packages when running linting, # they're used in the docs build script [mypy-ansi2html] ignore_missing_imports = true [mypy-devtools] ignore_missing_imports = true pydantic-1.10.14/setup.py000066400000000000000000000121741455251250200152110ustar00rootroot00000000000000import os import re import sys from importlib.machinery import SourceFileLoader from pathlib import Path from setuptools import setup if os.name == 'nt': from setuptools.command import build_ext def get_export_symbols(self, ext): """ Slightly modified from: https://github.com/python/cpython/blob/8849e5962ba481d5d414b3467a256aba2134b4da\ /Lib/distutils/command/build_ext.py#L686-L703 """ # Patch from: https://bugs.python.org/issue35893 parts = ext.name.split('.') if parts[-1] == '__init__': suffix = parts[-2] else: suffix = parts[-1] # from here on unchanged try: # Unicode module name support as defined in PEP-489 # https://www.python.org/dev/peps/pep-0489/#export-hook-name suffix.encode('ascii') except UnicodeEncodeError: suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii') initfunc_name = 'PyInit_' + suffix if initfunc_name not in ext.export_symbols: ext.export_symbols.append(initfunc_name) return ext.export_symbols build_ext.build_ext.get_export_symbols = get_export_symbols class ReplaceLinks: def __init__(self): self.links = set() def replace_issues(self, m): id = m.group(1) self.links.add(f'.. _#{id}: https://github.com/pydantic/pydantic/issues/{id}') return f'`#{id}`_' def replace_users(self, m): name = m.group(2) self.links.add(f'.. 
_@{name}: https://github.com/{name}') return f'{m.group(1)}`@{name}`_' def extra(self): return '\n\n' + '\n'.join(sorted(self.links)) + '\n' description = 'Data validation and settings management using python type hints' THIS_DIR = Path(__file__).resolve().parent try: history = (THIS_DIR / 'HISTORY.md').read_text(encoding='utf-8') history = re.sub(r'(\s)#(\d+)', r'\1[#\2](https://github.com/pydantic/pydantic/issues/\2)', history) history = re.sub(r'( +)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I) history = re.sub('@@', '@', history) long_description = (THIS_DIR / 'README.md').read_text(encoding='utf-8') + '\n\n' + history except FileNotFoundError: long_description = description + '.\n\nSee https://pydantic-docs.helpmanual.io/ for documentation.' # avoid loading the package before requirements are installed: version = SourceFileLoader('version', 'pydantic/version.py').load_module() ext_modules = None if not any(arg in sys.argv for arg in ['clean', 'check']) and 'SKIP_CYTHON' not in os.environ: try: from Cython.Build import cythonize except ImportError: pass else: # For cython test coverage install with `make build-trace` compiler_directives = {} if 'CYTHON_TRACE' in sys.argv: compiler_directives['linetrace'] = True # Set CFLAG to all optimizations (-O3), add `-g0` to reduce size of binaries, see #2276 # Any additional CFLAGS will be appended. Only the last optimization flag will have effect os.environ['CFLAGS'] = '-O3 -g0 ' + os.environ.get('CFLAGS', '') ext_modules = cythonize( 'pydantic/*.py', exclude=['pydantic/generics.py'], nthreads=int(os.getenv('CYTHON_NTHREADS', 0)), language_level=3, compiler_directives=compiler_directives, ) setup( name='pydantic', version=str(version.VERSION), description=description, long_description=long_description, long_description_content_type='text/markdown', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: Unix', 'Operating System :: POSIX :: Linux', 'Environment :: Console', 'Environment :: MacOS X', 'Framework :: Hypothesis', 'Framework :: Pydantic', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet', ], author='Samuel Colvin', author_email='s@muelcolvin.com', url='https://github.com/pydantic/pydantic', license='MIT', packages=['pydantic'], package_data={'pydantic': ['py.typed']}, python_requires='>=3.7', zip_safe=False, # https://mypy.readthedocs.io/en/latest/installed_packages.html install_requires=[ 'typing-extensions>=4.2.0' ], extras_require={ 'email': ['email-validator>=1.0.3'], 'dotenv': ['python-dotenv>=0.10.4'], }, ext_modules=ext_modules, entry_points={'hypothesis': ['_ = pydantic._hypothesis_plugin']}, ) pydantic-1.10.14/tests/000077500000000000000000000000001455251250200146345ustar00rootroot00000000000000pydantic-1.10.14/tests/__init__.py000066400000000000000000000000001455251250200167330ustar00rootroot00000000000000pydantic-1.10.14/tests/check_tag.py000077500000000000000000000016171455251250200171260ustar00rootroot00000000000000#!/usr/bin/env python3 import json 
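# Expected behaviour of main() below, with hypothetical values:
#     GITHUB_REF='refs/tags/v1.10.14' and VERSION='1.10.14'  -> match, exit 0
#     GITHUB_REF='refs/tags/v1.11.0'  and VERSION='1.10.14'  -> mismatch, exit 1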
import os import re import sys from importlib.machinery import SourceFileLoader from packaging.version import parse def main(env_var='GITHUB_REF') -> int: git_ref = os.getenv(env_var, 'none') tag = re.sub('^refs/tags/v*', '', git_ref.lower()) version = SourceFileLoader('version', 'pydantic/version.py').load_module().VERSION.lower() if tag == version: is_prerelease = parse(version).is_prerelease print( f'✓ {env_var} env var {git_ref!r} matches package version: {tag!r} == {version!r}, ' f'is pre-release: {is_prerelease}' ) print(f'::set-output name=IS_PRERELEASE::{json.dumps(is_prerelease)}') return 0 else: print(f'✖ {env_var} env var {git_ref!r} does not match package version: {tag!r} != {version!r}') return 1 if __name__ == '__main__': sys.exit(main()) pydantic-1.10.14/tests/conftest.py000066400000000000000000000051761455251250200170440ustar00rootroot00000000000000import importlib import inspect import os import secrets import sys import textwrap from types import FunctionType import pytest from _pytest.assertion.rewrite import AssertionRewritingHook # See https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov try: from hypothesis import given # noqa except ImportError: pytest_plugins = [] else: pytest_plugins = ['hypothesis.extra.pytestplugin'] def _extract_source_code_from_function(function): if function.__code__.co_argcount: raise RuntimeError(f'function {function.__qualname__} cannot have any arguments') code_lines = '' body_started = False for line in textwrap.dedent(inspect.getsource(function)).split('\n'): if line.startswith('def '): body_started = True continue elif body_started: code_lines += f'{line}\n' return textwrap.dedent(code_lines) def _create_module_file(code, tmp_path, name): name = f'{name}_{secrets.token_hex(5)}' path = tmp_path / f'{name}.py' path.write_text(code) return name, str(path) class SetEnv: def __init__(self): self.envars = set() def set(self, name, value): self.envars.add(name) os.environ[name] = value def clear(self): for n in self.envars: os.environ.pop(n) @pytest.fixture def env(): setenv = SetEnv() yield setenv setenv.clear() @pytest.fixture def create_module(tmp_path, request): def run(source_code_or_function, rewrite_assertions=True): """ Create module object, execute it and return Can be used as a decorator of the function from the source code of which the module will be constructed :param source_code_or_function string or function with body as a source code for created module :param rewrite_assertions: whether to rewrite assertions in module or not """ if isinstance(source_code_or_function, FunctionType): source_code = _extract_source_code_from_function(source_code_or_function) else: source_code = source_code_or_function module_name, filename = _create_module_file(source_code, tmp_path, request.node.name) if rewrite_assertions: loader = AssertionRewritingHook(config=request.config) loader.mark_rewrite(module_name) else: loader = None spec = importlib.util.spec_from_file_location(module_name, filename, loader=loader) sys.modules[module_name] = module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module return run 
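# Example usage of the `create_module` fixture above (a sketch of the decorator
# form described in its docstring -- the decorated function's body is executed
# as a fresh module and the module object is returned):
#
#     def test_example(create_module):
#         @create_module
#         def module():
#             from pydantic import BaseModel
#
#             class Model(BaseModel):
#                 x: int = 1
#
#         assert module.Model().x == 1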
pydantic-1.10.14/tests/mypy/000077500000000000000000000000001455251250200156325ustar00rootroot00000000000000pydantic-1.10.14/tests/mypy/__init__.py000066400000000000000000000000001455251250200177310ustar00rootroot00000000000000pydantic-1.10.14/tests/mypy/configs/000077500000000000000000000000001455251250200172625ustar00rootroot00000000000000pydantic-1.10.14/tests/mypy/configs/mypy-default.ini000066400000000000000000000004161455251250200224040ustar00rootroot00000000000000[mypy] follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True pydantic-1.10.14/tests/mypy/configs/mypy-plugin-strict-no-any.ini000066400000000000000000000010341455251250200247600ustar00rootroot00000000000000[mypy] plugins = pydantic.mypy follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True disallow_untyped_defs = True disallow_any_decorated = True disallow_any_expr = True disallow_any_explicit = True disallow_any_unimported = True disallow_subclassing_any = True warn_return_any = True [pydantic-mypy] init_forbid_extra = True init_typed = True warn_required_dynamic_aliases = True warn_untyped_fields = True pydantic-1.10.14/tests/mypy/configs/mypy-plugin-strict.ini000066400000000000000000000005621455251250200235660ustar00rootroot00000000000000[mypy] plugins = pydantic.mypy follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True disallow_untyped_defs = True [pydantic-mypy] init_forbid_extra = True init_typed = True warn_required_dynamic_aliases = True warn_untyped_fields = True pydantic-1.10.14/tests/mypy/configs/mypy-plugin.ini000066400000000000000000000004471455251250200222620ustar00rootroot00000000000000[mypy] plugins = pydantic.mypy follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True pydantic-1.10.14/tests/mypy/configs/pyproject-default.toml000066400000000000000000000010021455251250200236110ustar00rootroot00000000000000[build-system] requires = ["poetry>=0.12"] build_backend = "poetry.masonry.api" [tool.poetry] name = "test" version = "0.0.1" readme = "README.md" authors = [ "author@example.com" ] [tool.poetry.dependencies] python = "*" [tool.pytest.ini_options] addopts = "-v -p no:warnings" [tool.mypy] follow_imports = "silent" strict_optional = true warn_redundant_casts = true warn_unused_ignores = true disallow_any_generics = true check_untyped_defs = true no_implicit_reexport = true disallow_untyped_defs = true pydantic-1.10.14/tests/mypy/configs/pyproject-plugin-bad-param.toml000066400000000000000000000012031455251250200253100ustar00rootroot00000000000000[build-system] requires = ["poetry>=0.12"] build_backend = "poetry.masonry.api" [tool.poetry] name = "test" version = "0.0.1" readme = "README.md" authors = [ "author@example.com" ] [tool.poetry.dependencies] python = "*" [tool.pytest.ini_options] addopts = "-v -p no:warnings" [tool.mypy] plugins = [ "pydantic.mypy" ] follow_imports = "silent" strict_optional = true warn_redundant_casts = true 
warn_unused_ignores = true disallow_any_generics = true check_untyped_defs = true no_implicit_reexport = true disallow_untyped_defs = true [tool.pydantic-mypy] init_forbid_extra = "foo" # this will raise a ValueError for the config pydantic-1.10.14/tests/mypy/configs/pyproject-plugin-strict.toml000066400000000000000000000012451455251250200250020ustar00rootroot00000000000000[build-system] requires = ["poetry>=0.12"] build_backend = "poetry.masonry.api" [tool.poetry] name = "test" version = "0.0.1" readme = "README.md" authors = [ "author@example.com" ] [tool.poetry.dependencies] python = "*" [tool.pytest.ini_options] addopts = "-v -p no:warnings" [tool.mypy] plugins = [ "pydantic.mypy" ] follow_imports = "silent" strict_optional = true warn_redundant_casts = true warn_unused_ignores = true disallow_any_generics = true check_untyped_defs = true no_implicit_reexport = true disallow_untyped_defs = true [tool.pydantic-mypy] init_forbid_extra = true init_typed = true warn_required_dynamic_aliases = true warn_untyped_fields = true pydantic-1.10.14/tests/mypy/configs/pyproject-plugin.toml000066400000000000000000000010441455251250200234710ustar00rootroot00000000000000[build-system] requires = ["poetry>=0.12"] build_backend = "poetry.masonry.api" [tool.poetry] name = "test" version = "0.0.1" readme = "README.md" authors = [ "author@example.com" ] [tool.poetry.dependencies] python = "*" [tool.pytest.ini_options] addopts = "-v -p no:warnings" [tool.mypy] plugins = [ "pydantic.mypy" ] follow_imports = "silent" strict_optional = true warn_redundant_casts = true warn_unused_ignores = true disallow_any_generics = true check_untyped_defs = true no_implicit_reexport = true disallow_untyped_defs = true pydantic-1.10.14/tests/mypy/modules/000077500000000000000000000000001455251250200173025ustar00rootroot00000000000000pydantic-1.10.14/tests/mypy/modules/custom_constructor.py000066400000000000000000000004031455251250200236300ustar00rootroot00000000000000from pydantic import BaseModel class Person(BaseModel): id: int name: str birth_year: int def __init__(self, id: int) -> None: super().__init__(id=id, name='Patrick', birth_year=1991) Person(1) Person(id=1) Person(name='Patrick') pydantic-1.10.14/tests/mypy/modules/fail1.py000066400000000000000000000007711455251250200206550ustar00rootroot00000000000000""" Test mypy failure with missing attribute """ from datetime import datetime from typing import List, Optional from pydantic import BaseModel, NoneStr from pydantic.types import Json class Model(BaseModel): age: int first_name = 'John' last_name: NoneStr = None signup_ts: Optional[datetime] = None list_of_ints: List[int] json_list_of_ints: Json[List[int]] m = Model(age=42, list_of_ints=[1, '2', b'3']) print(m.age + 'not integer') m.json_list_of_ints[0] + 'not integer' pydantic-1.10.14/tests/mypy/modules/fail2.py000066400000000000000000000005731455251250200206560ustar00rootroot00000000000000""" Test mypy failure with invalid types. """ from datetime import datetime from typing import List, Optional from pydantic import BaseModel, NoneStr class Model(BaseModel): age: int first_name = 'John' last_name: NoneStr = None signup_ts: Optional[datetime] = None list_of_ints: List[int] m = Model(age=42, list_of_ints=[1, '2', b'3']) print(m.foobar) pydantic-1.10.14/tests/mypy/modules/fail3.py000066400000000000000000000007061455251250200206550ustar00rootroot00000000000000""" Test mypy failure with invalid types. 
""" from typing import Generic, List, TypeVar from pydantic import BaseModel from pydantic.generics import GenericModel T = TypeVar('T') class Model(BaseModel): list_of_ints: List[int] class WrapperModel(GenericModel, Generic[T]): payload: T model_instance = Model(list_of_ints=[1]) wrapper_instance = WrapperModel[Model](payload=model_instance) wrapper_instance.payload.list_of_ints.append('1') pydantic-1.10.14/tests/mypy/modules/fail4.py000066400000000000000000000005671455251250200206630ustar00rootroot00000000000000from pydantic import validate_arguments @validate_arguments def foo(a: int, *, c: str = 'x') -> str: return c * a # ok x: str = foo(1, c='hello') # fails foo('x') foo(1, c=1) foo(1, 2) foo(1, d=2) # mypy assumes foo is just a function callable(foo.raw_function) @validate_arguments def bar() -> str: return 'x' # return type should be a string y: int = bar() pydantic-1.10.14/tests/mypy/modules/fail_defaults.py000066400000000000000000000007161455251250200224620ustar00rootroot00000000000000from pydantic import BaseModel, Field class Model(BaseModel): # Required undefined_default_no_args: int = Field() undefined_default: int = Field(description='my desc') positional_ellipsis_default: int = Field(...) named_ellipsis_default: int = Field(default=...) # Not required positional_default: int = Field(1) named_default: int = Field(default=2) named_default_factory: int = Field(default_factory=lambda: 3) Model() pydantic-1.10.14/tests/mypy/modules/no_any.py000066400000000000000000000002001455251250200211270ustar00rootroot00000000000000from pydantic.dataclasses import dataclass @dataclass class Foo: foo: int @dataclass(config={}) class Bar: bar: str pydantic-1.10.14/tests/mypy/modules/plugin_default_factory.py000066400000000000000000000010701455251250200244030ustar00rootroot00000000000000""" See https://github.com/pydantic/pydantic/issues/4457 """ from typing import Dict, List from pydantic import BaseModel, Field def new_list() -> List[int]: return [] class Model(BaseModel): l1: List[str] = Field(default_factory=list) l2: List[int] = Field(default_factory=new_list) l3: List[str] = Field(default_factory=lambda: list()) l4: Dict[str, str] = Field(default_factory=dict) l5: int = Field(default_factory=lambda: 123) l6_error: List[str] = Field(default_factory=new_list) l7_error: int = Field(default_factory=list) pydantic-1.10.14/tests/mypy/modules/plugin_fail.py000066400000000000000000000115141455251250200221470ustar00rootroot00000000000000from typing import Any, Generic, List, Optional, Set, TypeVar, Union from pydantic import BaseModel, BaseSettings, Extra, Field, validator from pydantic.dataclasses import dataclass from pydantic.generics import GenericModel class Model(BaseModel): x: int y: str def method(self) -> None: pass class Config: alias_generator = None allow_mutation = False extra = Extra.forbid def config_method(self) -> None: ... 
model = Model(x=1, y='y', z='z') model = Model(x=1) model.y = 'a' Model.from_orm({}) Model.from_orm({}) # type: ignore[pydantic-orm] # noqa F821 class ForbidExtraModel(BaseModel): class Config: extra = 'forbid' ForbidExtraModel(x=1) class ForbidExtraModel2(BaseModel): class Config: extra = 'forbid' validate_all = False Config.validate_all = True ForbidExtraModel2(x=1) class BadExtraModel(BaseModel): class Config: extra = 1 # type: ignore[pydantic-config] # noqa F821 extra = 1 class BadConfig1(BaseModel): class Config: orm_mode: Any = {} # not sensible, but should still be handled gracefully class BadConfig2(BaseModel): class Config: orm_mode = list # not sensible, but should still be handled gracefully class InheritingModel(Model): class Config: allow_mutation = True class DefaultTestingModel(BaseModel): # Required a: int b: int = ... c: int = Field(...) d: Union[int, str] e = ... # Not required f: Optional[int] g: int = 1 h: int = Field(1) i: int = Field(None) j = 1 DefaultTestingModel() class UndefinedAnnotationModel(BaseModel): undefined: Undefined # noqa F821 UndefinedAnnotationModel() class Settings(BaseSettings): x: int Model.construct(x=1) Model.construct(_fields_set={'x'}, x=1, y='2') Model.construct(x='1', y='2') Settings() # should pass here due to possibly reading from environment # Strict mode fails inheriting = InheritingModel(x='1', y='1') Settings(x='1') Model(x='1', y='2') class Blah(BaseModel): fields_set: Optional[Set[str]] = None # (comment to keep line numbers unchanged) T = TypeVar('T') class Response(GenericModel, Generic[T]): data: T error: Optional[str] response = Response[Model](data=model, error=None) response = Response[Model](data=1, error=None) class AliasModel(BaseModel): x: str = Field(..., alias='y') z: int AliasModel(y=1, z=2) x_alias = 'y' class DynamicAliasModel(BaseModel): x: str = Field(..., alias=x_alias) z: int DynamicAliasModel(y='y', z='1') class DynamicAliasModel2(BaseModel): x: str = Field(..., alias=x_alias) z: int class Config: allow_population_by_field_name = True DynamicAliasModel2(y='y', z=1) DynamicAliasModel2(x='y', z=1) class AliasGeneratorModel(BaseModel): x: int class Config: alias_generator = lambda x: x + '_' # noqa E731 AliasGeneratorModel(x=1) AliasGeneratorModel(x_=1) AliasGeneratorModel(z=1) class AliasGeneratorModel2(BaseModel): x: int = Field(..., alias='y') class Config: # type: ignore[pydantic-alias] # noqa F821 alias_generator = lambda x: x + '_' # noqa E731 class UntypedFieldModel(BaseModel): x: int = 1 y = 2 z = 2 # type: ignore[pydantic-field] # noqa F821 AliasGeneratorModel2(x=1) AliasGeneratorModel2(y=1, z=1) class CoverageTester(Missing): # noqa F821 def from_orm(self) -> None: pass CoverageTester().from_orm() @dataclass(config={}) class AddProject: name: str slug: Optional[str] description: Optional[str] p = AddProject(name='x', slug='y', description='z') # Same as Model, but with frozen = True class FrozenModel(BaseModel): x: int y: str class Config: alias_generator = None frozen = True extra = Extra.forbid frozenmodel = FrozenModel(x=1, y='b') frozenmodel.y = 'a' class InheritingModel2(FrozenModel): class Config: frozen = False inheriting2 = InheritingModel2(x=1, y='c') inheriting2.y = 'd' def _default_factory() -> str: ... 
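# The model below mixes valid and invalid defaults on purpose; the plugin is
# expected to flag e.g. `Field(default_factory=3)` (not callable) and
# `Field(default=1, default_factory=list)` (mutually exclusive arguments).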
test: List[str] = [] class FieldDefaultTestingModel(BaseModel): # Default e: int = Field(None) f: int = None # Default factory g: str = Field(default_factory=set) h: int = Field(default_factory=_default_factory) i: List[int] = Field(default_factory=list) l: str = Field(default_factory=3) # Default and default factory m: int = Field(default=1, default_factory=list) class ModelWithAnnotatedValidator(BaseModel): name: str @validator('name') def noop_validator_with_annotations(self, name: str) -> str: # This is a mistake: the first argument to a validator is the class itself, # like a classmethod. self.instance_method() return name def instance_method(self) -> None: ... pydantic-1.10.14/tests/mypy/modules/plugin_success.py000066400000000000000000000114021455251250200227000ustar00rootroot00000000000000from typing import Any, ClassVar, Generic, List, Optional, TypeVar, Union from pydantic import BaseModel, BaseSettings, Field, create_model, validator from pydantic.dataclasses import dataclass from pydantic.generics import GenericModel class Model(BaseModel): x: float y: str class Config: orm_mode = True class NotConfig: allow_mutation = False class SelfReferencingModel(BaseModel): submodel: Optional['SelfReferencingModel'] @property def prop(self) -> None: ... SelfReferencingModel.update_forward_refs() model = Model(x=1, y='y') Model(x=1, y='y', z='z') model.x = 2 model.from_orm(model) self_referencing_model = SelfReferencingModel(submodel=SelfReferencingModel(submodel=None)) class InheritingModel(Model): z: int = 1 InheritingModel.from_orm(model) class ForwardReferencingModel(Model): future: 'FutureModel' class FutureModel(Model): pass ForwardReferencingModel.update_forward_refs() future_model = FutureModel(x=1, y='a') forward_model = ForwardReferencingModel(x=1, y='a', future=future_model) class NoMutationModel(BaseModel): x: int class Config: allow_mutation = False class MutationModel(NoMutationModel): a = 1 class Config: allow_mutation = True orm_mode = True MutationModel(x=1).x = 2 MutationModel.from_orm(model) class OverrideModel(Model): x: int OverrideModel(x=1.5, y='b') class Mixin: def f(self) -> None: pass class MultiInheritanceModel(BaseModel, Mixin): pass MultiInheritanceModel().f() class AliasModel(BaseModel): x: str = Field(..., alias='y') alias_model = AliasModel(y='hello') assert alias_model.x == 'hello' class ClassVarModel(BaseModel): x: int y: ClassVar[int] = 1 ClassVarModel(x=1) class Config: validate_assignment = True @dataclass(config=Config) class AddProject: name: str slug: Optional[str] description: Optional[str] p = AddProject(name='x', slug='y', description='z') class TypeAliasAsAttribute(BaseModel): __type_alias_attribute__ = Union[str, bytes] class NestedModel(BaseModel): class Model(BaseModel): id: str model: Model _ = NestedModel.Model DynamicModel = create_model('DynamicModel', __base__=Model) dynamic_model = DynamicModel(x=1, y='y') dynamic_model.x = 2 class FrozenModel(BaseModel): x: int class Config: frozen = True class NotFrozenModel(FrozenModel): a: int = 1 class Config: frozen = False orm_mode = True NotFrozenModel(x=1).x = 2 NotFrozenModel.from_orm(model) class ModelWithSelfField(BaseModel): self: str class SettingsModel(BaseSettings): pass settings = SettingsModel.construct() def f(name: str) -> str: return name class ModelWithAllowReuseValidator(BaseModel): name: str _normalize_name = validator('name', allow_reuse=True)(f) model_with_allow_reuse_validator = ModelWithAllowReuseValidator(name='xyz') T = TypeVar('T') class Response(GenericModel, Generic[T]): 
data: T error: Optional[str] response = Response[Model](data=model, error=None) class ModelWithAnnotatedValidator(BaseModel): name: str @validator('name') def noop_validator_with_annotations(cls, name: str) -> str: return name def _default_factory_str() -> str: ... def _default_factory_list() -> List[int]: ... class FieldDefaultTestingModel(BaseModel): # Required a: int b: int = Field() c: int = Field(...) # Default d: int = Field(1) # Default factory g: List[int] = Field(default_factory=_default_factory_list) h: str = Field(default_factory=_default_factory_str) i: str = Field(default_factory=lambda: 'test') # Include the import down here to reduce the effect on line numbers from dataclasses import InitVar # noqa E402 @dataclass class MyDataClass: foo: InitVar[str] bar: str MyDataClass(foo='foo', bar='bar') _TModel = TypeVar('_TModel') _TType = TypeVar('_TType') class OrmMixin(Generic[_TModel, _TType]): @classmethod def from_orm(cls, model: _TModel) -> _TType: raise NotImplementedError @classmethod def from_orm_optional(cls, model: Optional[_TModel]) -> Optional[_TType]: if model is None: return None return cls.from_orm(model) class Sample(BaseModel): foo: str bar: Optional[str] = Field(description='optional') zoo: Any Sample(foo='hello world') def get_my_custom_validator(field_name: str) -> Any: @validator(field_name, allow_reuse=True) def my_custom_validator(cls: Any, v: int) -> int: return v return my_custom_validator def foo() -> None: class MyModel(BaseModel): number: int custom_validator = get_my_custom_validator('number') MyModel(number=2) pydantic-1.10.14/tests/mypy/modules/settings_config.py000066400000000000000000000002471455251250200230440ustar00rootroot00000000000000from pydantic import BaseSettings class Settings(BaseSettings): class Config(BaseSettings.Config): env_file = '.env' env_file_encoding = 'utf-8' pydantic-1.10.14/tests/mypy/modules/success.py000066400000000000000000000161171455251250200213320ustar00rootroot00000000000000""" Test pydantic's compliance with mypy. Do a little skipping about with types to demonstrate its usage. 
""" import json import os from datetime import date, datetime, timedelta from pathlib import Path, PurePath from typing import Any, Dict, ForwardRef, Generic, List, Optional, TypeVar from uuid import UUID from typing_extensions import TypedDict from pydantic import ( UUID1, BaseConfig, BaseModel, BaseSettings, DirectoryPath, Extra, FilePath, FutureDate, Json, NegativeFloat, NegativeInt, NoneStr, NonNegativeFloat, NonNegativeInt, NonPositiveFloat, NonPositiveInt, PastDate, PositiveFloat, PositiveInt, PyObject, StrictBool, StrictBytes, StrictFloat, StrictInt, StrictStr, create_model, create_model_from_typeddict, root_validator, stricturl, validate_arguments, validator, ) from pydantic.fields import Field, PrivateAttr from pydantic.generics import GenericModel class Flags(BaseModel): strict_bool: StrictBool = False def __str__(self) -> str: return f'flag={self.strict_bool}' class Model(BaseModel): age: int first_name = 'John' last_name: NoneStr = None signup_ts: Optional[datetime] = None list_of_ints: List[int] @validator('age') def check_age(cls, value: int) -> int: assert value < 100, 'too old' return value @root_validator def root_check(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values @root_validator(pre=True, allow_reuse=False, skip_on_failure=False) def pre_root_check(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values def dog_years(age: int) -> int: return age * 7 def day_of_week(dt: datetime) -> int: return dt.date().isoweekday() m = Model(age=21, list_of_ints=[1, '2', b'3']) assert m.age == 21, m.age m.age = 42 assert m.age == 42, m.age assert m.first_name == 'John', m.first_name assert m.last_name is None, m.last_name assert m.list_of_ints == [1, 2, 3], m.list_of_ints dog_age = dog_years(m.age) assert dog_age == 294, dog_age m = Model(age=2, first_name=b'Woof', last_name=b'Woof', signup_ts='2017-06-07 00:00', list_of_ints=[1, '2', b'3']) assert m.first_name == 'Woof', m.first_name assert m.last_name == 'Woof', m.last_name assert m.signup_ts == datetime(2017, 6, 7), m.signup_ts assert day_of_week(m.signup_ts) == 3 data = {'age': 10, 'first_name': 'Alena', 'last_name': 'Sousova', 'list_of_ints': [410]} m_from_obj = Model.parse_obj(data) assert isinstance(m_from_obj, Model) assert m_from_obj.age == 10 assert m_from_obj.first_name == data['first_name'] assert m_from_obj.last_name == data['last_name'] assert m_from_obj.list_of_ints == data['list_of_ints'] m_from_raw = Model.parse_raw(json.dumps(data)) assert isinstance(m_from_raw, Model) assert m_from_raw.age == m_from_obj.age assert m_from_raw.first_name == m_from_obj.first_name assert m_from_raw.last_name == m_from_obj.last_name assert m_from_raw.list_of_ints == m_from_obj.list_of_ints m_copy = m_from_obj.copy() assert isinstance(m_from_raw, Model) assert m_copy.age == m_from_obj.age assert m_copy.first_name == m_from_obj.first_name assert m_copy.last_name == m_from_obj.last_name assert m_copy.list_of_ints == m_from_obj.list_of_ints T = TypeVar('T') class WrapperModel(GenericModel, Generic[T]): payload: T int_instance = WrapperModel[int](payload=1) int_instance.payload += 1 assert int_instance.payload == 2 str_instance = WrapperModel[str](payload='a') str_instance.payload += 'a' assert str_instance.payload == 'aa' model_instance = WrapperModel[Model](payload=m) model_instance.payload.list_of_ints.append(4) assert model_instance.payload.list_of_ints == [1, 2, 3, 4] class WithField(BaseModel): age: int first_name: str = Field('John', const=True) # simple decorator @validate_arguments def foo(a: int, *, c: str = 'x') 
-> str: return c * a foo(1, c='thing') foo(1) # nested decorator should not produce an error @validate_arguments(config={'arbitrary_types_allowed': True}) def bar(a: int, *, c: str = 'x') -> str: return c * a bar(1, c='thing') bar(1) class Foo(BaseModel): a: int FooRef = ForwardRef('Foo') class MyConf(BaseModel): str_pyobject: PyObject = Field('datetime.date') callable_pyobject: PyObject = Field(date) conf = MyConf() var1: date = conf.str_pyobject(2020, 12, 20) var2: date = conf.callable_pyobject(2111, 1, 1) class MyPrivateAttr(BaseModel): _private_field: str = PrivateAttr() class PydanticTypes(BaseModel): # Boolean my_strict_bool: StrictBool = True # Integer my_positive_int: PositiveInt = 1 my_negative_int: NegativeInt = -1 my_non_positive_int: NonPositiveInt = -1 my_non_negative_int: NonNegativeInt = 1 my_strict_int: StrictInt = 1 # Float my_positive_float: PositiveFloat = 1.1 my_negative_float: NegativeFloat = -1.1 my_non_positive_float: NonPositiveFloat = -1.1 my_non_negative_float: NonNegativeFloat = 1.1 my_strict_float: StrictFloat = 1.1 # Bytes my_strict_bytes: StrictBytes = b'pika' # String my_strict_str: StrictStr = 'pika' # PyObject my_pyobject_str: PyObject = 'datetime.date' # type: ignore my_pyobject_callable: PyObject = date # UUID my_uuid1: UUID1 = UUID('a8098c1a-f86e-11da-bd1a-00112444be1e') my_uuid1_str: UUID1 = 'a8098c1a-f86e-11da-bd1a-00112444be1e' # type: ignore # Path my_file_path: FilePath = Path(__file__) my_file_path_str: FilePath = __file__ # type: ignore my_dir_path: DirectoryPath = Path('.') my_dir_path_str: DirectoryPath = '.' # type: ignore # Json my_json: Json[Dict[str, str]] = '{"hello": "world"}' # type: ignore my_json_list: Json[List[str]] = '["hello", "world"]' # type: ignore # Date my_past_date: PastDate = date.today() - timedelta(1) my_future_date: FutureDate = date.today() + timedelta(1) class Config: validate_all = True validated = PydanticTypes() validated.my_pyobject_str(2021, 1, 1) validated.my_pyobject_callable(2021, 1, 1) validated.my_uuid1.hex validated.my_uuid1_str.hex validated.my_file_path.absolute() validated.my_file_path_str.absolute() validated.my_dir_path.absolute() validated.my_dir_path_str.absolute() validated.my_json['hello'].capitalize() validated.my_json_list[0].capitalize() stricturl(allowed_schemes={'http'}) stricturl(allowed_schemes=frozenset({'http'})) stricturl(allowed_schemes=('s3', 's3n', 's3a')) class SomeDict(TypedDict): val: int name: str obj: SomeDict = { 'val': 12, 'name': 'John', } class Config(BaseConfig): title = 'Record' extra = Extra.ignore max_anystr_length = 1234 class Settings(BaseSettings): ... 
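# --- Illustrative sketch (added, not from the original test suite): shows how
# BaseSettings falls back to field defaults and coerces environment variables.
# `ExampleSettings` and the `APP_` prefix are hypothetical names; only documented
# pydantic v1 APIs (BaseSettings, Config.env_prefix) are used.
class ExampleSettings(BaseSettings):
    debug: bool = False
    port: int = 8000

    class Config:
        env_prefix = 'APP_'  # reads APP_DEBUG / APP_PORT from the environment


example_settings = ExampleSettings()
assert example_settings.port == 8000  # default applies, assuming APP_PORT is unset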
class CustomPath(PurePath): def __init__(self, *args: str): self.path = os.path.join(*args) def __fspath__(self) -> str: return f'a/custom/{self.path}' def dont_check_path_existence() -> None: Settings(_env_file='a/path', _secrets_dir='a/path') Settings(_env_file=CustomPath('a/path'), _secrets_dir=CustomPath('a/path')) create_model_from_typeddict(SomeDict)(**obj) DynamicModel = create_model('DynamicModel') pydantic-1.10.14/tests/mypy/outputs/000077500000000000000000000000001455251250200173555ustar00rootroot00000000000000pydantic-1.10.14/tests/mypy/outputs/custom_constructor.txt000066400000000000000000000001461455251250200240760ustar00rootroot000000000000009: note: "Person" defined here 15: error: Unexpected keyword argument "name" for "Person" [call-arg] pydantic-1.10.14/tests/mypy/outputs/fail1.txt000066400000000000000000000002211455251250200211050ustar00rootroot0000000000000022: error: Unsupported operand types for + ("int" and "str") [operator] 23: error: Unsupported operand types for + ("int" and "str") [operator]pydantic-1.10.14/tests/mypy/outputs/fail2.txt000066400000000000000000000000741455251250200211140ustar00rootroot0000000000000020: error: "Model" has no attribute "foobar" [attr-defined]pydantic-1.10.14/tests/mypy/outputs/fail3.txt000066400000000000000000000001431455251250200211120ustar00rootroot0000000000000022: error: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [arg-type]pydantic-1.10.14/tests/mypy/outputs/fail4.txt000066400000000000000000000011721455251250200211160ustar00rootroot000000000000005: note: "foo" defined here 12: error: Argument 1 to "foo" has incompatible type "str"; expected "int" [arg-type] 13: error: Argument "c" to "foo" has incompatible type "int"; expected "str" [arg-type] 14: error: Too many positional arguments for "foo" [misc] 14: error: Argument 2 to "foo" has incompatible type "int"; expected "str" [arg-type] 15: error: Unexpected keyword argument "d" for "foo" [call-arg] 17: error: "Callable[[int, DefaultNamedArg(str, 'c')], str]" has no attribute "raw_function" [attr-defined] 26: error: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment]pydantic-1.10.14/tests/mypy/outputs/fail_defaults.txt000066400000000000000000000005171455251250200227230ustar00rootroot0000000000000017: error: Missing named argument "undefined_default_no_args" for "Model" [call-arg] 17: error: Missing named argument "undefined_default" for "Model" [call-arg] 17: error: Missing named argument "positional_ellipsis_default" for "Model" [call-arg] 17: error: Missing named argument "named_ellipsis_default" for "Model" [call-arg] pydantic-1.10.14/tests/mypy/outputs/plugin-fail-strict.txt000066400000000000000000000070341455251250200236370ustar00rootroot0000000000000024: error: Unexpected keyword argument "z" for "Model" [call-arg] 25: error: Missing named argument "y" for "Model" [call-arg] 26: error: Property "y" defined in "Model" is read-only [misc] 27: error: "Model" does not have orm_mode=True [pydantic-orm] 36: error: Unexpected keyword argument "x" for "ForbidExtraModel" [call-arg] 47: error: Unexpected keyword argument "x" for "ForbidExtraModel2" [call-arg] 53: error: Invalid value for "Config.extra" [pydantic-config] 58: error: Invalid value for "Config.orm_mode" [pydantic-config] 63: error: Invalid value for "Config.orm_mode" [pydantic-config] 74: error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int") [assignment] 77: error: Untyped fields disallowed 
[pydantic-field] 83: error: Incompatible types in assignment (expression has type "None", variable has type "int") [assignment] 84: error: Untyped fields disallowed [pydantic-field] 87: error: Missing named argument "a" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "b" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "c" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "d" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "e" for "DefaultTestingModel" [call-arg] 91: error: Name "Undefined" is not defined [name-defined] 94: error: Missing named argument "undefined" for "UndefinedAnnotationModel" [call-arg] 101: error: Missing named argument "y" for "construct" of "Model" [call-arg] 103: error: Argument "x" to "construct" of "Model" has incompatible type "str"; expected "int" [arg-type] 108: error: Argument "x" to "InheritingModel" has incompatible type "str"; expected "int" [arg-type] 109: error: Argument "x" to "Settings" has incompatible type "str"; expected "int" [arg-type] 110: error: Argument "x" to "Model" has incompatible type "str"; expected "int" [arg-type] 127: error: Argument "data" to "Response" has incompatible type "int"; expected "Model" [arg-type] 135: error: Argument "y" to "AliasModel" has incompatible type "int"; expected "str" [arg-type] 141: error: Required dynamic aliases disallowed [pydantic-alias] 145: error: Argument "z" to "DynamicAliasModel" has incompatible type "str"; expected "int" [arg-type] 156: error: Unexpected keyword argument "y" for "DynamicAliasModel2" [call-arg] 163: error: Required dynamic aliases disallowed [pydantic-alias] 181: error: Untyped fields disallowed [pydantic-field] 185: error: Unexpected keyword argument "x" for "AliasGeneratorModel2" [call-arg] 186: error: Unexpected keyword argument "z" for "AliasGeneratorModel2" [call-arg] 189: error: Name "Missing" is not defined [name-defined] 219: error: Property "y" defined in "FrozenModel" is read-only [misc] 240: error: Incompatible types in assignment (expression has type "None", variable has type "int") [assignment] 241: error: Incompatible types in assignment (expression has type "None", variable has type "int") [assignment] 244: error: Incompatible types in assignment (expression has type "Set[Any]", variable has type "str") [assignment] 245: error: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] 247: error: Argument "default_factory" to "Field" has incompatible type "int"; expected "Optional[Callable[[], Any]]" [arg-type] 250: error: Field default and default_factory cannot be specified together [pydantic-field] 260: error: Missing positional argument "self" in call to "instance_method" of "ModelWithAnnotatedValidator" [call-arg] pydantic-1.10.14/tests/mypy/outputs/plugin-fail.txt000066400000000000000000000051151455251250200223270ustar00rootroot0000000000000024: error: Unexpected keyword argument "z" for "Model" [call-arg] 25: error: Missing named argument "y" for "Model" [call-arg] 26: error: Property "y" defined in "Model" is read-only [misc] 27: error: "Model" does not have orm_mode=True [pydantic-orm] 36: error: Unexpected keyword argument "x" for "ForbidExtraModel" [call-arg] 47: error: Unexpected keyword argument "x" for "ForbidExtraModel2" [call-arg] 53: error: Invalid value for "Config.extra" [pydantic-config] 58: error: Invalid value for "Config.orm_mode" [pydantic-config] 63: error: Invalid value for "Config.orm_mode" [pydantic-config] 74: 
error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int") [assignment] 83: error: Incompatible types in assignment (expression has type "None", variable has type "int") [assignment] 87: error: Missing named argument "a" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "b" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "c" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "d" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "e" for "DefaultTestingModel" [call-arg] 91: error: Name "Undefined" is not defined [name-defined] 94: error: Missing named argument "undefined" for "UndefinedAnnotationModel" [call-arg] 101: error: Missing named argument "y" for "construct" of "Model" [call-arg] 103: error: Argument "x" to "construct" of "Model" has incompatible type "str"; expected "int" [arg-type] 156: error: Missing named argument "x" for "DynamicAliasModel2" [call-arg] 175: error: Unused "type: ignore" comment 182: error: Unused "type: ignore" comment 189: error: Name "Missing" is not defined [name-defined] 219: error: Property "y" defined in "FrozenModel" is read-only [misc] 240: error: Incompatible types in assignment (expression has type "None", variable has type "int") [assignment] 241: error: Incompatible types in assignment (expression has type "None", variable has type "int") [assignment] 244: error: Incompatible types in assignment (expression has type "Set[Any]", variable has type "str") [assignment] 245: error: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] 247: error: Argument "default_factory" to "Field" has incompatible type "int"; expected "Optional[Callable[[], Any]]" [arg-type] 250: error: Field default and default_factory cannot be specified together [pydantic-field] 260: error: Missing positional argument "self" in call to "instance_method" of "ModelWithAnnotatedValidator" [call-arg] pydantic-1.10.14/tests/mypy/outputs/plugin-success-strict.txt000066400000000000000000000004271455251250200243730ustar00rootroot0000000000000030: error: Unexpected keyword argument "z" for "Model" [call-arg] 65: error: Untyped fields disallowed [pydantic-field] 80: error: Argument "x" to "OverrideModel" has incompatible type "float"; expected "int" [arg-type] 278: error: Untyped fields disallowed [pydantic-field] pydantic-1.10.14/tests/mypy/outputs/plugin_default_factory.txt000066400000000000000000000003601455251250200246460ustar00rootroot0000000000000020: error: Incompatible types in assignment (expression has type "List[int]", variable has type "List[str]") [assignment] 21: error: Incompatible types in assignment (expression has type "List[Any]", variable has type "int") [assignment] pydantic-1.10.14/tests/mypy/outputs/plugin_success.txt000066400000000000000000000006031455251250200231430ustar00rootroot00000000000000122: error: Unexpected keyword argument "name" for "AddProject" [call-arg] 122: error: Unexpected keyword argument "slug" for "AddProject" [call-arg] 122: error: Unexpected keyword argument "description" for "AddProject" [call-arg] 239: error: Unexpected keyword argument "foo" for "MyDataClass" [call-arg] 239: error: Unexpected keyword argument "bar" for "MyDataClass" [call-arg] pydantic-1.10.14/tests/mypy/test_mypy.py000066400000000000000000000153731455251250200202520ustar00rootroot00000000000000import importlib import os import re from pathlib import Path import pytest try: from mypy import api as mypy_api from 
mypy.version import __version__ as mypy_version from pydantic.mypy import parse_mypy_version except ImportError: mypy_api = None mypy_version = None parse_mypy_version = lambda _: (0,) # noqa: E731 try: import dotenv except ImportError: dotenv = None # This ensures mypy can find the test files, no matter where tests are run from: os.chdir(Path(__file__).parent.parent.parent) cases = [ ('mypy-plugin.ini', 'plugin_success.py', None), ('mypy-plugin.ini', 'plugin_fail.py', 'plugin-fail.txt'), ('mypy-plugin.ini', 'custom_constructor.py', 'custom_constructor.txt'), ('mypy-plugin-strict.ini', 'plugin_success.py', 'plugin-success-strict.txt'), ('mypy-plugin-strict.ini', 'plugin_fail.py', 'plugin-fail-strict.txt'), ('mypy-plugin-strict.ini', 'fail_defaults.py', 'fail_defaults.txt'), ('mypy-default.ini', 'success.py', None), ('mypy-default.ini', 'fail1.py', 'fail1.txt'), ('mypy-default.ini', 'fail2.py', 'fail2.txt'), ('mypy-default.ini', 'fail3.py', 'fail3.txt'), ('mypy-default.ini', 'fail4.py', 'fail4.txt'), ('mypy-default.ini', 'plugin_success.py', 'plugin_success.txt'), ('mypy-plugin-strict-no-any.ini', 'no_any.py', None), ('pyproject-default.toml', 'success.py', None), ('pyproject-default.toml', 'fail1.py', 'fail1.txt'), ('pyproject-default.toml', 'fail2.py', 'fail2.txt'), ('pyproject-default.toml', 'fail3.py', 'fail3.txt'), ('pyproject-default.toml', 'fail4.py', 'fail4.txt'), ('pyproject-plugin.toml', 'plugin_success.py', None), ('pyproject-plugin.toml', 'plugin_fail.py', 'plugin-fail.txt'), ('pyproject-plugin-strict.toml', 'plugin_success.py', 'plugin-success-strict.txt'), ('pyproject-plugin-strict.toml', 'plugin_fail.py', 'plugin-fail-strict.txt'), ('pyproject-plugin-strict.toml', 'fail_defaults.py', 'fail_defaults.txt'), ('mypy-plugin-strict.ini', 'settings_config.py', None), ('mypy-plugin-strict.ini', 'plugin_default_factory.py', 'plugin_default_factory.txt'), ] executable_modules = list({fname[:-3] for _, fname, out_fname in cases if out_fname is None}) @pytest.mark.skipif(not (dotenv and mypy_api), reason='dotenv or mypy are not installed') @pytest.mark.parametrize('config_filename,python_filename,output_filename', cases) def test_mypy_results(config_filename: str, python_filename: str, output_filename: str) -> None: full_config_filename = f'tests/mypy/configs/{config_filename}' full_filename = f'tests/mypy/modules/{python_filename}' output_path = None if output_filename is None else Path(f'tests/mypy/outputs/{output_filename}') # Specifying a different cache dir for each configuration dramatically speeds up subsequent execution # It also prevents cache-invalidation-related bugs in the tests cache_dir = f'.mypy_cache/test-{os.path.splitext(config_filename)[0]}' command = [ full_filename, '--config-file', full_config_filename, '--cache-dir', cache_dir, '--show-error-codes', '--show-traceback', ] print(f"\nExecuting: mypy {' '.join(command)}") # makes it easier to debug as necessary actual_result = mypy_api.run(command) actual_out, actual_err, actual_returncode = actual_result # Need to strip filenames due to differences in formatting by OS actual_out = '\n'.join(['.py:'.join(line.split('.py:')[1:]) for line in actual_out.split('\n') if line]).strip() actual_out = re.sub(r'\n\s*\n', r'\n', actual_out) if actual_out: print('{0}\n{1:^100}\n{0}\n{2}\n{0}'.format('=' * 100, 'mypy output', actual_out)) assert actual_err == '' expected_returncode = 0 if output_filename is None else 1 assert actual_returncode == expected_returncode if output_path and not output_path.exists(): 
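        # Added descriptive comment: the expected-output file does not exist yet, so seed it
        # with the actual mypy output, then raise so the newly generated file is reviewed
        # rather than silently trusted on the next run.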
output_path.write_text(actual_out) raise RuntimeError(f'wrote actual output to {output_path} since file did not exist') expected_out = Path(output_path).read_text().rstrip('\n') if output_path else '' # fix for compatibility between mypy versions: (this can be dropped once we drop support for mypy<0.930) if actual_out and parse_mypy_version(mypy_version) < (0, 930): actual_out = actual_out.lower() expected_out = expected_out.lower() actual_out = actual_out.replace('variant:', 'variants:') actual_out = re.sub(r'^(\d+: note: {4}).*', r'\1...', actual_out, flags=re.M) expected_out = re.sub(r'^(\d+: note: {4}).*', r'\1...', expected_out, flags=re.M) assert actual_out == expected_out, actual_out @pytest.mark.skipif(not (dotenv and mypy_api), reason='dotenv or mypy are not installed') def test_bad_toml_config() -> None: full_config_filename = 'tests/mypy/configs/pyproject-plugin-bad-param.toml' full_filename = 'tests/mypy/modules/success.py' # Specifying a different cache dir for each configuration dramatically speeds up subsequent execution # It also prevents cache-invalidation-related bugs in the tests cache_dir = '.mypy_cache/test-pyproject-plugin-bad-param' command = [full_filename, '--config-file', full_config_filename, '--cache-dir', cache_dir, '--show-error-codes'] print(f"\nExecuting: mypy {' '.join(command)}") # makes it easier to debug as necessary with pytest.raises(ValueError) as e: mypy_api.run(command) assert str(e.value) == 'Configuration value must be a boolean for key: init_forbid_extra' @pytest.mark.parametrize('module', executable_modules) def test_success_cases_run(module: str) -> None: """ Ensure the "success" files can actually be executed """ importlib.import_module(f'tests.mypy.modules.{module}') def test_explicit_reexports(): from pydantic import __all__ as root_all from pydantic.main import __all__ as main from pydantic.networks import __all__ as networks from pydantic.tools import __all__ as tools from pydantic.types import __all__ as types for name, export_all in [('main', main), ('network', networks), ('tools', tools), ('types', types)]: for export in export_all: assert export in root_all, f'{export} is in {name}.__all__ but missing from re-export in __init__.py' def test_explicit_reexports_exist(): import pydantic for name in pydantic.__all__: assert hasattr(pydantic, name), f'{name} is in pydantic.__all__ but missing from pydantic' @pytest.mark.skipif(mypy_version is None, reason='mypy is not installed') @pytest.mark.parametrize( 'v_str,v_tuple', [ ('0', (0,)), ('0.930', (0, 930)), ('0.940+dev.04cac4b5d911c4f9529e6ce86a27b44f28846f5d.dirty', (0, 940)), ], ) def test_parse_mypy_version(v_str, v_tuple): assert parse_mypy_version(v_str) == v_tuple pydantic-1.10.14/tests/pyright/000077500000000000000000000000001455251250200163225ustar00rootroot00000000000000pydantic-1.10.14/tests/pyright/pyproject.toml000066400000000000000000000001471455251250200212400ustar00rootroot00000000000000[tool.pyright] extraPaths = ['../..'] reportUnnecessaryTypeIgnoreComment = true pythonVersion = '3.10' pydantic-1.10.14/tests/pyright/pyright_example.py000066400000000000000000000014261455251250200221000ustar00rootroot00000000000000""" This file is used to test pyright's ability to check pydantic code. In particular pydantic provides the `@__dataclass_transform__` for `BaseModel` and all subclasses (including `BaseSettings`), see #2721. 
""" from typing import List from pydantic import BaseModel, BaseSettings, Field class MyModel(BaseModel): x: str y: List[int] m1 = MyModel(x='hello', y=[1, 2, 3]) m2 = MyModel(x='hello') # pyright: ignore class Knight(BaseModel): title: str = Field(default='Sir Lancelot') # this is okay age: int = Field(23) # this works fine at runtime but will case an error for pyright k = Knight() # pyright: ignore class Settings(BaseSettings): x: str y: int s1 = Settings.parse_obj({}) s2 = Settings() # pyright: ignore[reportGeneralTypeIssues] pydantic-1.10.14/tests/requirements-linting.txt000066400000000000000000000002631455251250200215630ustar00rootroot00000000000000black==22.8.0 flake8==5.0.4 flake8-quotes==3.3.1 hypothesis==6.54.4 isort==5.10.1 pyupgrade==2.37.3 mypy==0.971 pre-commit==2.20.0 pycodestyle==2.9.1 pyflakes==2.5.0 twine==4.0.1 pydantic-1.10.14/tests/requirements-testing.txt000066400000000000000000000007001455251250200215700ustar00rootroot00000000000000coverage==6.4.4 hypothesis==6.54.4 # pin importlib-metadata as upper versions need typing-extensions to work if on Python < 3.8 importlib-metadata==3.6.0;python_version<"3.8" mypy==0.971 pytest==7.2.1 pytest-cov==4.0.0 pytest-mock==3.10.0 pytest-sugar==0.9.6 # pin typing-extensions to minimum requirement - see #4885 typing-extensions==4.6.0 # used in FastAPI tests, pin to avoid warnings in newer version # that FastAPI needs to fix Flask==2.2.3 pydantic-1.10.14/tests/test_abc.py000066400000000000000000000022301455251250200167670ustar00rootroot00000000000000import abc import pytest from pydantic import BaseModel def test_model_subclassing_abstract_base_classes(): class Model(BaseModel, abc.ABC): some_field: str def test_model_subclassing_abstract_base_classes_without_implementation_raises_exception(): class Model(BaseModel, abc.ABC): some_field: str @abc.abstractmethod def my_abstract_method(self): pass @classmethod @abc.abstractmethod def my_abstract_classmethod(cls): pass @staticmethod @abc.abstractmethod def my_abstract_staticmethod(): pass @property @abc.abstractmethod def my_abstract_property(self): pass @my_abstract_property.setter @abc.abstractmethod def my_abstract_property(self, val): pass with pytest.raises(TypeError) as excinfo: Model(some_field='some_value') assert str(excinfo.value) == ( "Can't instantiate abstract class Model with abstract methods " "my_abstract_classmethod, my_abstract_method, my_abstract_property, my_abstract_staticmethod" # noqa: Q000 ) pydantic-1.10.14/tests/test_aliases.py000066400000000000000000000250721455251250200176740ustar00rootroot00000000000000import re from contextlib import nullcontext as does_not_raise from typing import Any, ContextManager, List, Optional import pytest from pydantic import BaseConfig, BaseModel, Extra, ValidationError from pydantic.fields import Field def test_alias_generator(): def to_camel(string: str): return ''.join(x.capitalize() for x in string.split('_')) class MyModel(BaseModel): a: List[str] = None foo_bar: str class Config: alias_generator = to_camel data = {'A': ['foo', 'bar'], 'FooBar': 'foobar'} v = MyModel(**data) assert v.a == ['foo', 'bar'] assert v.foo_bar == 'foobar' assert v.dict(by_alias=True) == data def test_alias_generator_with_field_schema(): def to_upper_case(string: str): return string.upper() class MyModel(BaseModel): my_shiny_field: Any # Alias from Config.fields will be used foo_bar: str # Alias from Config.fields will be used baz_bar: str # Alias will be generated another_field: str # Alias will be generated class Config: alias_generator = 
to_camel data = {'A': ['foo', 'bar'], 'FooBar': 'foobar'} v = MyModel(**data) assert v.a == ['foo', 'bar'] assert v.foo_bar == 'foobar' assert v.dict(by_alias=True) == data def test_alias_generator_with_field_schema(): def to_upper_case(string: str): return string.upper() class MyModel(BaseModel): my_shiny_field: Any # Alias from Config.fields will be used foo_bar: str # Alias from Config.fields will be used baz_bar: str # Alias will be generated another_field: str # Alias will be generated class Config: alias_generator = to_upper_case fields = {'my_shiny_field': 'MY_FIELD', 'foo_bar': {'alias': 'FOO'}, 'another_field': {'not_alias': 'a'}} data = {'MY_FIELD': ['a'], 'FOO': 'bar', 'BAZ_BAR': 'ok', 'ANOTHER_FIELD': '...'} m = MyModel(**data) assert m.dict(by_alias=True) == data def test_alias_generator_wrong_type_error(): def return_bytes(string): return b'not a string' with pytest.raises(TypeError) as e: class MyModel(BaseModel): bar: Any class Config: alias_generator = return_bytes assert str(e.value) == "Config.alias_generator must return str, not <class 'bytes'>" def test_infer_alias(): class Model(BaseModel): a = 'foobar' class Config: fields = {'a': '_a'} assert Model(_a='different').a == 'different' assert repr(Model.__fields__['a']) == ( "ModelField(name='a', type=str, required=False, default='foobar', alias='_a')" ) def test_alias_error(): class Model(BaseModel): a = 123 class Config: fields = {'a': '_a'} assert Model(_a='123').a == 123 with pytest.raises(ValidationError) as exc_info: Model(_a='foo') assert exc_info.value.errors() == [ {'loc': ('_a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_annotation_config(): class Model(BaseModel): b: float a: int = 10 _c: str class Config: fields = {'b': 'foobar'} assert list(Model.__fields__.keys()) == ['b', 'a'] assert [f.alias for f in Model.__fields__.values()] == ['foobar', 'a'] assert Model(foobar='123').b == 123.0 def test_alias_camel_case(): class Model(BaseModel): one_thing: int another_thing: int class Config(BaseConfig): @classmethod def get_field_info(cls, name): field_config = super().get_field_info(name) or {} if 'alias' not in field_config: field_config['alias'] = re.sub(r'(?:^|_)([a-z])', lambda m: m.group(1).upper(), name) return field_config v = Model(**{'OneThing': 123, 'AnotherThing': '321'}) assert v.one_thing == 123 assert v.another_thing == 321 assert v == {'one_thing': 123, 'another_thing': 321} def test_get_field_info_inherit(): class ModelOne(BaseModel): class Config(BaseConfig): @classmethod def get_field_info(cls, name): field_config = super().get_field_info(name) or {} if 'alias' not in field_config: field_config['alias'] = re.sub(r'_([a-z])', lambda m: m.group(1).upper(), name) return field_config class ModelTwo(ModelOne): one_thing: int another_thing: int third_thing: int class Config: fields = {'third_thing': 'Banana'} v = ModelTwo(**{'oneThing': 123, 'anotherThing': '321', 'Banana': 1}) assert v == {'one_thing': 123, 'another_thing': 321, 'third_thing': 1} def test_pop_by_field_name(): class Model(BaseModel): last_updated_by: Optional[str] = None class Config: extra = Extra.forbid allow_population_by_field_name = True fields = {'last_updated_by': 'lastUpdatedBy'} assert Model(lastUpdatedBy='foo').dict() == {'last_updated_by': 'foo'} assert Model(last_updated_by='foo').dict() == {'last_updated_by': 'foo'} with pytest.raises(ValidationError) as exc_info: Model(lastUpdatedBy='foo', last_updated_by='bar') assert exc_info.value.errors() == [ {'loc': ('last_updated_by',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'} ] def test_alias_child_precedence(): class Parent(BaseModel): x: int class Config: fields = {'x': 'x1'} class Child(Parent): y: int class Config: fields = {'y': 'y2', 'x': 'x2'} assert Child.__fields__['y'].alias == 'y2' assert Child.__fields__['x'].alias == 'x2' def test_alias_generator_parent(): class Parent(BaseModel): x: int class Config: allow_population_by_field_name = True @classmethod def alias_generator(cls, f_name): return f_name + '1' class Child(Parent): y: int class
Config: @classmethod def alias_generator(cls, f_name): return f_name + '2' assert Child.__fields__['y'].alias == 'y2' assert Child.__fields__['x'].alias == 'x2' def test_alias_generator_on_parent(): class Parent(BaseModel): x: bool = Field(..., alias='a_b_c') y: str class Config: @staticmethod def alias_generator(x): return x.upper() class Child(Parent): y: str z: str assert Parent.__fields__['x'].alias == 'a_b_c' assert Parent.__fields__['y'].alias == 'Y' assert Child.__fields__['x'].alias == 'a_b_c' assert Child.__fields__['y'].alias == 'Y' assert Child.__fields__['z'].alias == 'Z' def test_alias_generator_on_child(): class Parent(BaseModel): x: bool = Field(..., alias='abc') y: str class Child(Parent): y: str z: str class Config: @staticmethod def alias_generator(x): return x.upper() assert [f.alias for f in Parent.__fields__.values()] == ['abc', 'y'] assert [f.alias for f in Child.__fields__.values()] == ['abc', 'Y', 'Z'] def test_low_priority_alias(): class Parent(BaseModel): x: bool = Field(..., alias='abc', alias_priority=1) y: str class Child(Parent): y: str z: str class Config: @staticmethod def alias_generator(x): return x.upper() assert [f.alias for f in Parent.__fields__.values()] == ['abc', 'y'] assert [f.alias for f in Child.__fields__.values()] == ['X', 'Y', 'Z'] def test_low_priority_alias_config(): class Parent(BaseModel): x: bool y: str class Config: fields = {'x': dict(alias='abc', alias_priority=1)} class Child(Parent): y: str z: str class Config: @staticmethod def alias_generator(x): return x.upper() assert [f.alias for f in Parent.__fields__.values()] == ['abc', 'y'] assert [f.alias for f in Child.__fields__.values()] == ['X', 'Y', 'Z'] def test_field_vs_config(): class Model(BaseModel): x: str = Field(..., alias='x_on_field') y: str z: str class Config: fields = {'x': dict(alias='x_on_config'), 'y': dict(alias='y_on_config')} assert [f.alias for f in Model.__fields__.values()] == ['x_on_field', 'y_on_config', 'z'] def test_alias_priority(): class Parent(BaseModel): a: str = Field(..., alias='a_field_parent') b: str = Field(..., alias='b_field_parent') c: str = Field(..., alias='c_field_parent') d: str e: str class Config: fields = { 'a': dict(alias='a_config_parent'), 'c': dict(alias='c_config_parent'), 'd': dict(alias='d_config_parent'), } @staticmethod def alias_generator(x): return f'{x}_generator_parent' class Child(Parent): a: str = Field(..., alias='a_field_child') class Config: fields = {'a': dict(alias='a_config_child'), 'b': dict(alias='b_config_child')} @staticmethod def alias_generator(x): return f'{x}_generator_child' # debug([f.alias for f in Parent.__fields__.values()], [f.alias for f in Child.__fields__.values()]) assert [f.alias for f in Parent.__fields__.values()] == [ 'a_field_parent', 'b_field_parent', 'c_field_parent', 'd_config_parent', 'e_generator_parent', ] assert [f.alias for f in Child.__fields__.values()] == [ 'a_field_child', 'b_config_child', 'c_field_parent', 'd_config_parent', 'e_generator_child', ] def test_empty_string_alias(): class Model(BaseModel): empty_string_key: int = Field(alias='') data = {'': 123} m = Model(**data) assert m.empty_string_key == 123 assert m.dict(by_alias=True) == data @pytest.mark.parametrize( 'use_construct, allow_population_by_field_name_config, arg_name, expectation', [ [False, True, 'bar', does_not_raise()], [False, True, 'bar_', does_not_raise()], [False, False, 'bar', does_not_raise()], [False, False, 'bar_', pytest.raises(ValueError)], [True, True, 'bar', does_not_raise()], [True, True, 'bar_', 
does_not_raise()], [True, False, 'bar', does_not_raise()], [True, False, 'bar_', does_not_raise()], ], ) def test_allow_population_by_field_name_config( use_construct: bool, allow_population_by_field_name_config: bool, arg_name: str, expectation: ContextManager, ): expected_value: int = 7 class Foo(BaseModel): bar_: int = Field(..., alias='bar') class Config(BaseConfig): allow_population_by_field_name = allow_population_by_field_name_config with expectation: if use_construct: f = Foo.construct(**{arg_name: expected_value}) else: f = Foo(**{arg_name: expected_value}) assert f.bar_ == expected_value pydantic-1.10.14/tests/test_annotated.py000066400000000000000000000103711455251250200202240ustar00rootroot00000000000000from typing import List import pytest from typing_extensions import Annotated from pydantic import BaseModel, Field from pydantic.fields import Undefined from pydantic.typing import get_all_type_hints @pytest.mark.parametrize( ['hint_fn', 'value'], [ # Test Annotated types with arbitrary metadata pytest.param( lambda: Annotated[int, 0], 5, id='misc-default', ), pytest.param( lambda: Annotated[int, 0], Field(default=5, ge=0), id='misc-field-default-constraint', ), # Test valid Annotated Field uses pytest.param( lambda: Annotated[int, Field(description='Test')], # noqa: F821 5, id='annotated-field-value-default', ), pytest.param( lambda: Annotated[int, Field(default_factory=lambda: 5, description='Test')], # noqa: F821 Undefined, id='annotated-field-default_factory', ), ], ) def test_annotated(hint_fn, value): hint = hint_fn() class M(BaseModel): x: hint = value assert M().x == 5 assert M(x=10).x == 10 assert get_all_type_hints(M)['x'] == hint @pytest.mark.parametrize( ['hint_fn', 'value', 'subclass_ctx'], [ pytest.param( lambda: Annotated[int, Field(5)], Undefined, pytest.raises(ValueError, match='`Field` default cannot be set in `Annotated`'), id='annotated-field-default', ), pytest.param( lambda: Annotated[int, Field(), Field()], Undefined, pytest.raises(ValueError, match='cannot specify multiple `Annotated` `Field`s'), id='annotated-field-dup', ), pytest.param( lambda: Annotated[int, Field()], Field(), pytest.raises(ValueError, match='cannot specify `Annotated` and value `Field`'), id='annotated-field-value-field-dup', ), pytest.param( lambda: Annotated[int, Field(default_factory=lambda: 5)], # The factory is not used 5, pytest.raises(ValueError, match='cannot specify both default and default_factory'), id='annotated-field-default_factory-value-default', ), ], ) def test_annotated_model_exceptions(hint_fn, value, subclass_ctx): hint = hint_fn() with subclass_ctx: class M(BaseModel): x: hint = value @pytest.mark.parametrize( ['hint_fn', 'value', 'empty_init_ctx'], [ pytest.param( lambda: Annotated[int, 0], Undefined, pytest.raises(ValueError, match='field required'), id='misc-no-default', ), pytest.param( lambda: Annotated[int, Field()], Undefined, pytest.raises(ValueError, match='field required'), id='annotated-field-no-default', ), ], ) def test_annotated_instance_exceptions(hint_fn, value, empty_init_ctx): hint = hint_fn() class M(BaseModel): x: hint = value with empty_init_ctx: assert M().x == 5 def test_field_reuse(): field = Field(description='Long description') class Model(BaseModel): one: int = field assert Model(one=1).dict() == {'one': 1} class AnnotatedModel(BaseModel): one: Annotated[int, field] assert AnnotatedModel(one=1).dict() == {'one': 1} def test_config_field_info(): class Foo(BaseModel): a: Annotated[int, Field(foobar='hello')] # noqa: F821 class Config: 
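        # Added descriptive comment: metadata from `Config.fields` is merged with the
        # `Field()` supplied via `Annotated`, so the schema asserted below carries
        # both 'description' and 'foobar'.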
fields = {'a': {'description': 'descr'}} assert Foo.schema(by_alias=True)['properties'] == { 'a': {'title': 'A', 'description': 'descr', 'foobar': 'hello', 'type': 'integer'}, } def test_annotated_alias() -> None: # https://github.com/pydantic/pydantic/issues/2971 StrAlias = Annotated[str, Field(max_length=3)] IntAlias = Annotated[int, Field(default_factory=lambda: 2)] Nested = Annotated[List[StrAlias], Field(description='foo')] class MyModel(BaseModel): a: StrAlias = 'abc' b: StrAlias c: IntAlias d: IntAlias e: Nested assert MyModel(b='def', e=['xyz']) == MyModel(a='abc', b='def', c=2, d=2, e=['xyz']) pydantic-1.10.14/tests/test_annotated_types.py000066400000000000000000000302251455251250200214500ustar00rootroot00000000000000""" Tests for annotated types that _pydantic_ can validate like - NamedTuple - TypedDict """ import json import sys from collections import namedtuple from typing import List, NamedTuple, Optional, Tuple import pytest from typing_extensions import Annotated, NotRequired, Required, TypedDict from pydantic import BaseModel, Field, PositiveInt, ValidationError if sys.version_info < (3, 9, 2): try: from typing import TypedDict as LegacyTypedDict except ImportError: LegacyTypedDict = None else: LegacyTypedDict = None if (3, 9, 2) < sys.version_info < (3, 11): from typing import TypedDict as LegacyRequiredTypedDict else: LegacyRequiredTypedDict = None def test_namedtuple(): Position = namedtuple('Pos', 'x y') class Event(NamedTuple): a: int b: int c: int d: str class Model(BaseModel): pos: Position events: List[Event] model = Model(pos=('1', 2), events=[[b'1', '2', 3, 'qwe']]) assert isinstance(model.pos, Position) assert isinstance(model.events[0], Event) assert model.pos.x == '1' assert model.pos == Position('1', 2) assert model.events[0] == Event(1, 2, 3, 'qwe') assert repr(model) == "Model(pos=Pos(x='1', y=2), events=[Event(a=1, b=2, c=3, d='qwe')])" assert model.json() == json.dumps(model.dict()) == '{"pos": ["1", 2], "events": [[1, 2, 3, "qwe"]]}' with pytest.raises(ValidationError) as exc_info: Model(pos=('1', 2), events=[['qwe', '2', 3, 'qwe']]) assert exc_info.value.errors() == [ { 'loc': ('events', 0, 'a'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer', } ] def test_namedtuple_schema(): class Position1(NamedTuple): x: int y: int Position2 = namedtuple('Position2', 'x y') class Model(BaseModel): pos1: Position1 pos2: Position2 pos3: Tuple[int, int] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'pos1': { 'title': 'Pos1', 'type': 'array', 'items': [ {'title': 'X', 'type': 'integer'}, {'title': 'Y', 'type': 'integer'}, ], 'minItems': 2, 'maxItems': 2, }, 'pos2': { 'title': 'Pos2', 'type': 'array', 'items': [ {'title': 'X'}, {'title': 'Y'}, ], 'minItems': 2, 'maxItems': 2, }, 'pos3': { 'title': 'Pos3', 'type': 'array', 'items': [ {'type': 'integer'}, {'type': 'integer'}, ], 'minItems': 2, 'maxItems': 2, }, }, 'required': ['pos1', 'pos2', 'pos3'], } def test_namedtuple_right_length(): class Point(NamedTuple): x: int y: int class Model(BaseModel): p: Point assert isinstance(Model(p=(1, 2)), Model) with pytest.raises(ValidationError) as exc_info: Model(p=(1, 2, 3)) assert exc_info.value.errors() == [ { 'loc': ('p',), 'msg': 'ensure this value has at most 2 items', 'type': 'value_error.list.max_items', 'ctx': {'limit_value': 2}, } ] def test_namedtuple_postponed_annotation(): """ https://github.com/pydantic/pydantic/issues/2760 """ class Tup(NamedTuple): v: 'PositiveInt' class Model(BaseModel): t: Tup # The effect of 
issue #2760 is that this call raises a `ConfigError` even though the type declared on `Tup.v` # references a binding in this module's global scope. with pytest.raises(ValidationError): Model.parse_obj({'t': [-1]}) def test_namedtuple_arbitrary_type(): class CustomClass: pass class Tup(NamedTuple): c: CustomClass class Model(BaseModel): x: Tup class Config: arbitrary_types_allowed = True data = {'x': Tup(c=CustomClass())} model = Model.parse_obj(data) assert isinstance(model.x.c, CustomClass) with pytest.raises(RuntimeError): class ModelNoArbitraryTypes(BaseModel): x: Tup def test_typeddict(): class TD(TypedDict): a: int b: int c: int d: str class Model(BaseModel): td: TD m = Model(td={'a': '3', 'b': b'1', 'c': 4, 'd': 'qwe'}) assert m.td == {'a': 3, 'b': 1, 'c': 4, 'd': 'qwe'} with pytest.raises(ValidationError) as exc_info: Model(td={'a': [1], 'b': 2, 'c': 3, 'd': 'qwe'}) assert exc_info.value.errors() == [ { 'loc': ('td', 'a'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer', } ] def test_typeddict_non_total(): class FullMovie(TypedDict, total=True): name: str year: int class Model(BaseModel): movie: FullMovie with pytest.raises(ValidationError) as exc_info: Model(movie={'year': '2002'}) assert exc_info.value.errors() == [ { 'loc': ('movie', 'name'), 'msg': 'field required', 'type': 'value_error.missing', } ] class PartialMovie(TypedDict, total=False): name: str year: int class Model(BaseModel): movie: PartialMovie m = Model(movie={'year': '2002'}) assert m.movie == {'year': 2002} def test_partial_new_typeddict(): class OptionalUser(TypedDict, total=False): name: str class User(OptionalUser): id: int class Model(BaseModel): user: User m = Model(user={'id': 1}) assert m.user == {'id': 1} @pytest.mark.skipif(not LegacyTypedDict, reason='python 3.9+ is used, no legacy TypedDict') def test_partial_legacy_typeddict(): class OptionalUser(LegacyTypedDict, total=False): name: str class User(OptionalUser): id: int with pytest.raises( TypeError, match='^You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2.', ): class Model(BaseModel): user: User def test_typeddict_extra(): class User(TypedDict): name: str age: int class Model(BaseModel): u: User class Config: extra = 'forbid' with pytest.raises(ValidationError) as exc_info: Model(u={'name': 'pika', 'age': 7, 'rank': 1}) assert exc_info.value.errors() == [ {'loc': ('u', 'rank'), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'}, ] def test_typeddict_schema(): class Data(BaseModel): a: int class DataTD(TypedDict): a: int class Model(BaseModel): data: Data data_td: DataTD assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'data': {'$ref': '#/definitions/Data'}, 'data_td': {'$ref': '#/definitions/DataTD'}}, 'required': ['data', 'data_td'], 'definitions': { 'Data': { 'type': 'object', 'title': 'Data', 'properties': {'a': {'title': 'A', 'type': 'integer'}}, 'required': ['a'], }, 'DataTD': { 'type': 'object', 'title': 'DataTD', 'properties': {'a': {'title': 'A', 'type': 'integer'}}, 'required': ['a'], }, }, } def test_typeddict_postponed_annotation(): class DataTD(TypedDict): v: 'PositiveInt' class Model(BaseModel): t: DataTD with pytest.raises(ValidationError): Model.parse_obj({'t': {'v': -1}}) def test_typeddict_required(): class DataTD(TypedDict, total=False): a: int b: Required[str] class Model(BaseModel): t: DataTD assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'t': {'$ref': '#/definitions/DataTD'}}, 'required': 
['t'], 'definitions': { 'DataTD': { 'title': 'DataTD', 'type': 'object', 'properties': { 'a': {'title': 'A', 'type': 'integer'}, 'b': {'title': 'B', 'type': 'string'}, }, 'required': ['b'], } }, } def test_typeddict_not_required(): class DataTD(TypedDict, total=True): a: NotRequired[int] b: str class Model(BaseModel): t: DataTD assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'t': {'$ref': '#/definitions/DataTD'}}, 'required': ['t'], 'definitions': { 'DataTD': { 'title': 'DataTD', 'type': 'object', 'properties': { 'a': {'title': 'A', 'type': 'integer'}, 'b': {'title': 'B', 'type': 'string'}, }, 'required': ['b'], } }, } def test_typed_dict_inheritance(): class DataTDBase(TypedDict, total=True): a: NotRequired[int] b: str class DataTD(DataTDBase, total=False): c: Required[int] d: str class Model(BaseModel): t: DataTD assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'t': {'$ref': '#/definitions/DataTD'}}, 'required': ['t'], 'definitions': { 'DataTD': { 'title': 'DataTD', 'type': 'object', 'properties': { 'a': {'title': 'A', 'type': 'integer'}, 'b': {'title': 'B', 'type': 'string'}, 'c': {'title': 'C', 'type': 'integer'}, 'd': {'title': 'D', 'type': 'string'}, }, 'required': ['b', 'c'], } }, } def test_typeddict_annotated_nonoptional(): class DataTD(TypedDict): a: Optional[int] b: Annotated[Optional[int], Field(...)] c: Annotated[Optional[int], Field(..., description='Test')] d: Annotated[Optional[int], Field()] class Model(BaseModel): data_td: DataTD assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'data_td': {'$ref': '#/definitions/DataTD'}}, 'required': ['data_td'], 'definitions': { 'DataTD': { 'type': 'object', 'title': 'DataTD', 'properties': { 'a': {'title': 'A', 'type': 'integer'}, 'b': {'title': 'B', 'type': 'integer'}, 'c': {'title': 'C', 'type': 'integer', 'description': 'Test'}, 'd': {'title': 'D', 'type': 'integer'}, }, 'required': ['a', 'b', 'c'], }, }, } for bad_obj in ({}, {'data_td': []}, {'data_td': {'a': 1, 'b': 2, 'd': 4}}): with pytest.raises(ValidationError): Model.parse_obj(bad_obj) valid_data = {'a': 1, 'b': 2, 'c': 3} parsed_model = Model.parse_obj({'data_td': valid_data}) assert parsed_model and parsed_model == Model(data_td=valid_data) @pytest.mark.skipif(not LegacyRequiredTypedDict, reason='python 3.11+ used') def test_legacy_typeddict_required_not_required(): class TDRequired(LegacyRequiredTypedDict): a: Required[int] class TDNotRequired(LegacyRequiredTypedDict): a: Required[int] for cls in (TDRequired, TDNotRequired): with pytest.raises( TypeError, match='^You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.11.', ): class Model(BaseModel): t: cls @pytest.mark.skipif(not LegacyRequiredTypedDict, reason='python 3.11+ used') def test_legacy_typeddict_no_required_not_required(): class TD(LegacyRequiredTypedDict): a: int class Model(BaseModel): t: TD pydantic-1.10.14/tests/test_assert_in_validators.py000066400000000000000000000012521455251250200224640ustar00rootroot00000000000000""" PYTEST_DONT_REWRITE """ import pytest from pydantic import BaseModel, ValidationError, validator def test_assert_raises_validation_error(): class Model(BaseModel): a: str @validator('a') def check_a(cls, v): assert v == 'a', 'invalid a' return v assert Model(a='a').a == 'a' with pytest.raises(ValidationError) as exc_info: Model(a='snap') expected_errors = [{'loc': ('a',), 'msg': 'invalid a', 'type': 'assertion_error'}] actual_errors = exc_info.value.errors() if 
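# --- Illustrative sketch (added, not part of the original suite): ties together
# the two annotated types exercised above: both NamedTuple and TypedDict field
# members are coerced by the model's validators. `Coords`, `UserTD` and `Scene`
# are hypothetical names.
def example_namedtuple_and_typeddict_together():
    class Coords(NamedTuple):
        x: int
        y: int

    class UserTD(TypedDict):
        name: str
        age: int

    class Scene(BaseModel):
        origin: Coords
        owner: UserTD

    scene = Scene(origin=('1', 2), owner={'name': 'pika', 'age': '7'})
    assert scene.origin == Coords(1, 2)  # '1' coerced to int
    assert scene.owner == {'name': 'pika', 'age': 7}  # '7' coerced to int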
expected_errors != actual_errors: pytest.fail(f'Actual errors: {actual_errors}\nExpected errors: {expected_errors}') pydantic-1.10.14/tests/test_callable.py000066400000000000000000000014421455251250200200050ustar00rootroot00000000000000import sys from typing import Callable import pytest from pydantic import BaseModel, ValidationError collection_callable_types = [Callable, Callable[[int], int]] if sys.version_info >= (3, 9): from collections.abc import Callable as CollectionsCallable collection_callable_types += [CollectionsCallable, CollectionsCallable[[int], int]] @pytest.mark.parametrize('annotation', collection_callable_types) def test_callable(annotation): class Model(BaseModel): callback: annotation m = Model(callback=lambda x: x) assert callable(m.callback) @pytest.mark.parametrize('annotation', collection_callable_types) def test_non_callable(annotation): class Model(BaseModel): callback: annotation with pytest.raises(ValidationError): Model(callback=1) pydantic-1.10.14/tests/test_color.py000066400000000000000000000151501455251250200173650ustar00rootroot00000000000000from datetime import datetime import pytest from pydantic import BaseModel, ValidationError from pydantic.color import Color from pydantic.errors import ColorError from pydantic.utils import almost_equal_floats @pytest.mark.parametrize( 'raw_color, as_tuple', [ # named colors ('aliceblue', (240, 248, 255)), ('Antiquewhite', (250, 235, 215)), ('#000000', (0, 0, 0)), ('#DAB', (221, 170, 187)), ('#dab', (221, 170, 187)), ('#000', (0, 0, 0)), ('0x797979', (121, 121, 121)), ('0x777', (119, 119, 119)), ('0x777777', (119, 119, 119)), ('0x777777cc', (119, 119, 119, 0.8)), ('777', (119, 119, 119)), ('777c', (119, 119, 119, 0.8)), (' 777', (119, 119, 119)), ('777 ', (119, 119, 119)), (' 777 ', (119, 119, 119)), ((0, 0, 128), (0, 0, 128)), ([0, 0, 128], (0, 0, 128)), ((0, 0, 205, 1.0), (0, 0, 205)), ((0, 0, 205, 0.5), (0, 0, 205, 0.5)), ('rgb(0, 0, 205)', (0, 0, 205)), ('rgb(0, 0, 205.2)', (0, 0, 205)), ('rgb(0, 0.2, 205)', (0, 0, 205)), ('rgba(0, 0, 128, 0.6)', (0, 0, 128, 0.6)), ('rgba(0, 0, 128, .6)', (0, 0, 128, 0.6)), ('rgba(0, 0, 128, 60%)', (0, 0, 128, 0.6)), (' rgba(0, 0, 128,0.6) ', (0, 0, 128, 0.6)), ('rgba(00,0,128,0.6 )', (0, 0, 128, 0.6)), ('rgba(0, 0, 128, 0)', (0, 0, 128, 0)), ('rgba(0, 0, 128, 1)', (0, 0, 128)), ('hsl(270, 60%, 70%)', (178, 133, 224)), ('hsl(180, 100%, 50%)', (0, 255, 255)), ('hsl(630, 60%, 70%)', (178, 133, 224)), ('hsl(270deg, 60%, 70%)', (178, 133, 224)), ('hsl(.75turn, 60%, 70%)', (178, 133, 224)), ('hsl(-.25turn, 60%, 70%)', (178, 133, 224)), ('hsl(-0.25turn, 60%, 70%)', (178, 133, 224)), ('hsl(4.71238rad, 60%, 70%)', (178, 133, 224)), ('hsl(10.9955rad, 60%, 70%)', (178, 133, 224)), ('hsl(270, 60%, 50%, .15)', (127, 51, 204, 0.15)), ('hsl(270.00deg, 60%, 50%, 15%)', (127, 51, 204, 0.15)), ], ) def test_color_success(raw_color, as_tuple): c = Color(raw_color) assert c.as_rgb_tuple() == as_tuple assert c.original() == raw_color @pytest.mark.parametrize( 'color', [ # named colors 'nosuchname', 'chucknorris', # hex '#0000000', 'x000', # rgb/rgba tuples (256, 256, 256), (128, 128, 128, 0.5, 128), (0, 0, 'x'), (0, 0, 0, 1.5), (0, 0, 0, 'x'), (0, 0, 1280), (0, 0, 1205, 0.1), (0, 0, 1128, 0.5), (0, 0, 1128, -0.5), (0, 0, 1128, 1.5), # rgb/rgba strings 'rgb(0, 0, 1205)', 'rgb(0, 0, 1128)', 'rgba(0, 0, 11205, 0.1)', 'rgba(0, 0, 128, 11.5)', 'hsl(180, 101%, 50%)', # neither a tuple, not a string datetime(2017, 10, 5, 19, 47, 7), object, range(10), ], ) def test_color_fail(color): with 
pytest.raises(ColorError): Color(color) def test_model_validation(): class Model(BaseModel): color: Color assert Model(color='red').color.as_hex() == '#f00' assert Model(color=Color('red')).color.as_hex() == '#f00' with pytest.raises(ValidationError) as exc_info: Model(color='snot') assert exc_info.value.errors() == [ { 'loc': ('color',), 'msg': 'value is not a valid color: string not recognised as a valid color', 'type': 'value_error.color', 'ctx': {'reason': 'string not recognised as a valid color'}, } ] def test_as_rgb(): assert Color('bad').as_rgb() == 'rgb(187, 170, 221)' assert Color((1, 2, 3, 0.123456)).as_rgb() == 'rgba(1, 2, 3, 0.12)' assert Color((1, 2, 3, 0.1)).as_rgb() == 'rgba(1, 2, 3, 0.1)' def test_as_rgb_tuple(): assert Color((1, 2, 3)).as_rgb_tuple(alpha=None) == (1, 2, 3) assert Color((1, 2, 3, 1)).as_rgb_tuple(alpha=None) == (1, 2, 3) assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=None) == (1, 2, 3, 0.3) assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=None) == (1, 2, 3, 0.3) assert Color((1, 2, 3)).as_rgb_tuple(alpha=False) == (1, 2, 3) assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=False) == (1, 2, 3) assert Color((1, 2, 3)).as_rgb_tuple(alpha=True) == (1, 2, 3, 1) assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=True) == (1, 2, 3, 0.3) def test_as_hsl(): assert Color('bad').as_hsl() == 'hsl(260, 43%, 77%)' assert Color((1, 2, 3, 0.123456)).as_hsl() == 'hsl(210, 50%, 1%, 0.12)' assert Color('hsl(260, 43%, 77%)').as_hsl() == 'hsl(260, 43%, 77%)' def test_as_hsl_tuple(): c = Color('016997') h, s, l, a = c.as_hsl_tuple(alpha=True) assert almost_equal_floats(h, 0.551, delta=0.01) assert almost_equal_floats(s, 0.986, delta=0.01) assert almost_equal_floats(l, 0.298, delta=0.01) assert a == 1 assert c.as_hsl_tuple(alpha=False) == c.as_hsl_tuple(alpha=None) == (h, s, l) c = Color((3, 40, 50, 0.5)) hsla = c.as_hsl_tuple(alpha=None) assert len(hsla) == 4 assert hsla[3] == 0.5 def test_as_hex(): assert Color((1, 2, 3)).as_hex() == '#010203' assert Color((119, 119, 119)).as_hex() == '#777' assert Color((119, 0, 238)).as_hex() == '#70e' assert Color('B0B').as_hex() == '#b0b' assert Color((1, 2, 3, 0.123456)).as_hex() == '#0102031f' assert Color((1, 2, 3, 0.1)).as_hex() == '#0102031a' def test_as_named(): assert Color((0, 255, 255)).as_named() == 'cyan' assert Color('#808000').as_named() == 'olive' assert Color('hsl(180, 100%, 50%)').as_named() == 'cyan' assert Color((240, 248, 255)).as_named() == 'aliceblue' with pytest.raises(ValueError) as exc_info: Color((1, 2, 3)).as_named() assert exc_info.value.args[0] == 'no named color found, use fallback=True, as_hex() or as_rgb()' assert Color((1, 2, 3)).as_named(fallback=True) == '#010203' assert Color((1, 2, 3, 0.1)).as_named(fallback=True) == '#0102031a' def test_str_repr(): assert str(Color('red')) == 'red' assert repr(Color('red')) == "Color('red', rgb=(255, 0, 0))" assert str(Color((1, 2, 3))) == '#010203' assert repr(Color((1, 2, 3))) == "Color('#010203', rgb=(1, 2, 3))" def test_eq(): assert Color('red') == Color('red') assert Color('red') != Color('blue') assert Color('red') != 'red' assert Color('red') == Color((255, 0, 0)) assert Color('red') != Color((0, 0, 255)) def test_color_hashable(): assert hash(Color('red')) != hash(Color('blue')) assert hash(Color('red')) == hash(Color((255, 0, 0))) assert hash(Color('red')) != hash(Color((255, 0, 0, 0.5))) pydantic-1.10.14/tests/test_construction.py000066400000000000000000000212701455251250200210010ustar00rootroot00000000000000import pickle from typing import Any, List, Optional 
import pytest from pydantic import BaseModel, Field, PrivateAttr from pydantic.fields import Undefined class Model(BaseModel): a: float b: int = 10 def test_simple_construct(): m = Model.construct(a=3.14) assert m.a == 3.14 assert m.b == 10 assert m.__fields_set__ == {'a'} assert m.dict() == {'a': 3.14, 'b': 10} def test_construct_misuse(): m = Model.construct(b='foobar') assert m.b == 'foobar' assert m.dict() == {'b': 'foobar'} with pytest.raises(AttributeError, match="'Model' object has no attribute 'a'"): print(m.a) def test_construct_fields_set(): m = Model.construct(a=3.0, b=-1, _fields_set={'a'}) assert m.a == 3 assert m.b == -1 assert m.__fields_set__ == {'a'} assert m.dict() == {'a': 3, 'b': -1} def test_construct_allow_extra(): """construct() should allow extra fields""" class Foo(BaseModel): x: int assert Foo.construct(x=1, y=2).dict() == {'x': 1, 'y': 2} def test_construct_keep_order(): class Foo(BaseModel): a: int b: int = 42 c: float instance = Foo(a=1, b=321, c=3.14) instance_construct = Foo.construct(**instance.dict()) assert instance == instance_construct assert instance.dict() == instance_construct.dict() assert instance.json() == instance_construct.json() def test_large_any_str(): class Model(BaseModel): a: bytes b: str content_bytes = b'x' * (2**16 + 1) content_str = 'x' * (2**16 + 1) m = Model(a=content_bytes, b=content_str) assert m.a == content_bytes assert m.b == content_str def test_simple_copy(): m = Model(a=24) m2 = m.copy() assert m.a == m2.a == 24 assert m.b == m2.b == 10 assert m == m2 assert m.__fields__ == m2.__fields__ class ModelTwo(BaseModel): __foo__ = PrivateAttr({'private'}) a: float b: int = 10 c: str = 'foobar' d: Model def test_deep_copy(): m = ModelTwo(a=24, d=Model(a='12')) m.__foo__ = {'new value'} m2 = m.copy(deep=True) assert m.a == m2.a == 24 assert m.b == m2.b == 10 assert m.c == m2.c == 'foobar' assert m.d is not m2.d assert m == m2 assert m.__fields__ == m2.__fields__ assert m.__foo__ == m2.__foo__ assert m.__foo__ is not m2.__foo__ def test_copy_exclude(): m = ModelTwo(a=24, d=Model(a='12')) m2 = m.copy(exclude={'b'}) assert m.a == m2.a == 24 assert isinstance(m2.d, Model) assert m2.d.a == 12 assert hasattr(m2, 'c') assert not hasattr(m2, 'b') assert set(m.dict().keys()) == {'a', 'b', 'c', 'd'} assert set(m2.dict().keys()) == {'a', 'c', 'd'} assert m != m2 def test_copy_include(): m = ModelTwo(a=24, d=Model(a='12')) m2 = m.copy(include={'a'}) assert m.a == m2.a == 24 assert set(m.dict().keys()) == {'a', 'b', 'c', 'd'} assert set(m2.dict().keys()) == {'a'} assert m != m2 def test_copy_include_exclude(): m = ModelTwo(a=24, d=Model(a='12')) m2 = m.copy(include={'a', 'b', 'c'}, exclude={'c'}) assert set(m.dict().keys()) == {'a', 'b', 'c', 'd'} assert set(m2.dict().keys()) == {'a', 'b'} def test_copy_advanced_exclude(): class SubSubModel(BaseModel): a: str b: str class SubModel(BaseModel): c: str d: List[SubSubModel] class Model(BaseModel): e: str f: SubModel m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')])) m2 = m.copy(exclude={'f': {'c': ..., 'd': {-1: {'a'}}}}) assert hasattr(m.f, 'c') assert not hasattr(m2.f, 'c') assert m2.dict() == {'e': 'e', 'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]}} m2 = m.copy(exclude={'e': ..., 'f': {'d'}}) assert m2.dict() == {'f': {'c': 'foo'}} def test_copy_advanced_include(): class SubSubModel(BaseModel): a: str b: str class SubModel(BaseModel): c: str d: List[SubSubModel] class Model(BaseModel): e: str f: SubModel m = Model(e='e', f=SubModel(c='foo', 
d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')])) m2 = m.copy(include={'f': {'c'}}) assert hasattr(m.f, 'c') assert hasattr(m2.f, 'c') assert m2.dict() == {'f': {'c': 'foo'}} m2 = m.copy(include={'e': ..., 'f': {'d': {-1}}}) assert m2.dict() == {'e': 'e', 'f': {'d': [{'a': 'c', 'b': 'e'}]}} def test_copy_advanced_include_exclude(): class SubSubModel(BaseModel): a: str b: str class SubModel(BaseModel): c: str d: List[SubSubModel] class Model(BaseModel): e: str f: SubModel m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')])) m2 = m.copy(include={'e': ..., 'f': {'d'}}, exclude={'e': ..., 'f': {'d': {0}}}) assert m2.dict() == {'f': {'d': [{'a': 'c', 'b': 'e'}]}} def test_copy_update(): m = ModelTwo(a=24, d=Model(a='12')) m2 = m.copy(update={'a': 'different'}) assert m.a == 24 assert m2.a == 'different' assert set(m.dict().keys()) == set(m2.dict().keys()) == {'a', 'b', 'c', 'd'} assert m != m2 def test_copy_update_unset(): class Foo(BaseModel): foo: Optional[str] bar: Optional[str] assert Foo(foo='hello').copy(update={'bar': 'world'}).json(exclude_unset=True) == '{"foo": "hello", "bar": "world"}' def test_copy_set_fields(): m = ModelTwo(a=24, d=Model(a='12')) m2 = m.copy() assert m.dict(exclude_unset=True) == {'a': 24.0, 'd': {'a': 12}} assert m.dict(exclude_unset=True) == m2.dict(exclude_unset=True) def test_simple_pickle(): m = Model(a='24') b = pickle.dumps(m) m2 = pickle.loads(b) assert m.a == m2.a == 24 assert m.b == m2.b == 10 assert m == m2 assert m is not m2 assert tuple(m) == (('a', 24.0), ('b', 10)) assert tuple(m2) == (('a', 24.0), ('b', 10)) assert m.__fields__ == m2.__fields__ def test_recursive_pickle(): m = ModelTwo(a=24, d=Model(a='123.45')) m2 = pickle.loads(pickle.dumps(m)) assert m == m2 assert m.d.a == 123.45 assert m2.d.a == 123.45 assert m.__fields__ == m2.__fields__ assert m.__foo__ == m2.__foo__ def test_pickle_undefined(): m = ModelTwo(a=24, d=Model(a='123.45')) m2 = pickle.loads(pickle.dumps(m)) assert m2.__foo__ == {'private'} m.__foo__ = Undefined m3 = pickle.loads(pickle.dumps(m)) assert not hasattr(m3, '__foo__') def test_copy_undefined(): m = ModelTwo(a=24, d=Model(a='123.45')) m2 = m.copy() assert m2.__foo__ == {'private'} m.__foo__ = Undefined m3 = m.copy() assert not hasattr(m3, '__foo__') def test_immutable_copy_with_allow_mutation(): class Model(BaseModel): a: int b: int class Config: allow_mutation = False m = Model(a=40, b=10) assert m == m.copy() m2 = m.copy(update={'b': 12}) assert repr(m2) == 'Model(a=40, b=12)' with pytest.raises(TypeError): m2.b = 13 def test_immutable_copy_with_frozen(): class Model(BaseModel): a: int b: int class Config: frozen = True m = Model(a=40, b=10) assert m == m.copy() m2 = m.copy(update={'b': 12}) assert repr(m2) == 'Model(a=40, b=12)' with pytest.raises(TypeError): m2.b = 13 def test_pickle_fields_set(): m = Model(a=24) assert m.dict(exclude_unset=True) == {'a': 24} m2 = pickle.loads(pickle.dumps(m)) assert m2.dict(exclude_unset=True) == {'a': 24} def test_copy_update_exclude(): class SubModel(BaseModel): a: str b: str class Model(BaseModel): c: str d: SubModel m = Model(c='ex', d=dict(a='ax', b='bx')) assert m.dict() == {'c': 'ex', 'd': {'a': 'ax', 'b': 'bx'}} assert m.copy(exclude={'c'}).dict() == {'d': {'a': 'ax', 'b': 'bx'}} assert m.copy(exclude={'c'}, update={'c': 42}).dict() == {'c': 42, 'd': {'a': 'ax', 'b': 'bx'}} assert m._calculate_keys(exclude={'x': ...}, include=None, exclude_unset=False) == {'c', 'd'} assert m._calculate_keys(exclude={'x': ...}, include=None, 
exclude_unset=False, update={'c': 42}) == {'d'} def test_shallow_copy_modify(): class X(BaseModel): val: int deep: Any x = X(val=1, deep={'deep_thing': [1, 2]}) y = x.copy() y.val = 2 y.deep['deep_thing'].append(3) assert x.val == 1 assert y.val == 2 # deep['deep_thing'] gets modified assert x.deep['deep_thing'] == [1, 2, 3] assert y.deep['deep_thing'] == [1, 2, 3] def test_construct_default_factory(): class Model(BaseModel): foo: List[int] = Field(default_factory=list) bar: str = 'Baz' m = Model.construct() assert m.foo == [] assert m.bar == 'Baz' pydantic-1.10.14/tests/test_create_model.py000066400000000000000000000200261455251250200206700ustar00rootroot00000000000000from typing import Generic, Optional, Tuple, TypeVar import pytest from pydantic import BaseModel, Extra, Field, ValidationError, create_model, errors, validator from pydantic.fields import ModelPrivateAttr from pydantic.generics import GenericModel def test_create_model(): model = create_model('FooModel', foo=(str, ...), bar=123) assert issubclass(model, BaseModel) assert issubclass(model.__config__, BaseModel.Config) assert model.__name__ == 'FooModel' assert model.__fields__.keys() == {'foo', 'bar'} assert model.__validators__ == {} assert model.__config__.__name__ == 'Config' assert model.__module__ == 'pydantic.main' def test_create_model_usage(): model = create_model('FooModel', foo=(str, ...), bar=123) m = model(foo='hello') assert m.foo == 'hello' assert m.bar == 123 with pytest.raises(ValidationError): model() with pytest.raises(ValidationError): model(foo='hello', bar='xxx') def test_create_model_pickle(create_module): """ Pickle will work for dynamically created model only if it was defined globally with its class name and module where it's defined was specified """ @create_module def module(): import pickle from pydantic import create_model FooModel = create_model('FooModel', foo=(str, ...), bar=123, __module__=__name__) m = FooModel(foo='hello') d = pickle.dumps(m) m2 = pickle.loads(d) assert m2.foo == m.foo == 'hello' assert m2.bar == m.bar == 123 assert m2 == m assert m2 is not m def test_invalid_name(): with pytest.warns(RuntimeWarning): model = create_model('FooModel', _foo=(str, ...)) assert len(model.__fields__) == 0 def test_field_wrong_tuple(): with pytest.raises(errors.ConfigError): create_model('FooModel', foo=(1, 2, 3)) def test_config_and_base(): with pytest.raises(errors.ConfigError): create_model('FooModel', __config__=BaseModel.Config, __base__=BaseModel) def test_inheritance(): class BarModel(BaseModel): x = 1 y = 2 model = create_model('FooModel', foo=(str, ...), bar=(int, 123), __base__=BarModel) assert model.__fields__.keys() == {'foo', 'bar', 'x', 'y'} m = model(foo='a', x=4) assert m.dict() == {'bar': 123, 'foo': 'a', 'x': 4, 'y': 2} def test_custom_config(): class Config: fields = {'foo': 'api-foo-field'} model = create_model('FooModel', foo=(int, ...), __config__=Config) assert model(**{'api-foo-field': '987'}).foo == 987 assert issubclass(model.__config__, BaseModel.Config) with pytest.raises(ValidationError): model(foo=654) def test_custom_config_inherits(): class Config(BaseModel.Config): fields = {'foo': 'api-foo-field'} model = create_model('FooModel', foo=(int, ...), __config__=Config) assert model(**{'api-foo-field': '987'}).foo == 987 assert issubclass(model.__config__, BaseModel.Config) with pytest.raises(ValidationError): model(foo=654) def test_custom_config_extras(): class Config(BaseModel.Config): extra = Extra.forbid model = create_model('FooModel', foo=(int, ...), 
__config__=Config) assert model(foo=654) with pytest.raises(ValidationError): model(bar=654) def test_inheritance_validators(): class BarModel(BaseModel): @validator('a', check_fields=False) def check_a(cls, v): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v model = create_model('FooModel', a='cake', __base__=BarModel) assert model().a == 'cake' assert model(a='this is foobar good').a == 'this is foobar good' with pytest.raises(ValidationError): model(a='something else') def test_inheritance_validators_always(): class BarModel(BaseModel): @validator('a', check_fields=False, always=True) def check_a(cls, v): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v model = create_model('FooModel', a='cake', __base__=BarModel) with pytest.raises(ValidationError): model() assert model(a='this is foobar good').a == 'this is foobar good' with pytest.raises(ValidationError): model(a='something else') def test_inheritance_validators_all(): class BarModel(BaseModel): @validator('*') def check_all(cls, v): return v * 2 model = create_model('FooModel', a=(int, ...), b=(int, ...), __base__=BarModel) assert model(a=2, b=6).dict() == {'a': 4, 'b': 12} def test_funky_name(): model = create_model('FooModel', **{'this-is-funky': (int, ...)}) m = model(**{'this-is-funky': '123'}) assert m.dict() == {'this-is-funky': 123} with pytest.raises(ValidationError) as exc_info: model() assert exc_info.value.errors() == [ {'loc': ('this-is-funky',), 'msg': 'field required', 'type': 'value_error.missing'} ] def test_repeat_base_usage(): class Model(BaseModel): a: str assert Model.__fields__.keys() == {'a'} model = create_model('FooModel', b=1, __base__=Model) assert Model.__fields__.keys() == {'a'} assert model.__fields__.keys() == {'a', 'b'} model2 = create_model('Foo2Model', c=1, __base__=Model) assert Model.__fields__.keys() == {'a'} assert model.__fields__.keys() == {'a', 'b'} assert model2.__fields__.keys() == {'a', 'c'} model3 = create_model('Foo2Model', d=1, __base__=model) assert Model.__fields__.keys() == {'a'} assert model.__fields__.keys() == {'a', 'b'} assert model2.__fields__.keys() == {'a', 'c'} assert model3.__fields__.keys() == {'a', 'b', 'd'} def test_dynamic_and_static(): class A(BaseModel): x: int y: float z: str DynamicA = create_model('A', x=(int, ...), y=(float, ...), z=(str, ...)) for field_name in ('x', 'y', 'z'): assert A.__fields__[field_name].default == DynamicA.__fields__[field_name].default def test_config_field_info_create_model(): class Config: fields = {'a': {'description': 'descr'}} m1 = create_model('M1', __config__=Config, a=(str, ...)) assert m1.schema()['properties'] == {'a': {'title': 'A', 'description': 'descr', 'type': 'string'}} m2 = create_model('M2', __config__=Config, a=(str, Field(...))) assert m2.schema()['properties'] == {'a': {'title': 'A', 'description': 'descr', 'type': 'string'}} def test_generics_model(): T = TypeVar('T') class TestGenericModel(GenericModel): pass AAModel = create_model( 'AAModel', __base__=(TestGenericModel, Generic[T]), __cls_kwargs__={'orm_mode': True}, aa=(int, Field(0)) ) result = AAModel[int](aa=1) assert result.aa == 1 assert result.__config__.orm_mode is True @pytest.mark.parametrize('base', [ModelPrivateAttr, object]) def test_set_name(base): calls = [] class class_deco(base): def __init__(self, fn): super().__init__() self.fn = fn def __set_name__(self, owner, name): calls.append((owner, name)) def __get__(self, obj, type=None): return self.fn(obj) if obj else self class A(BaseModel): x: int 
@class_deco def _some_func(self): return self.x assert calls == [(A, '_some_func')] a = A(x=2) # we don't test whether calling the method on a PrivateAttr works: # attribute access on privateAttributes is more complicated, it doesn't # get added to the class namespace (and will also get set on the instance # with _init_private_attributes), so the descriptor protocol won't work. if base is object: assert a._some_func == 2 def test_create_model_with_slots(): field_definitions = {'__slots__': (Optional[Tuple[str, ...]], None), 'foobar': (Optional[int], None)} with pytest.warns(RuntimeWarning, match='__slots__ should not be passed to create_model'): model = create_model('PartialPet', **field_definitions) assert model.__fields__.keys() == {'foobar'} pydantic-1.10.14/tests/test_dataclasses.py000066400000000000000000001203261455251250200205400ustar00rootroot00000000000000import copy import dataclasses import pickle import re import sys from collections.abc import Hashable from datetime import datetime try: from functools import cached_property except ImportError: pass from pathlib import Path from typing import Callable, ClassVar, Dict, FrozenSet, List, Optional, Set, Union import pytest from typing_extensions import Literal import pydantic from pydantic import BaseModel, Extra, ValidationError, validator def test_simple(): @pydantic.dataclasses.dataclass class MyDataclass: a: int b: float d = MyDataclass('1', '2.5') assert d.a == 1 assert d.b == 2.5 d = MyDataclass(b=10, a=20) assert d.a == 20 assert d.b == 10 def test_model_name(): @pydantic.dataclasses.dataclass class MyDataClass: model_name: str d = MyDataClass('foo') assert d.model_name == 'foo' d = MyDataClass(model_name='foo') assert d.model_name == 'foo' def test_value_error(): @pydantic.dataclasses.dataclass class MyDataclass: a: int b: int with pytest.raises(ValidationError) as exc_info: MyDataclass(1, 'wrong') assert exc_info.value.errors() == [ {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_frozen(): @pydantic.dataclasses.dataclass(frozen=True) class MyDataclass: a: int d = MyDataclass(1) assert d.a == 1 with pytest.raises(AttributeError): d.a = 7 def test_validate_assignment(): class Config: validate_assignment = True @pydantic.dataclasses.dataclass(config=Config) class MyDataclass: a: int d = MyDataclass(1) assert d.a == 1 d.a = '7' assert d.a == 7 def test_validate_assignment_error(): @pydantic.dataclasses.dataclass(config=dict(validate_assignment=True)) class MyDataclass: a: int d = MyDataclass(1) with pytest.raises(ValidationError) as exc_info: d.a = 'xxx' assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_not_validate_assignment(): @pydantic.dataclasses.dataclass class MyDataclass: a: int d = MyDataclass(1) assert d.a == 1 d.a = '7' assert d.a == '7' def test_validate_assignment_value_change(): class Config: validate_assignment = True @pydantic.dataclasses.dataclass(config=Config, frozen=False) class MyDataclass: a: int @validator('a') def double_a(cls, v): return v * 2 d = MyDataclass(2) assert d.a == 4 d.a = 3 assert d.a == 6 def test_validate_assignment_extra(): class Config: validate_assignment = True @pydantic.dataclasses.dataclass(config=Config, frozen=False) class MyDataclass: a: int d = MyDataclass(1) assert d.a == 1 d.extra_field = 1.23 assert d.extra_field == 1.23 d.extra_field = 'bye' assert d.extra_field == 'bye' def test_post_init(): post_init_called = False 
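# Ordering note (editorial, hedged): by default in pydantic v1, __post_init__
# fires *before* field validation (on the raw input values), while
# __post_init_post_parse__ fires *after* validation (on the coerced values);
# test_post_init_validation below demonstrates exactly this split, and the
# default can be changed via the post_init_call setting exercised in
# test_post_init_after_validation further down in this module.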
@pydantic.dataclasses.dataclass class MyDataclass: a: int def __post_init__(self): nonlocal post_init_called post_init_called = True d = MyDataclass('1') assert d.a == 1 assert post_init_called def test_post_init_validation(): @dataclasses.dataclass class DC: a: int def __post_init__(self): self.a *= 2 def __post_init_post_parse__(self): self.a += 1 PydanticDC = pydantic.dataclasses.dataclass(DC) assert DC(a='2').a == '22' assert PydanticDC(a='2').a == 23 def test_post_init_inheritance_chain(): parent_post_init_called = False post_init_called = False @pydantic.dataclasses.dataclass class ParentDataclass: a: int def __post_init__(self): nonlocal parent_post_init_called parent_post_init_called = True @pydantic.dataclasses.dataclass class MyDataclass(ParentDataclass): b: int def __post_init__(self): super().__post_init__() nonlocal post_init_called post_init_called = True d = MyDataclass(a=1, b=2) assert d.a == 1 assert d.b == 2 assert parent_post_init_called assert post_init_called def test_post_init_post_parse(): post_init_post_parse_called = False @pydantic.dataclasses.dataclass class MyDataclass: a: int def __post_init_post_parse__(self): nonlocal post_init_post_parse_called post_init_post_parse_called = True d = MyDataclass('1') assert d.a == 1 assert post_init_post_parse_called def test_post_init_post_parse_types(): @pydantic.dataclasses.dataclass class CustomType: b: int @pydantic.dataclasses.dataclass class MyDataclass: a: CustomType def __post_init__(self): assert type(self.a) == dict def __post_init_post_parse__(self): assert type(self.a) == CustomType d = MyDataclass(**{'a': {'b': 1}}) assert d.a.b == 1 def test_post_init_assignment(): from dataclasses import field # Based on: https://docs.python.org/3/library/dataclasses.html#post-init-processing @pydantic.dataclasses.dataclass class C: a: float b: float c: float = field(init=False) def __post_init__(self): self.c = self.a + self.b c = C(0.1, 0.2) assert c.a == 0.1 assert c.b == 0.2 assert c.c == 0.30000000000000004 def test_inheritance(): @pydantic.dataclasses.dataclass class A: a: str = None @pydantic.dataclasses.dataclass class B(A): b: int = None b = B(a='a', b=12) assert b.a == 'a' assert b.b == 12 with pytest.raises(ValidationError): B(a='a', b='b') def test_validate_long_string_error(): class Config: max_anystr_length = 3 @pydantic.dataclasses.dataclass(config=Config) class MyDataclass: a: str with pytest.raises(ValidationError) as exc_info: MyDataclass('xxxx') assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'ensure this value has at most 3 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 3}, } ] def test_validate_assigment_long_string_error(): class Config: max_anystr_length = 3 validate_assignment = True @pydantic.dataclasses.dataclass(config=Config) class MyDataclass: a: str d = MyDataclass('xxx') with pytest.raises(ValidationError) as exc_info: d.a = 'xxxx' assert issubclass(MyDataclass.__pydantic_model__.__config__, BaseModel.Config) assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'ensure this value has at most 3 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 3}, } ] def test_no_validate_assigment_long_string_error(): class Config: max_anystr_length = 3 validate_assignment = False @pydantic.dataclasses.dataclass(config=Config) class MyDataclass: a: str d = MyDataclass('xxx') d.a = 'xxxx' assert d.a == 'xxxx' def test_nested_dataclass(): @pydantic.dataclasses.dataclass class Nested: number: int @pydantic.dataclasses.dataclass class 
Outer: n: Nested navbar = Outer(n=Nested(number='1')) assert isinstance(navbar.n, Nested) assert navbar.n.number == 1 navbar = Outer(n=('2',)) assert isinstance(navbar.n, Nested) assert navbar.n.number == 2 navbar = Outer(n={'number': '3'}) assert isinstance(navbar.n, Nested) assert navbar.n.number == 3 with pytest.raises(ValidationError) as exc_info: Outer(n='not nested') assert exc_info.value.errors() == [ { 'loc': ('n',), 'msg': 'instance of Nested, tuple or dict expected', 'type': 'type_error.dataclass', 'ctx': {'class_name': 'Nested'}, } ] with pytest.raises(ValidationError) as exc_info: Outer(n=('x',)) assert exc_info.value.errors() == [ {'loc': ('n', 'number'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_arbitrary_types_allowed(): class Button: def __init__(self, href: str): self.href = href class Config: arbitrary_types_allowed = True @pydantic.dataclasses.dataclass(config=Config) class Navbar: button: Button btn = Button(href='a') navbar = Navbar(button=btn) assert navbar.button.href == 'a' with pytest.raises(ValidationError) as exc_info: Navbar(button=('b',)) assert exc_info.value.errors() == [ { 'loc': ('button',), 'msg': 'instance of Button expected', 'type': 'type_error.arbitrary_type', 'ctx': {'expected_arbitrary_type': 'Button'}, } ] def test_nested_dataclass_model(): @pydantic.dataclasses.dataclass class Nested: number: int class Outer(BaseModel): n: Nested navbar = Outer(n=Nested(number='1')) assert navbar.n.number == 1 def test_fields(): @pydantic.dataclasses.dataclass class User: id: int name: str = 'John Doe' signup_ts: datetime = None user = User(id=123) fields = user.__pydantic_model__.__fields__ assert fields['id'].required is True assert fields['id'].default is None assert fields['name'].required is False assert fields['name'].default == 'John Doe' assert fields['signup_ts'].required is False assert fields['signup_ts'].default is None def test_default_factory_field(): @pydantic.dataclasses.dataclass class User: id: int aliases: Dict[str, str] = dataclasses.field(default_factory=lambda: {'John': 'Joey'}) user = User(id=123) fields = user.__pydantic_model__.__fields__ assert fields['id'].required is True assert fields['id'].default is None assert fields['aliases'].required is False assert fields['aliases'].default_factory() == {'John': 'Joey'} def test_default_factory_singleton_field(): class MySingleton: pass class MyConfig: arbitrary_types_allowed = True MY_SINGLETON = MySingleton() @pydantic.dataclasses.dataclass(config=MyConfig) class Foo: singleton: MySingleton = dataclasses.field(default_factory=lambda: MY_SINGLETON) # Returning a singleton from a default_factory is supported assert Foo().singleton is Foo().singleton def test_schema(): @pydantic.dataclasses.dataclass class User: id: int name: str = 'John Doe' aliases: Dict[str, str] = dataclasses.field(default_factory=lambda: {'John': 'Joey'}) signup_ts: datetime = None age: Optional[int] = dataclasses.field( default=None, metadata=dict(title='The age of the user', description='do not lie!') ) height: Optional[int] = pydantic.Field(None, title='The height in cm', ge=50, le=300) user = User(id=123) assert user.__pydantic_model__.schema() == { 'title': 'User', 'type': 'object', 'properties': { 'id': {'title': 'Id', 'type': 'integer'}, 'name': {'title': 'Name', 'default': 'John Doe', 'type': 'string'}, 'aliases': { 'title': 'Aliases', 'type': 'object', 'additionalProperties': {'type': 'string'}, }, 'signup_ts': {'title': 'Signup Ts', 'type': 'string', 'format': 'date-time'}, 
'age': { 'title': 'The age of the user', 'description': 'do not lie!', 'type': 'integer', }, 'height': { 'title': 'The height in cm', 'minimum': 50, 'maximum': 300, 'type': 'integer', }, }, 'required': ['id'], } def test_nested_schema(): @pydantic.dataclasses.dataclass class Nested: number: int @pydantic.dataclasses.dataclass class Outer: n: Nested assert Outer.__pydantic_model__.schema() == { 'title': 'Outer', 'type': 'object', 'properties': {'n': {'$ref': '#/definitions/Nested'}}, 'required': ['n'], 'definitions': { 'Nested': { 'title': 'Nested', 'type': 'object', 'properties': {'number': {'title': 'Number', 'type': 'integer'}}, 'required': ['number'], } }, } def test_initvar(): InitVar = dataclasses.InitVar @pydantic.dataclasses.dataclass class TestInitVar: x: int y: InitVar tiv = TestInitVar(1, 2) assert tiv.x == 1 with pytest.raises(AttributeError): tiv.y def test_derived_field_from_initvar(): InitVar = dataclasses.InitVar @pydantic.dataclasses.dataclass class DerivedWithInitVar: plusone: int = dataclasses.field(init=False) number: InitVar[int] def __post_init__(self, number): self.plusone = number + 1 derived = DerivedWithInitVar(1) assert derived.plusone == 2 with pytest.raises(TypeError): DerivedWithInitVar('Not A Number') def test_initvars_post_init(): @pydantic.dataclasses.dataclass class PathDataPostInit: path: Path base_path: dataclasses.InitVar[Optional[Path]] = None def __post_init__(self, base_path): if base_path is not None: self.path = base_path / self.path path_data = PathDataPostInit('world') assert 'path' in path_data.__dict__ assert 'base_path' not in path_data.__dict__ assert path_data.path == Path('world') with pytest.raises(TypeError) as exc_info: PathDataPostInit('world', base_path='/hello') assert str(exc_info.value) == "unsupported operand type(s) for /: 'str' and 'str'" def test_initvars_post_init_post_parse(): @pydantic.dataclasses.dataclass class PathDataPostInitPostParse: path: Path base_path: dataclasses.InitVar[Optional[Path]] = None def __post_init_post_parse__(self, base_path): if base_path is not None: self.path = base_path / self.path path_data = PathDataPostInitPostParse('world') assert 'path' in path_data.__dict__ assert 'base_path' not in path_data.__dict__ assert path_data.path == Path('world') assert PathDataPostInitPostParse('world', base_path='/hello').path == Path('/hello/world') def test_post_init_post_parse_without_initvars(): @pydantic.dataclasses.dataclass class Foo: a: int def __post_init_post_parse__(self): ... 
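# no InitVar fields are declared on Foo, so the call below checks that the
# hook is invoked without any extra positional arguments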
Foo(a=1) def test_classvar(): @pydantic.dataclasses.dataclass class TestClassVar: klassvar: ClassVar = "I'm a Class variable" x: int tcv = TestClassVar(2) assert tcv.klassvar == "I'm a Class variable" def test_frozenset_field(): @pydantic.dataclasses.dataclass class TestFrozenSet: set: FrozenSet[int] test_set = frozenset({1, 2, 3}) object_under_test = TestFrozenSet(set=test_set) assert object_under_test.set == test_set def test_inheritance_post_init(): post_init_called = False @pydantic.dataclasses.dataclass class Base: a: int def __post_init__(self): nonlocal post_init_called post_init_called = True @pydantic.dataclasses.dataclass class Child(Base): b: int Child(a=1, b=2) assert post_init_called def test_hashable_required(): @pydantic.dataclasses.dataclass class MyDataclass: v: Hashable MyDataclass(v=None) with pytest.raises(ValidationError) as exc_info: MyDataclass(v=[]) assert exc_info.value.errors() == [ {'loc': ('v',), 'msg': 'value is not a valid hashable', 'type': 'type_error.hashable'} ] with pytest.raises(TypeError) as exc_info: MyDataclass() assert "__init__() missing 1 required positional argument: 'v'" in str(exc_info.value) @pytest.mark.parametrize('default', [1, None, ...]) def test_hashable_optional(default): @pydantic.dataclasses.dataclass class MyDataclass: v: Hashable = default MyDataclass() MyDataclass(v=None) def test_override_builtin_dataclass(): @dataclasses.dataclass class File: hash: str name: Optional[str] size: int content: Optional[bytes] = None ValidFile = pydantic.dataclasses.dataclass(File) file = File(hash='xxx', name=b'whatever.txt', size='456') valid_file = ValidFile(hash='xxx', name=b'whatever.txt', size='456') assert file.name == b'whatever.txt' assert file.size == '456' assert valid_file.name == 'whatever.txt' assert valid_file.size == 456 assert isinstance(valid_file, File) assert isinstance(valid_file, ValidFile) with pytest.raises(ValidationError) as e: ValidFile(hash=[1], name='name', size=3) assert e.value.errors() == [{'loc': ('hash',), 'msg': 'str type expected', 'type': 'type_error.str'}] def test_override_builtin_dataclass_2(): @dataclasses.dataclass class Meta: modified_date: Optional[datetime] seen_count: int Meta(modified_date='not-validated', seen_count=0) @pydantic.dataclasses.dataclass @dataclasses.dataclass class File(Meta): filename: str Meta(modified_date='still-not-validated', seen_count=0) f = File(filename=b'thefilename', modified_date='2020-01-01T00:00', seen_count='7') assert f.filename == 'thefilename' assert f.modified_date == datetime(2020, 1, 1, 0, 0) assert f.seen_count == 7 def test_override_builtin_dataclass_nested(): @dataclasses.dataclass class Meta: modified_date: Optional[datetime] seen_count: int @dataclasses.dataclass class File: filename: str meta: Meta class Foo(BaseModel): file: File FileChecked = pydantic.dataclasses.dataclass(File) f = FileChecked(filename=b'thefilename', meta=Meta(modified_date='2020-01-01T00:00', seen_count='7')) assert f.filename == 'thefilename' assert f.meta.modified_date == datetime(2020, 1, 1, 0, 0) assert f.meta.seen_count == 7 with pytest.raises(ValidationError) as e: FileChecked(filename=b'thefilename', meta=Meta(modified_date='2020-01-01T00:00', seen_count=['7'])) assert e.value.errors() == [ {'loc': ('meta', 'seen_count'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] foo = Foo.parse_obj( { 'file': { 'filename': b'thefilename', 'meta': {'modified_date': '2020-01-01T00:00', 'seen_count': '7'}, }, } ) assert foo.file.filename == 'thefilename' assert 
foo.file.meta.modified_date == datetime(2020, 1, 1, 0, 0) assert foo.file.meta.seen_count == 7 def test_override_builtin_dataclass_nested_schema(): @dataclasses.dataclass class Meta: modified_date: Optional[datetime] seen_count: int @dataclasses.dataclass class File: filename: str meta: Meta FileChecked = pydantic.dataclasses.dataclass(File) assert FileChecked.__pydantic_model__.schema() == { 'definitions': { 'Meta': { 'properties': { 'modified_date': {'format': 'date-time', 'title': 'Modified ' 'Date', 'type': 'string'}, 'seen_count': {'title': 'Seen Count', 'type': 'integer'}, }, 'required': ['modified_date', 'seen_count'], 'title': 'Meta', 'type': 'object', } }, 'properties': { 'filename': {'title': 'Filename', 'type': 'string'}, 'meta': {'$ref': '#/definitions/Meta'}, }, 'required': ['filename', 'meta'], 'title': 'File', 'type': 'object', } def test_inherit_builtin_dataclass(): @dataclasses.dataclass class Z: z: int @dataclasses.dataclass class Y(Z): y: int @pydantic.dataclasses.dataclass class X(Y): x: int pika = X(x='2', y='4', z='3') assert pika.x == 2 assert pika.y == 4 assert pika.z == 3 def test_dataclass_arbitrary(): class ArbitraryType: def __init__(self): ... @dataclasses.dataclass class Test: foo: ArbitraryType bar: List[ArbitraryType] class TestModel(BaseModel): a: ArbitraryType b: Test class Config: arbitrary_types_allowed = True TestModel(a=ArbitraryType(), b=(ArbitraryType(), [ArbitraryType()])) def test_forward_stdlib_dataclass_params(): @dataclasses.dataclass(frozen=True) class Item: name: str class Example(BaseModel): item: Item other: str class Config: arbitrary_types_allowed = True e = Example(item=Item(name='pika'), other='bulbi') e.other = 'bulbi2' with pytest.raises(dataclasses.FrozenInstanceError): e.item.name = 'pika2' def test_pydantic_callable_field(): """pydantic callable fields behaviour should be the same as stdlib dataclass""" def foo(arg1, arg2): return arg1, arg2 def bar(x: int, y: float, z: str) -> bool: return str(x + y) == z class PydanticModel(BaseModel): required_callable: Callable required_callable_2: Callable[[int, float, str], bool] default_callable: Callable = foo default_callable_2: Callable[[int, float, str], bool] = bar @pydantic.dataclasses.dataclass class PydanticDataclass: required_callable: Callable required_callable_2: Callable[[int, float, str], bool] default_callable: Callable = foo default_callable_2: Callable[[int, float, str], bool] = bar @dataclasses.dataclass class StdlibDataclass: required_callable: Callable required_callable_2: Callable[[int, float, str], bool] default_callable: Callable = foo default_callable_2: Callable[[int, float, str], bool] = bar pyd_m = PydanticModel(required_callable=foo, required_callable_2=bar) pyd_dc = PydanticDataclass(required_callable=foo, required_callable_2=bar) std_dc = StdlibDataclass(required_callable=foo, required_callable_2=bar) assert ( pyd_m.required_callable is pyd_m.default_callable is pyd_dc.required_callable is pyd_dc.default_callable is std_dc.required_callable is std_dc.default_callable ) assert ( pyd_m.required_callable_2 is pyd_m.default_callable_2 is pyd_dc.required_callable_2 is pyd_dc.default_callable_2 is std_dc.required_callable_2 is std_dc.default_callable_2 ) def test_pickle_overriden_builtin_dataclass(create_module): module = create_module( # language=Python """\ import dataclasses import pydantic @dataclasses.dataclass class BuiltInDataclassForPickle: value: int class ModelForPickle(pydantic.BaseModel): # pickle can only work with top level classes as it imports them 
dataclass: BuiltInDataclassForPickle class Config: validate_assignment = True """ ) obj = module.ModelForPickle(dataclass=module.BuiltInDataclassForPickle(value=5)) pickled_obj = pickle.dumps(obj) restored_obj = pickle.loads(pickled_obj) assert restored_obj.dataclass.value == 5 assert restored_obj == obj # ensure the restored dataclass is still a pydantic dataclass with pytest.raises(ValidationError, match='value\n +value is not a valid integer'): restored_obj.dataclass.value = 'value of a wrong type' def test_config_field_info_create_model(): # works class A1(BaseModel): a: str class Config: fields = {'a': {'description': 'descr'}} assert A1.schema()['properties'] == {'a': {'title': 'A', 'description': 'descr', 'type': 'string'}} @pydantic.dataclasses.dataclass(config=A1.Config) class A2: a: str assert A2.__pydantic_model__.schema()['properties'] == { 'a': {'title': 'A', 'description': 'descr', 'type': 'string'} } def gen_2162_dataclasses(): @dataclasses.dataclass(frozen=True) class StdLibFoo: a: str b: int @pydantic.dataclasses.dataclass(frozen=True) class PydanticFoo: a: str b: int @dataclasses.dataclass(frozen=True) class StdLibBar: c: StdLibFoo @pydantic.dataclasses.dataclass(frozen=True) class PydanticBar: c: PydanticFoo @dataclasses.dataclass(frozen=True) class StdLibBaz: c: PydanticFoo @pydantic.dataclasses.dataclass(frozen=True) class PydanticBaz: c: StdLibFoo foo = StdLibFoo(a='Foo', b=1) yield foo, StdLibBar(c=foo) foo = PydanticFoo(a='Foo', b=1) yield foo, PydanticBar(c=foo) foo = PydanticFoo(a='Foo', b=1) yield foo, StdLibBaz(c=foo) foo = StdLibFoo(a='Foo', b=1) yield foo, PydanticBaz(c=foo) @pytest.mark.parametrize('foo,bar', gen_2162_dataclasses()) def test_issue_2162(foo, bar): assert dataclasses.asdict(foo) == dataclasses.asdict(bar.c) assert dataclasses.astuple(foo) == dataclasses.astuple(bar.c) assert foo == bar.c def test_issue_2383(): @dataclasses.dataclass class A: s: str def __hash__(self): return 123 class B(pydantic.BaseModel): a: A a = A('') b = B(a=a) assert hash(a) == 123 assert hash(b.a) == 123 def test_issue_2398(): @dataclasses.dataclass(order=True) class DC: num: int = 42 class Model(pydantic.BaseModel): dc: DC real_dc = DC() model = Model(dc=real_dc) # This works as expected. 
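# (editorial note, hedged: the comparisons below appear to work in both
# directions because validation hands back instances of the original DC class
# rather than a distinct subclass, so the __le__ generated by order=True sees
# matching types on each side)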
assert real_dc <= real_dc assert model.dc <= model.dc assert real_dc <= model.dc def test_issue_2424(): @dataclasses.dataclass class Base: x: str @dataclasses.dataclass class Thing(Base): y: str = dataclasses.field(default_factory=str) assert Thing(x='hi').y == '' @pydantic.dataclasses.dataclass class ValidatedThing(Base): y: str = dataclasses.field(default_factory=str) assert Thing(x='hi').y == '' assert ValidatedThing(x='hi').y == '' def test_issue_2541(): @dataclasses.dataclass(frozen=True) class Infos: id: int @dataclasses.dataclass(frozen=True) class Item: name: str infos: Infos class Example(BaseModel): item: Item e = Example.parse_obj({'item': {'name': 123, 'infos': {'id': '1'}}}) assert e.item.name == '123' assert e.item.infos.id == 1 with pytest.raises(dataclasses.FrozenInstanceError): e.item.infos.id = 2 def test_issue_2555(): @dataclasses.dataclass class Span: first: int last: int @dataclasses.dataclass class LabeledSpan(Span): label: str @dataclasses.dataclass class BinaryRelation: subject: LabeledSpan object: LabeledSpan label: str @dataclasses.dataclass class Sentence: relations: BinaryRelation class M(pydantic.BaseModel): s: Sentence assert M.schema() def test_issue_2594(): @dataclasses.dataclass class Empty: pass @pydantic.dataclasses.dataclass class M: e: Empty assert isinstance(M(e={}).e, Empty) def test_schema_description_unset(): @pydantic.dataclasses.dataclass class A: x: int assert 'description' not in A.__pydantic_model__.schema() @pydantic.dataclasses.dataclass @dataclasses.dataclass class B: x: int assert 'description' not in B.__pydantic_model__.schema() def test_schema_description_set(): @pydantic.dataclasses.dataclass class A: """my description""" x: int assert A.__pydantic_model__.schema()['description'] == 'my description' @pydantic.dataclasses.dataclass @dataclasses.dataclass class B: """my description""" x: int assert B.__pydantic_model__.schema()['description'] == 'my description' def test_issue_3011(): @dataclasses.dataclass class A: thing_a: str class B(A): thing_b: str class Config: arbitrary_types_allowed = True @pydantic.dataclasses.dataclass(config=Config) class C: thing: A b = B('Thing A') c = C(thing=b) assert c.thing.thing_a == 'Thing A' def test_issue_3162(): @dataclasses.dataclass class User: id: int name: str class Users(BaseModel): user: User other_user: User assert Users.schema() == { 'title': 'Users', 'type': 'object', 'properties': {'user': {'$ref': '#/definitions/User'}, 'other_user': {'$ref': '#/definitions/User'}}, 'required': ['user', 'other_user'], 'definitions': { 'User': { 'title': 'User', 'type': 'object', 'properties': {'id': {'title': 'Id', 'type': 'integer'}, 'name': {'title': 'Name', 'type': 'string'}}, 'required': ['id', 'name'], } }, } def test_discriminated_union_basemodel_instance_value(): @pydantic.dataclasses.dataclass class A: l: Literal['a'] @pydantic.dataclasses.dataclass class B: l: Literal['b'] @pydantic.dataclasses.dataclass class Top: sub: Union[A, B] = dataclasses.field(metadata=dict(discriminator='l')) t = Top(sub=A(l='a')) assert isinstance(t, Top) assert Top.__pydantic_model__.schema() == { 'title': 'Top', 'type': 'object', 'properties': { 'sub': { 'title': 'Sub', 'discriminator': {'propertyName': 'l', 'mapping': {'a': '#/definitions/A', 'b': '#/definitions/B'}}, 'oneOf': [{'$ref': '#/definitions/A'}, {'$ref': '#/definitions/B'}], } }, 'required': ['sub'], 'definitions': { 'A': { 'title': 'A', 'type': 'object', 'properties': {'l': {'title': 'L', 'enum': ['a'], 'type': 'string'}}, 'required': ['l'], }, 'B': {
'title': 'B', 'type': 'object', 'properties': {'l': {'title': 'L', 'enum': ['b'], 'type': 'string'}}, 'required': ['l'], }, }, } def test_post_init_after_validation(): @dataclasses.dataclass class SetWrapper: set: Set[int] def __post_init__(self): assert isinstance( self.set, set ), f"self.set should be a set but it's {self.set!r} of type {type(self.set).__name__}" class Model(pydantic.BaseModel, post_init_call='after_validation'): set_wrapper: SetWrapper model = Model(set_wrapper=SetWrapper({1, 2, 3})) json_text = model.json() assert Model.parse_raw(json_text) == model def test_keeps_custom_properties(): class StandardClass: """Class which modifies instance creation.""" a: str def __new__(cls, *args, **kwargs): instance = super().__new__(cls) instance._special_property = 1 return instance StandardLibDataclass = dataclasses.dataclass(StandardClass) PydanticDataclass = pydantic.dataclasses.dataclass(StandardClass) clases_to_test = [StandardLibDataclass, PydanticDataclass] test_string = 'string' for cls in clases_to_test: instance = cls(a=test_string) assert instance._special_property == 1 assert instance.a == test_string def test_ignore_extra(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.ignore)) class Foo: x: int foo = Foo(**{'x': '1', 'y': '2'}) assert foo.__dict__ == {'x': 1, '__pydantic_initialised__': True} def test_ignore_extra_subclass(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.ignore)) class Foo: x: int @pydantic.dataclasses.dataclass(config=dict(extra=Extra.ignore)) class Bar(Foo): y: int bar = Bar(**{'x': '1', 'y': '2', 'z': '3'}) assert bar.__dict__ == {'x': 1, 'y': 2, '__pydantic_initialised__': True} def test_allow_extra(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.allow)) class Foo: x: int foo = Foo(**{'x': '1', 'y': '2'}) assert foo.__dict__ == {'x': 1, 'y': '2', '__pydantic_initialised__': True} def test_allow_extra_subclass(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.allow)) class Foo: x: int @pydantic.dataclasses.dataclass(config=dict(extra=Extra.allow)) class Bar(Foo): y: int bar = Bar(**{'x': '1', 'y': '2', 'z': '3'}) assert bar.__dict__ == {'x': 1, 'y': 2, 'z': '3', '__pydantic_initialised__': True} def test_forbid_extra(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.forbid)) class Foo: x: int with pytest.raises(TypeError, match=re.escape("__init__() got an unexpected keyword argument 'y'")): Foo(**{'x': '1', 'y': '2'}) def test_post_init_allow_extra(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.allow)) class Foobar: a: int b: str def __post_init__(self): self.a *= 2 assert Foobar(a=1, b='a', c=4).__dict__ == {'a': 2, 'b': 'a', 'c': 4, '__pydantic_initialised__': True} def test_self_reference_dataclass(): @pydantic.dataclasses.dataclass class MyDataclass: self_reference: 'MyDataclass' assert MyDataclass.__pydantic_model__.__fields__['self_reference'].type_ is MyDataclass @pytest.mark.skipif(sys.version_info < (3, 10), reason='kw_only is not available in python < 3.10') def test_kw_only(): @pydantic.dataclasses.dataclass(kw_only=True) class A: a: int | None = None b: str with pytest.raises(TypeError, match='takes 1 positional argument but 3 were given'): A(1, '') assert A(b='hi').b == 'hi' def test_extra_forbid_list_no_error(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.forbid)) class Bar: ... 
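# regression check: an Extra.forbid item dataclass must still be usable inside
# List[...] -- only unexpected keyword arguments (see the *_error test next)
# should be rejected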
@pydantic.dataclasses.dataclass class Foo: a: List[Bar] assert isinstance(Foo(a=[Bar()]).a[0], Bar) def test_extra_forbid_list_error(): @pydantic.dataclasses.dataclass(config=dict(extra=Extra.forbid)) class Bar: ... with pytest.raises(TypeError, match=re.escape("__init__() got an unexpected keyword argument 'a'")): @pydantic.dataclasses.dataclass class Foo: a: List[Bar(a=1)] def test_parent_post_init(): @dataclasses.dataclass class A: a: float = 1 def __post_init__(self): self.a *= 2 @pydantic.dataclasses.dataclass class B(A): @validator('a') def validate_a(cls, value): value += 3 return value assert B().a == 5 # 1 * 2 + 3 def test_subclass_post_init_post_parse(): @dataclasses.dataclass class A: a: float = 1 @pydantic.dataclasses.dataclass class B(A): def __post_init_post_parse__(self): self.a *= 2 @validator('a') def validate_a(cls, value): value += 3 return value assert B().a == 8 # (1 + 3) * 2 def test_subclass_post_init(): @dataclasses.dataclass class A: a: int = 1 @pydantic.dataclasses.dataclass class B(A): def __post_init__(self): self.a *= 2 @validator('a') def validate_a(cls, value): value += 3 return value assert B().a == 5 # 1 * 2 + 3 def test_subclass_post_init_inheritance(): @dataclasses.dataclass class A: a: int = 1 @pydantic.dataclasses.dataclass class B(A): def __post_init__(self): self.a *= 2 @validator('a') def validate_a(cls, value): value += 3 return value @pydantic.dataclasses.dataclass class C(B): def __post_init__(self): self.a *= 3 assert C().a == 6 # 1 * 3 + 3 def test_inheritance_post_init_2(): post_init_calls = 0 post_init_post_parse_calls = 0 @pydantic.dataclasses.dataclass class BaseClass: def __post_init__(self): nonlocal post_init_calls post_init_calls += 1 @pydantic.dataclasses.dataclass class AbstractClass(BaseClass): pass @pydantic.dataclasses.dataclass class ConcreteClass(AbstractClass): def __post_init_post_parse__(self): nonlocal post_init_post_parse_calls post_init_post_parse_calls += 1 ConcreteClass() assert post_init_calls == 1 assert post_init_post_parse_calls == 1 def test_dataclass_setattr(): class Foo: bar: str = 'cat' default_config = dataclasses.make_dataclass( cls_name=Foo.__name__, bases=(dataclasses.dataclass(Foo),), fields=[('bar', ClassVar[str], dataclasses.field(default=Foo.bar))], ) config = pydantic.dataclasses.dataclass(default_config) assert config.bar == 'cat' setattr(config, 'bar', 'dog') assert config.bar == 'dog' def test_frozen_dataclasses(): @dataclasses.dataclass(frozen=True) class First: a: int @dataclasses.dataclass(frozen=True) class Second(First): @property def b(self): return self.a class My(BaseModel): my: Second assert My(my=Second(a='1')).my.b == 1 def test_empty_dataclass(): """should be able to inherit without adding a field""" @dataclasses.dataclass class UnvalidatedDataclass: a: int = 0 @pydantic.dataclasses.dataclass class ValidatedDerivedA(UnvalidatedDataclass): ... @pydantic.dataclasses.dataclass() class ValidatedDerivedB(UnvalidatedDataclass): b: int = 0 @pydantic.dataclasses.dataclass() class ValidatedDerivedC(UnvalidatedDataclass): ... 
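# Editorial sketch (hypothetical names, not part of the upstream suite): the
# proxy tests below depend on pydantic v1 accepting a *plain* stdlib dataclass
# as a field annotation and adding validation on the fly, while validated
# values remain instances of the original class. A minimal, hedged example:
def _stdlib_dataclass_as_field_sketch():
    @dataclasses.dataclass
    class Point:
        x: int = 0

    class Holder(BaseModel):
        p: Point

    h = Holder(p={'x': '1'})  # dict input is coerced into a Point, '1' -> 1
    assert isinstance(h.p, Point) and h.p.x == 1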
def test_proxy_dataclass(): @dataclasses.dataclass class Foo: a: Optional[int] = dataclasses.field(default=42) b: List = dataclasses.field(default_factory=list) @dataclasses.dataclass class Bar: pass @dataclasses.dataclass class Model1: foo: Foo class Model2(BaseModel): foo: Foo m1 = Model1(foo=Foo()) m2 = Model2(foo=Foo()) assert m1.foo.a == m2.foo.a == 42 assert m1.foo.b == m2.foo.b == [] assert m1.foo.Bar() is not None assert m2.foo.Bar() is not None def test_proxy_dataclass_2(): @dataclasses.dataclass class M1: a: int b: str = 'b' c: float = dataclasses.field(init=False) def __post_init__(self): self.c = float(self.a) @dataclasses.dataclass class M2: a: int b: str = 'b' c: float = dataclasses.field(init=False) def __post_init__(self): self.c = float(self.a) @pydantic.validator('b') def check_b(cls, v): if not v: raise ValueError('b should not be empty') return v m1 = pydantic.parse_obj_as(M1, {'a': 3}) m2 = pydantic.parse_obj_as(M2, {'a': 3}) assert m1.a == m2.a == 3 assert m1.b == m2.b == 'b' assert m1.c == m2.c == 3.0 def test_can_copy_wrapped_dataclass_type(): @pydantic.dataclasses.dataclass @dataclasses.dataclass class A: name: int B = copy.copy(A) assert B is not A assert B(1) == A(1) def test_can_deepcopy_wrapped_dataclass_type(): @pydantic.dataclasses.dataclass @dataclasses.dataclass class A: name: int B = copy.deepcopy(A) assert B is not A assert B(1) == A(1) @pytest.mark.skipif(sys.version_info < (3, 8), reason='cached_property was introduced in python3.8 stdlib') def test_cached_property(): @dataclasses.dataclass(frozen=True) class A: name: str @cached_property def _name(self): return 'name' class MyModel(BaseModel, arbitrary_types_allowed=True, frozen=True, extra=Extra.forbid): scheduler: A models = { 'AX': MyModel(scheduler=A('a')), } sched = A('sched') models_2 = { 'AY': models['AX'].copy(update=dict(scheduler=sched)), } models = {**models, **models_2} models['AY'].scheduler._name MyModel.parse_obj(models['AY'].dict()) pydantic-1.10.14/tests/test_datetime_parse.py000066400000000000000000000322241455251250200212360ustar00rootroot00000000000000""" Stolen from https://github.com/django/django/blob/main/tests/utils_tests/test_dateparse.py at 9718fa2e8abe430c3526a9278dd976443d4ae3c6 Changed to: * use standard pytest layout * parametrize tests """ import re from datetime import date, datetime, time, timedelta, timezone import pytest from pydantic import BaseModel, ValidationError, condate, errors from pydantic.datetime_parse import parse_date, parse_datetime, parse_duration, parse_time def create_tz(minutes): return timezone(timedelta(minutes=minutes)) @pytest.mark.parametrize( 'value,result', [ # Valid inputs ('1494012444.883309', date(2017, 5, 5)), (b'1494012444.883309', date(2017, 5, 5)), (1_494_012_444.883_309, date(2017, 5, 5)), ('1494012444', date(2017, 5, 5)), (1_494_012_444, date(2017, 5, 5)), (0, date(1970, 1, 1)), ('2012-04-23', date(2012, 4, 23)), (b'2012-04-23', date(2012, 4, 23)), ('2012-4-9', date(2012, 4, 9)), (date(2012, 4, 9), date(2012, 4, 9)), (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)), # Invalid inputs ('x20120423', errors.DateError), ('2012-04-56', errors.DateError), (19_999_999_999, date(2603, 10, 11)), # just before watershed (20_000_000_001, date(1970, 8, 20)), # just after watershed (1_549_316_052, date(2019, 2, 4)), # nowish in s (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns ('infinity', date(9999, 12, 31)), ('inf', 
date(9999, 12, 31)), (float('inf'), date(9999, 12, 31)), ('infinity ', date(9999, 12, 31)), (int('1' + '0' * 100), date(9999, 12, 31)), (1e1000, date(9999, 12, 31)), ('-infinity', date(1, 1, 1)), ('-inf', date(1, 1, 1)), ('nan', ValueError), ], ) def test_date_parsing(value, result): if type(result) == type and issubclass(result, Exception): with pytest.raises(result): parse_date(value) else: assert parse_date(value) == result @pytest.mark.parametrize( 'value,result', [ # Valid inputs ('09:15:00', time(9, 15)), ('10:10', time(10, 10)), ('10:20:30.400', time(10, 20, 30, 400_000)), (b'10:20:30.400', time(10, 20, 30, 400_000)), ('4:8:16', time(4, 8, 16)), (time(4, 8, 16), time(4, 8, 16)), (3610, time(1, 0, 10)), (3600.5, time(1, 0, 0, 500000)), (86400 - 1, time(23, 59, 59)), ('11:05:00-05:30', time(11, 5, 0, tzinfo=create_tz(-330))), ('11:05:00-0530', time(11, 5, 0, tzinfo=create_tz(-330))), ('11:05:00Z', time(11, 5, 0, tzinfo=timezone.utc)), ('11:05:00+00', time(11, 5, 0, tzinfo=timezone.utc)), ('11:05-06', time(11, 5, 0, tzinfo=create_tz(-360))), ('11:05+06', time(11, 5, 0, tzinfo=create_tz(360))), # Invalid inputs (86400, errors.TimeError), ('xxx', errors.TimeError), ('091500', errors.TimeError), (b'091500', errors.TimeError), ('09:15:90', errors.TimeError), ('11:05:00Y', errors.TimeError), ('11:05:00-25:00', errors.TimeError), ], ) def test_time_parsing(value, result): if result == errors.TimeError: with pytest.raises(errors.TimeError): parse_time(value) else: assert parse_time(value) == result @pytest.mark.parametrize( 'value,result', [ # Valid inputs # values in seconds ('1494012444.883309', datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), ('1494012444', datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), (b'1494012444', datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), # values in ms ('1494012444000.883309', datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)), ('-1494012444000.883309', datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)), (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), ('2012-04-23T09:15:00', datetime(2012, 4, 23, 9, 15)), ('2012-4-9 4:8:16', datetime(2012, 4, 9, 4, 8, 16)), ('2012-04-23T09:15:00Z', datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)), ('2012-4-9 4:8:16-0320', datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))), ('2012-04-23T10:20:30.400+02:30', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))), ('2012-04-23T10:20:30.400+02', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))), ('2012-04-23T10:20:30.400-02', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), (b'2012-04-23T10:20:30.400-02', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), (datetime(2017, 5, 5), datetime(2017, 5, 5)), (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)), # Invalid inputs ('x20120423091500', errors.DateTimeError), ('2012-04-56T09:15:90', errors.DateTimeError), ('2012-04-23T11:05:00-25:00', errors.DateTimeError), (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)), # just before watershed (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms 
(1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns ('infinity', datetime(9999, 12, 31, 23, 59, 59, 999999)), ('inf', datetime(9999, 12, 31, 23, 59, 59, 999999)), ('inf ', datetime(9999, 12, 31, 23, 59, 59, 999999)), (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)), (float('inf'), datetime(9999, 12, 31, 23, 59, 59, 999999)), ('-infinity', datetime(1, 1, 1, 0, 0)), ('-inf', datetime(1, 1, 1, 0, 0)), ('nan', ValueError), ], ) def test_datetime_parsing(value, result): if type(result) == type and issubclass(result, Exception): with pytest.raises(result): parse_datetime(value) else: assert parse_datetime(value) == result @pytest.mark.parametrize( 'delta', [ timedelta(days=4, minutes=15, seconds=30, milliseconds=100), # fractions of seconds timedelta(hours=10, minutes=15, seconds=30), # hours, minutes, seconds timedelta(days=4, minutes=15, seconds=30), # multiple days timedelta(days=1, minutes=00, seconds=00), # single day timedelta(days=-4, minutes=15, seconds=30), # negative durations timedelta(minutes=15, seconds=30), # minute & seconds timedelta(seconds=30), # seconds ], ) def test_parse_python_format(delta): assert parse_duration(delta) == delta assert parse_duration(str(delta)) == delta @pytest.mark.parametrize( 'value,result', [ # seconds (timedelta(seconds=30), timedelta(seconds=30)), ('30', timedelta(seconds=30)), (30, timedelta(seconds=30)), (30.1, timedelta(seconds=30, milliseconds=100)), (9.9e-05, timedelta(microseconds=99)), # minutes seconds ('15:30', timedelta(minutes=15, seconds=30)), ('5:30', timedelta(minutes=5, seconds=30)), # hours minutes seconds ('10:15:30', timedelta(hours=10, minutes=15, seconds=30)), ('1:15:30', timedelta(hours=1, minutes=15, seconds=30)), ('100:200:300', timedelta(hours=100, minutes=200, seconds=300)), # days ('4 15:30', timedelta(days=4, minutes=15, seconds=30)), ('4 10:15:30', timedelta(days=4, hours=10, minutes=15, seconds=30)), # fractions of seconds ('15:30.1', timedelta(minutes=15, seconds=30, milliseconds=100)), ('15:30.01', timedelta(minutes=15, seconds=30, milliseconds=10)), ('15:30.001', timedelta(minutes=15, seconds=30, milliseconds=1)), ('15:30.0001', timedelta(minutes=15, seconds=30, microseconds=100)), ('15:30.00001', timedelta(minutes=15, seconds=30, microseconds=10)), ('15:30.000001', timedelta(minutes=15, seconds=30, microseconds=1)), (b'15:30.000001', timedelta(minutes=15, seconds=30, microseconds=1)), # negative ('-4 15:30', timedelta(days=-4, minutes=15, seconds=30)), ('-172800', timedelta(days=-2)), ('-15:30', timedelta(minutes=-15, seconds=30)), ('-1:15:30', timedelta(hours=-1, minutes=15, seconds=30)), ('-30.1', timedelta(seconds=-30, milliseconds=-100)), # iso_8601 ('P4Y', errors.DurationError), ('P4M', errors.DurationError), ('P4W', errors.DurationError), ('P4D', timedelta(days=4)), ('P0.5D', timedelta(hours=12)), ('PT5H', timedelta(hours=5)), ('PT5M', timedelta(minutes=5)), ('PT5S', timedelta(seconds=5)), ('PT0.000005S', timedelta(microseconds=5)), (b'PT0.000005S', timedelta(microseconds=5)), ], ) def test_parse_durations(value, result): if result == errors.DurationError: with pytest.raises(errors.DurationError): parse_duration(value) else: assert parse_duration(value) == result @pytest.mark.parametrize( 'field, value, error_message', [ ('dt', [], 'invalid type; expected datetime, string, bytes, int or float'), ('dt', {}, 'invalid type; expected datetime, string, 
bytes, int or float'),
        ('dt', object, 'invalid type; expected datetime, string, bytes, int or float'),
        ('d', [], 'invalid type; expected date, string, bytes, int or float'),
        ('d', {}, 'invalid type; expected date, string, bytes, int or float'),
        ('d', object, 'invalid type; expected date, string, bytes, int or float'),
        ('t', [], 'invalid type; expected time, string, bytes, int or float'),
        ('t', {}, 'invalid type; expected time, string, bytes, int or float'),
        ('t', object, 'invalid type; expected time, string, bytes, int or float'),
        ('td', [], 'invalid type; expected timedelta, string, bytes, int or float'),
        ('td', {}, 'invalid type; expected timedelta, string, bytes, int or float'),
        ('td', object, 'invalid type; expected timedelta, string, bytes, int or float'),
    ],
)
def test_model_type_errors(field, value, error_message):
    class Model(BaseModel):
        dt: datetime = None
        d: date = None
        t: time = None
        td: timedelta = None

    with pytest.raises(ValidationError) as exc_info:
        Model(**{field: value})
    assert len(exc_info.value.errors()) == 1
    error = exc_info.value.errors()[0]
    assert error == {'loc': (field,), 'type': 'type_error', 'msg': error_message}


@pytest.mark.parametrize('field', ['dt', 'd', 't', 'td'])
def test_unicode_decode_error(field):
    class Model(BaseModel):
        dt: datetime = None
        d: date = None
        t: time = None
        td: timedelta = None

    with pytest.raises(ValidationError) as exc_info:
        Model(**{field: b'\x81'})
    assert len(exc_info.value.errors()) == 1
    error = exc_info.value.errors()[0]
    assert error == {
        'loc': (field,),
        'type': 'value_error.unicodedecode',
        'msg': "'utf-8' codec can't decode byte 0x81 in position 0: invalid start byte",
    }


def test_nan():
    class Model(BaseModel):
        dt: datetime
        d: date

    with pytest.raises(ValidationError) as exc_info:
        Model(dt='nan', d='nan')
    assert exc_info.value.errors() == [
        {
            'loc': ('dt',),
            'msg': 'cannot convert float NaN to integer',
            'type': 'value_error',
        },
        {
            'loc': ('d',),
            'msg': 'cannot convert float NaN to integer',
            'type': 'value_error',
        },
    ]


@pytest.mark.parametrize(
    'constraint,msg,ok_value,error_value',
    [
        ('gt', 'greater than', date(2020, 1, 2), date(2019, 12, 31)),
        ('gt', 'greater than', date(2020, 1, 2), date(2020, 1, 1)),
        ('ge', 'greater than or equal to', date(2020, 1, 2), date(2019, 12, 31)),
        ('ge', 'greater than or equal to', date(2020, 1, 1), date(2019, 12, 31)),
        ('lt', 'less than', date(2019, 12, 31), date(2020, 1, 2)),
        ('lt', 'less than', date(2019, 12, 31), date(2020, 1, 1)),
        ('le', 'less than or equal to', date(2019, 12, 31), date(2020, 1, 2)),
        ('le', 'less than or equal to', date(2020, 1, 1), date(2020, 1, 2)),
    ],
)
def test_date_constraints(constraint, msg, ok_value, error_value):
    class Model(BaseModel):
        a: condate(**{constraint: date(2020, 1, 1)})

    assert Model(a=ok_value).dict() == {'a': ok_value}

    match = re.escape(
        f'ensure this value is {msg} 2020-01-01 '
        f'(type=value_error.number.not_{constraint}; limit_value=2020-01-01)'
    )
    with pytest.raises(ValidationError, match=match):
        Model(a=error_value)
pydantic-1.10.14/tests/test_decorator.py000066400000000000000000000347251455251250200202400ustar00rootroot00000000000000import asyncio
import inspect
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import List

import pytest
from typing_extensions import Annotated, TypedDict

from pydantic import BaseModel, Extra, Field, ValidationError, validate_arguments
from pydantic.decorator import ValidatedFunction
from pydantic.errors import ConfigError

skip_pre_38 = pytest.mark.skipif(sys.version_info < (3, 8),
                                 reason='testing >= 3.8 behaviour only')


def test_args():
    @validate_arguments
    def foo(a: int, b: int):
        return f'{a}, {b}'

    assert foo(1, 2) == '1, 2'
    assert foo(*[1, 2]) == '1, 2'
    assert foo(*(1, 2)) == '1, 2'
    assert foo(*[1], 2) == '1, 2'

    with pytest.raises(ValidationError) as exc_info:
        foo()
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'},
        {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'},
    ]

    with pytest.raises(ValidationError) as exc_info:
        foo(1, 'x')
    assert exc_info.value.errors() == [
        {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]

    with pytest.raises(ValidationError) as exc_info:
        foo(1, 2, 3)
    assert exc_info.value.errors() == [
        {'loc': ('args',), 'msg': '2 positional arguments expected but 3 given', 'type': 'type_error'}
    ]

    with pytest.raises(ValidationError) as exc_info:
        foo(1, 2, apple=3)
    assert exc_info.value.errors() == [
        {'loc': ('kwargs',), 'msg': "unexpected keyword argument: 'apple'", 'type': 'type_error'}
    ]

    with pytest.raises(ValidationError) as exc_info:
        foo(1, 2, a=3)
    assert exc_info.value.errors() == [
        {'loc': ('v__duplicate_kwargs',), 'msg': "multiple values for argument: 'a'", 'type': 'type_error'}
    ]

    with pytest.raises(ValidationError) as exc_info:
        foo(1, 2, a=3, b=4)
    assert exc_info.value.errors() == [
        {'loc': ('v__duplicate_kwargs',), 'msg': "multiple values for arguments: 'a', 'b'", 'type': 'type_error'}
    ]


def test_wrap():
    @validate_arguments
    def foo_bar(a: int, b: int):
        """This is the foo_bar method."""
        return f'{a}, {b}'

    assert foo_bar.__doc__ == 'This is the foo_bar method.'
    assert foo_bar.__name__ == 'foo_bar'
    assert foo_bar.__module__ == 'tests.test_decorator'
    assert foo_bar.__qualname__ == 'test_wrap.<locals>.foo_bar'
    assert isinstance(foo_bar.vd, ValidatedFunction)
    assert callable(foo_bar.raw_function)
    assert foo_bar.vd.arg_mapping == {0: 'a', 1: 'b'}
    assert foo_bar.vd.positional_only_args == set()
    assert issubclass(foo_bar.model, BaseModel)
    assert foo_bar.model.__fields__.keys() == {'a', 'b', 'args', 'kwargs', 'v__duplicate_kwargs'}
    assert foo_bar.model.__name__ == 'FooBar'
    assert foo_bar.model.schema()['title'] == 'FooBar'
    assert repr(inspect.signature(foo_bar)) == '<Signature (a: int, b: int)>'


def test_kwargs():
    @validate_arguments
    def foo(*, a: int, b: int):
        return a + b

    assert foo.model.__fields__.keys() == {'a', 'b', 'args', 'kwargs'}
    assert foo(a=1, b=3) == 4

    with pytest.raises(ValidationError) as exc_info:
        foo(a=1, b='x')
    assert exc_info.value.errors() == [
        {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]

    with pytest.raises(ValidationError) as exc_info:
        foo(1, 'x')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'},
        {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'},
        {'loc': ('args',), 'msg': '0 positional arguments expected but 2 given', 'type': 'type_error'},
    ]


def test_untyped():
    @validate_arguments
    def foo(a, b, c='x', *, d='y'):
        return ', '.join(str(arg) for arg in [a, b, c, d])

    assert foo(1, 2) == '1, 2, x, y'
    assert foo(1, {'x': 2}, c='3', d='4') == "1, {'x': 2}, 3, 4"


@pytest.mark.parametrize('validated', (True, False))
def test_var_args_kwargs(validated):
    def foo(a, b, *args, d=3, **kwargs):
        return f'a={a!r}, b={b!r}, args={args!r}, d={d!r}, kwargs={kwargs!r}'

    if validated:
        foo = validate_arguments(foo)

    assert foo(1, 2) == 'a=1, b=2, args=(), d=3, kwargs={}'
    assert foo(1, 2, 3, d=4) == 'a=1, b=2, args=(3,), d=4, kwargs={}'
    assert foo(*[1, 2, 3], d=4)
== 'a=1, b=2, args=(3,), d=4, kwargs={}' assert foo(1, 2, args=(10, 11)) == "a=1, b=2, args=(), d=3, kwargs={'args': (10, 11)}" assert foo(1, 2, 3, args=(10, 11)) == "a=1, b=2, args=(3,), d=3, kwargs={'args': (10, 11)}" assert foo(1, 2, 3, e=10) == "a=1, b=2, args=(3,), d=3, kwargs={'e': 10}" assert foo(1, 2, kwargs=4) == "a=1, b=2, args=(), d=3, kwargs={'kwargs': 4}" assert foo(1, 2, kwargs=4, e=5) == "a=1, b=2, args=(), d=3, kwargs={'kwargs': 4, 'e': 5}" def test_field_can_provide_factory() -> None: @validate_arguments def foo(a: int, b: int = Field(default_factory=lambda: 99), *args: int) -> int: """mypy is happy with this""" return a + b + sum(args) assert foo(3) == 102 assert foo(1, 2, 3) == 6 def test_annotated_field_can_provide_factory() -> None: @validate_arguments def foo2(a: int, b: Annotated[int, Field(default_factory=lambda: 99)], *args: int) -> int: """mypy reports Incompatible default for argument "b" if we don't supply ANY as default""" return a + b + sum(args) assert foo2(1) == 100 @skip_pre_38 def test_positional_only(create_module): module = create_module( # language=Python """ from pydantic import validate_arguments @validate_arguments def foo(a, b, /, c=None): return f'{a}, {b}, {c}' """ ) assert module.foo(1, 2) == '1, 2, None' assert module.foo(1, 2, 44) == '1, 2, 44' assert module.foo(1, 2, c=44) == '1, 2, 44' with pytest.raises(ValidationError) as exc_info: module.foo(1, b=2) assert exc_info.value.errors() == [ { 'loc': ('v__positional_only',), 'msg': "positional-only argument passed as keyword argument: 'b'", 'type': 'type_error', } ] with pytest.raises(ValidationError) as exc_info: module.foo(a=1, b=2) assert exc_info.value.errors() == [ { 'loc': ('v__positional_only',), 'msg': "positional-only arguments passed as keyword arguments: 'a', 'b'", 'type': 'type_error', } ] def test_args_name(): @validate_arguments def foo(args: int, kwargs: int): return f'args={args!r}, kwargs={kwargs!r}' assert foo.model.__fields__.keys() == {'args', 'kwargs', 'v__args', 'v__kwargs', 'v__duplicate_kwargs'} assert foo(1, 2) == 'args=1, kwargs=2' with pytest.raises(ValidationError) as exc_info: foo(1, 2, apple=4) assert exc_info.value.errors() == [ {'loc': ('v__kwargs',), 'msg': "unexpected keyword argument: 'apple'", 'type': 'type_error'} ] with pytest.raises(ValidationError) as exc_info: foo(1, 2, apple=4, banana=5) assert exc_info.value.errors() == [ {'loc': ('v__kwargs',), 'msg': "unexpected keyword arguments: 'apple', 'banana'", 'type': 'type_error'} ] with pytest.raises(ValidationError) as exc_info: foo(1, 2, 3) assert exc_info.value.errors() == [ {'loc': ('v__args',), 'msg': '2 positional arguments expected but 3 given', 'type': 'type_error'} ] def test_v_args(): with pytest.raises( ConfigError, match='"v__args", "v__kwargs", "v__positional_only" and "v__duplicate_kwargs" are not permitted' ): @validate_arguments def foo1(v__args: int): pass with pytest.raises( ConfigError, match='"v__args", "v__kwargs", "v__positional_only" and "v__duplicate_kwargs" are not permitted' ): @validate_arguments def foo2(v__kwargs: int): pass with pytest.raises( ConfigError, match='"v__args", "v__kwargs", "v__positional_only" and "v__duplicate_kwargs" are not permitted' ): @validate_arguments def foo3(v__positional_only: int): pass with pytest.raises( ConfigError, match='"v__args", "v__kwargs", "v__positional_only" and "v__duplicate_kwargs" are not permitted' ): @validate_arguments def foo4(v__duplicate_kwargs: int): pass def test_async(): @validate_arguments async def foo(a, b): return f'a={a} 
b={b}' async def run(): v = await foo(1, 2) assert v == 'a=1 b=2' asyncio.run(run()) with pytest.raises(ValidationError) as exc_info: asyncio.run(foo('x')) assert exc_info.value.errors() == [{'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_string_annotation(): @validate_arguments def foo(a: 'List[int]', b: 'Path'): return f'a={a!r} b={b!r}' assert foo([1, 2, 3], '/') with pytest.raises(ValidationError) as exc_info: foo(['x']) assert exc_info.value.errors() == [ {'loc': ('a', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_item_method(): class X: def __init__(self, v): self.v = v @validate_arguments def foo(self, a: int, b: int): assert self.v == a return f'{a}, {b}' x = X(4) assert x.foo(4, 2) == '4, 2' assert x.foo(*[4, 2]) == '4, 2' with pytest.raises(ValidationError) as exc_info: x.foo() assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_class_method(): class X: @classmethod @validate_arguments def foo(cls, a: int, b: int): assert cls == X return f'{a}, {b}' x = X() assert x.foo(4, 2) == '4, 2' assert x.foo(*[4, 2]) == '4, 2' with pytest.raises(ValidationError) as exc_info: x.foo() assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_config_title(): @validate_arguments(config=dict(title='Testing')) def foo(a: int, b: int): return f'{a}, {b}' assert foo(1, 2) == '1, 2' assert foo(1, b=2) == '1, 2' assert foo.model.schema()['title'] == 'Testing' def test_config_title_cls(): class Config: title = 'Testing' @validate_arguments(config=Config) def foo(a: int, b: int): return f'{a}, {b}' assert foo(1, 2) == '1, 2' assert foo(1, b=2) == '1, 2' assert foo.model.schema()['title'] == 'Testing' def test_config_fields(): with pytest.raises(ConfigError, match='Setting the "fields" and "alias_generator" property on custom Config for @'): @validate_arguments(config=dict(fields={'b': 'bang'})) def foo(a: int, b: int): return f'{a}, {b}' def test_config_arbitrary_types_allowed(): class EggBox: def __str__(self) -> str: return 'EggBox()' @validate_arguments(config=dict(arbitrary_types_allowed=True)) def foo(a: int, b: EggBox): return f'{a}, {b}' assert foo(1, EggBox()) == '1, EggBox()' with pytest.raises(ValidationError) as exc_info: assert foo(1, 2) == '1, 2' assert exc_info.value.errors() == [ { 'loc': ('b',), 'msg': 'instance of EggBox expected', 'type': 'type_error.arbitrary_type', 'ctx': {'expected_arbitrary_type': 'EggBox'}, }, ] def test_validate(mocker): stub = mocker.stub(name='on_something_stub') @validate_arguments def func(s: str, count: int, *, separator: bytes = b''): stub(s, count, separator) func.validate('qwe', 2) with pytest.raises(ValidationError): func.validate(['qwe'], 2) stub.assert_not_called() def test_annotated_use_of_alias(): @validate_arguments def foo(a: Annotated[int, Field(alias='b')], c: Annotated[int, Field()], d: Annotated[int, Field(alias='')]): return a + c + d assert foo(**{'b': 10, 'c': 12, '': 1}) == 23 with pytest.raises(ValidationError) as exc_info: assert foo(a=10, c=12, d=1) == 10 assert exc_info.value.errors() == [ {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('',), 'msg': 'field required', 'type': 
'value_error.missing'}, {'loc': ('a',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'}, {'loc': ('d',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'}, ] def test_use_of_alias(): @validate_arguments def foo(c: int = Field(default_factory=lambda: 20), a: int = Field(default_factory=lambda: 10, alias='b')): return a + c assert foo(b=10) == 30 def test_allow_population_by_field_name(): @validate_arguments(config=dict(allow_population_by_field_name=True)) def foo(a: Annotated[int, Field(alias='b')], c: Annotated[int, Field(alias='d')]): return a + c assert foo(a=10, d=1) == 11 assert foo(b=10, c=1) == 11 assert foo(a=10, c=1) == 11 def test_validate_all(): @validate_arguments(config=dict(validate_all=True)) def foo(dt: datetime = Field(default_factory=lambda: 946684800)): return dt assert foo() == datetime(2000, 1, 1, tzinfo=timezone.utc) assert foo(0) == datetime(1970, 1, 1, tzinfo=timezone.utc) @skip_pre_38 def test_validate_all_positional(create_module): module = create_module( # language=Python """ from datetime import datetime from pydantic import Field, validate_arguments @validate_arguments(config=dict(validate_all=True)) def foo(dt: datetime = Field(default_factory=lambda: 946684800), /): return dt """ ) assert module.foo() == datetime(2000, 1, 1, tzinfo=timezone.utc) assert module.foo(0) == datetime(1970, 1, 1, tzinfo=timezone.utc) def test_validate_extra(): class TypedTest(TypedDict): y: str @validate_arguments(config={'extra': Extra.allow}) def test(other: TypedTest): return other assert test(other={'y': 'b', 'z': 'a'}) == {'y': 'b', 'z': 'a'} @validate_arguments(config={'extra': Extra.ignore}) def test(other: TypedTest): return other assert test(other={'y': 'b', 'z': 'a'}) == {'y': 'b'} pydantic-1.10.14/tests/test_discrimated_union.py000066400000000000000000000411021455251250200217430ustar00rootroot00000000000000import re from enum import Enum from typing import Generic, TypeVar, Union import pytest from typing_extensions import Annotated, Literal from pydantic import BaseModel, Field, ValidationError from pydantic.errors import ConfigError from pydantic.generics import GenericModel def test_discriminated_union_only_union(): with pytest.raises( TypeError, match='`discriminator` can only be used with `Union` type with more than one variant' ): class Model(BaseModel): x: str = Field(..., discriminator='qwe') def test_discriminated_union_single_variant(): with pytest.raises( TypeError, match='`discriminator` can only be used with `Union` type with more than one variant' ): class Model(BaseModel): x: Union[str] = Field(..., discriminator='qwe') def test_discriminated_union_invalid_type(): with pytest.raises(TypeError, match="Type 'str' is not a valid `BaseModel` or `dataclass`"): class Model(BaseModel): x: Union[str, int] = Field(..., discriminator='qwe') def test_discriminated_union_defined_discriminator(): class Cat(BaseModel): c: str class Dog(BaseModel): pet_type: Literal['dog'] d: str with pytest.raises(ConfigError, match="Model 'Cat' needs a discriminator field for key 'pet_type'"): class Model(BaseModel): pet: Union[Cat, Dog] = Field(..., discriminator='pet_type') number: int def test_discriminated_union_literal_discriminator(): class Cat(BaseModel): pet_type: int c: str class Dog(BaseModel): pet_type: Literal['dog'] d: str with pytest.raises(ConfigError, match="Field 'pet_type' of model 'Cat' needs to be a `Literal`"): class Model(BaseModel): pet: Union[Cat, Dog] = Field(..., discriminator='pet_type') number: int def 
test_discriminated_union_root_same_discriminator(): class BlackCat(BaseModel): pet_type: Literal['blackcat'] class WhiteCat(BaseModel): pet_type: Literal['whitecat'] class Cat(BaseModel): __root__: Union[BlackCat, WhiteCat] class Dog(BaseModel): pet_type: Literal['dog'] with pytest.raises(ConfigError, match="Field 'pet_type' is not the same for all submodels of 'Cat'"): class Pet(BaseModel): __root__: Union[Cat, Dog] = Field(..., discriminator='pet_type') def test_discriminated_union_validation(): class BlackCat(BaseModel): pet_type: Literal['cat'] color: Literal['black'] black_infos: str class WhiteCat(BaseModel): pet_type: Literal['cat'] color: Literal['white'] white_infos: str class Cat(BaseModel): __root__: Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] class Dog(BaseModel): pet_type: Literal['dog'] d: str class Lizard(BaseModel): pet_type: Literal['reptile', 'lizard'] l: str class Model(BaseModel): pet: Annotated[Union[Cat, Dog, Lizard], Field(discriminator='pet_type')] number: int with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_typ': 'cat'}, 'number': 'x'}) assert exc_info.value.errors() == [ { 'loc': ('pet',), 'msg': "Discriminator 'pet_type' is missing in value", 'type': 'value_error.discriminated_union.missing_discriminator', 'ctx': {'discriminator_key': 'pet_type'}, }, {'loc': ('number',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': 'fish', 'number': 2}) assert exc_info.value.errors() == [ { 'loc': ('pet',), 'msg': "Discriminator 'pet_type' is missing in value", 'type': 'value_error.discriminated_union.missing_discriminator', 'ctx': {'discriminator_key': 'pet_type'}, }, ] with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_type': 'fish'}, 'number': 2}) assert exc_info.value.errors() == [ { 'loc': ('pet',), 'msg': ( "No match for discriminator 'pet_type' and value 'fish' " "(allowed values: 'cat', 'dog', 'reptile', 'lizard')" ), 'type': 'value_error.discriminated_union.invalid_discriminator', 'ctx': { 'discriminator_key': 'pet_type', 'discriminator_value': 'fish', 'allowed_values': "'cat', 'dog', 'reptile', 'lizard'", }, }, ] with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_type': 'lizard'}, 'number': 2}) assert exc_info.value.errors() == [ {'loc': ('pet', 'Lizard', 'l'), 'msg': 'field required', 'type': 'value_error.missing'}, ] m = Model.parse_obj({'pet': {'pet_type': 'lizard', 'l': 'pika'}, 'number': 2}) assert isinstance(m.pet, Lizard) assert m.dict() == {'pet': {'pet_type': 'lizard', 'l': 'pika'}, 'number': 2} with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white'}, 'number': 2}) assert exc_info.value.errors() == [ { 'loc': ('pet', 'Cat', '__root__', 'WhiteCat', 'white_infos'), 'msg': 'field required', 'type': 'value_error.missing', } ] m = Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white', 'white_infos': 'pika'}, 'number': 2}) assert isinstance(m.pet.__root__, WhiteCat) def test_discriminated_annotated_union(): class BlackCat(BaseModel): pet_type: Literal['cat'] color: Literal['black'] black_infos: str class WhiteCat(BaseModel): pet_type: Literal['cat'] color: Literal['white'] white_infos: str Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] class Dog(BaseModel): pet_type: Literal['dog'] dog_name: str Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] class 
Model(BaseModel): pet: Pet number: int with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_typ': 'cat'}, 'number': 'x'}) assert exc_info.value.errors() == [ { 'loc': ('pet',), 'msg': "Discriminator 'pet_type' is missing in value", 'type': 'value_error.discriminated_union.missing_discriminator', 'ctx': {'discriminator_key': 'pet_type'}, }, {'loc': ('number',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_type': 'fish'}, 'number': 2}) assert exc_info.value.errors() == [ { 'loc': ('pet',), 'msg': "No match for discriminator 'pet_type' and value 'fish' " "(allowed values: 'cat', 'dog')", 'type': 'value_error.discriminated_union.invalid_discriminator', 'ctx': {'discriminator_key': 'pet_type', 'discriminator_value': 'fish', 'allowed_values': "'cat', 'dog'"}, }, ] with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_type': 'dog'}, 'number': 2}) assert exc_info.value.errors() == [ {'loc': ('pet', 'Dog', 'dog_name'), 'msg': 'field required', 'type': 'value_error.missing'}, ] m = Model.parse_obj({'pet': {'pet_type': 'dog', 'dog_name': 'milou'}, 'number': 2}) assert isinstance(m.pet, Dog) with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'red'}, 'number': 2}) assert exc_info.value.errors() == [ { 'loc': ('pet', 'Union[BlackCat, WhiteCat]'), 'msg': "No match for discriminator 'color' and value 'red' " "(allowed values: 'black', 'white')", 'type': 'value_error.discriminated_union.invalid_discriminator', 'ctx': {'discriminator_key': 'color', 'discriminator_value': 'red', 'allowed_values': "'black', 'white'"}, } ] with pytest.raises(ValidationError) as exc_info: Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white'}, 'number': 2}) assert exc_info.value.errors() == [ { 'loc': ('pet', 'Union[BlackCat, WhiteCat]', 'WhiteCat', 'white_infos'), 'msg': 'field required', 'type': 'value_error.missing', } ] m = Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white', 'white_infos': 'pika'}, 'number': 2}) assert isinstance(m.pet, WhiteCat) def test_discriminated_union_basemodel_instance_value(): class A(BaseModel): l: Literal['a'] class B(BaseModel): l: Literal['b'] class Top(BaseModel): sub: Union[A, B] = Field(..., discriminator='l') t = Top(sub=A(l='a')) assert isinstance(t, Top) def test_discriminated_union_basemodel_instance_value_with_alias(): class A(BaseModel): literal: Literal['a'] = Field(alias='lit') class B(BaseModel): literal: Literal['b'] = Field(alias='lit') class Config: allow_population_by_field_name = True class Top(BaseModel): sub: Union[A, B] = Field(..., discriminator='literal') assert Top(sub=A(lit='a')).sub.literal == 'a' assert Top(sub=B(lit='b')).sub.literal == 'b' assert Top(sub=B(literal='b')).sub.literal == 'b' def test_discriminated_union_model_with_alias(): class A(BaseModel): literal: Literal['a'] = Field(alias='lit') class B(BaseModel): literal: Literal['b'] = Field(alias='lit') class Config: allow_population_by_field_name = True class TopDisallow(BaseModel): sub: Union[A, B] = Field(..., discriminator='literal', alias='s') class TopAllow(BaseModel): sub: Union[A, B] = Field(..., discriminator='literal', alias='s') class Config: allow_population_by_field_name = True assert TopDisallow.parse_obj({'s': {'lit': 'a'}}).sub.literal == 'a' with pytest.raises(ValidationError) as exc_info: TopDisallow.parse_obj({'s': {'literal': 'b'}}) assert exc_info.value.errors() == [ { 
            'ctx': {'discriminator_key': 'literal'},
            'loc': ('s',),
            'msg': "Discriminator 'literal' is missing in value",
            'type': 'value_error.discriminated_union.missing_discriminator',
        },
    ]

    with pytest.raises(ValidationError) as exc_info:
        TopDisallow.parse_obj({'s': {'literal': 'a'}})
    assert exc_info.value.errors() == [
        {
            'ctx': {'discriminator_key': 'literal'},
            'loc': ('s',),
            'msg': "Discriminator 'literal' is missing in value",
            'type': 'value_error.discriminated_union.missing_discriminator',
        }
    ]

    with pytest.raises(ValidationError) as exc_info:
        TopDisallow.parse_obj({'sub': {'lit': 'a'}})
    assert exc_info.value.errors() == [
        {'loc': ('s',), 'msg': 'field required', 'type': 'value_error.missing'},
    ]

    assert TopAllow.parse_obj({'s': {'lit': 'a'}}).sub.literal == 'a'
    assert TopAllow.parse_obj({'s': {'lit': 'b'}}).sub.literal == 'b'
    assert TopAllow.parse_obj({'s': {'literal': 'b'}}).sub.literal == 'b'
    assert TopAllow.parse_obj({'sub': {'lit': 'a'}}).sub.literal == 'a'
    assert TopAllow.parse_obj({'sub': {'lit': 'b'}}).sub.literal == 'b'
    assert TopAllow.parse_obj({'sub': {'literal': 'b'}}).sub.literal == 'b'

    with pytest.raises(ValidationError) as exc_info:
        TopAllow.parse_obj({'s': {'literal': 'a'}})
    assert exc_info.value.errors() == [
        {'loc': ('s', 'A', 'lit'), 'msg': 'field required', 'type': 'value_error.missing'},
    ]

    with pytest.raises(ValidationError) as exc_info:
        TopAllow.parse_obj({'sub': {'literal': 'a'}})
    assert exc_info.value.errors() == [
        {'loc': ('s', 'A', 'lit'), 'msg': 'field required', 'type': 'value_error.missing'},
    ]


def test_discriminated_union_int():
    class A(BaseModel):
        l: Literal[1]

    class B(BaseModel):
        l: Literal[2]

    class Top(BaseModel):
        sub: Union[A, B] = Field(..., discriminator='l')

    assert isinstance(Top.parse_obj({'sub': {'l': 2}}).sub, B)
    with pytest.raises(ValidationError) as exc_info:
        Top.parse_obj({'sub': {'l': 3}})
    assert exc_info.value.errors() == [
        {
            'loc': ('sub',),
            'msg': "No match for discriminator 'l' and value 3 (allowed values: 1, 2)",
            'type': 'value_error.discriminated_union.invalid_discriminator',
            'ctx': {'discriminator_key': 'l', 'discriminator_value': 3, 'allowed_values': '1, 2'},
        }
    ]


def test_discriminated_union_enum():
    class EnumValue(Enum):
        a = 1
        b = 2

    class A(BaseModel):
        l: Literal[EnumValue.a]

    class B(BaseModel):
        l: Literal[EnumValue.b]

    class Top(BaseModel):
        sub: Union[A, B] = Field(..., discriminator='l')

    assert isinstance(Top.parse_obj({'sub': {'l': EnumValue.b}}).sub, B)
    with pytest.raises(ValidationError) as exc_info:
        Top.parse_obj({'sub': {'l': 3}})
    assert exc_info.value.errors() == [
        {
            'loc': ('sub',),
            'msg': "No match for discriminator 'l' and value 3 (allowed values: <EnumValue.a: 1>, <EnumValue.b: 2>)",
            'type': 'value_error.discriminated_union.invalid_discriminator',
            'ctx': {
                'discriminator_key': 'l',
                'discriminator_value': 3,
                'allowed_values': '<EnumValue.a: 1>, <EnumValue.b: 2>',
            },
        }
    ]


def test_alias_different():
    class Cat(BaseModel):
        pet_type: Literal['cat'] = Field(alias='U')
        c: str

    class Dog(BaseModel):
        pet_type: Literal['dog'] = Field(alias='T')
        d: str

    with pytest.raises(
        ConfigError, match=re.escape("Aliases for discriminator 'pet_type' must be the same (got T, U)")
    ):

        class Model(BaseModel):
            pet: Union[Cat, Dog] = Field(discriminator='pet_type')


def test_alias_same():
    class Cat(BaseModel):
        pet_type: Literal['cat'] = Field(alias='typeOfPet')
        c: str

    class Dog(BaseModel):
        pet_type: Literal['dog'] = Field(alias='typeOfPet')
        d: str

    class Model(BaseModel):
        pet: Union[Cat, Dog] = Field(discriminator='pet_type')

    assert Model(**{'pet': {'typeOfPet': 'dog', 'd': 'milou'}}).pet.pet_type == 'dog'


def test_nested():
    class
Cat(BaseModel): pet_type: Literal['cat'] name: str class Dog(BaseModel): pet_type: Literal['dog'] name: str CommonPet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] class Lizard(BaseModel): pet_type: Literal['reptile', 'lizard'] name: str class Model(BaseModel): pet: Union[CommonPet, Lizard] = Field(..., discriminator='pet_type') n: int assert isinstance(Model(**{'pet': {'pet_type': 'dog', 'name': 'Milou'}, 'n': 5}).pet, Dog) def test_generic(): T = TypeVar('T') class Success(GenericModel, Generic[T]): type: Literal['Success'] = 'Success' data: T class Failure(BaseModel): type: Literal['Failure'] = 'Failure' error_message: str class Container(GenericModel, Generic[T]): result: Union[Success[T], Failure] = Field(discriminator='type') with pytest.raises(ValidationError, match="Discriminator 'type' is missing in value"): Container[str].parse_obj({'result': {}}) with pytest.raises( ValidationError, match=re.escape("No match for discriminator 'type' and value 'Other' (allowed values: 'Success', 'Failure')"), ): Container[str].parse_obj({'result': {'type': 'Other'}}) with pytest.raises( ValidationError, match=re.escape('Container[str]\nresult -> Success[str] -> data\n field required') ): Container[str].parse_obj({'result': {'type': 'Success'}}) # coercion is done properly assert Container[str].parse_obj({'result': {'type': 'Success', 'data': 1}}).result.data == '1' def test_discriminator_with_unhashable_type(): """Verify an unhashable discriminator value raises a ValidationError.""" class Model1(BaseModel): target: Literal['t1'] a: int class Model2(BaseModel): target: Literal['t2'] b: int class Foo(BaseModel): foo: Union[Model1, Model2] = Field(discriminator='target') with pytest.raises(ValidationError, match=re.escape("No match for discriminator 'target' and value {}")): Foo(**{'foo': {'target': {}}}) pydantic-1.10.14/tests/test_edge_cases.py000066400000000000000000001642041455251250200203360ustar00rootroot00000000000000import importlib.util import sys from collections.abc import Hashable from decimal import Decimal from enum import Enum from typing import Any, Dict, FrozenSet, Generic, List, Optional, Sequence, Set, Tuple, Type, TypeVar, Union import pytest from pydantic import ( BaseModel, BaseSettings, Extra, NoneStrBytes, StrBytes, ValidationError, compiled, constr, errors, validate_model, validator, ) from pydantic.fields import Field try: import cython except ImportError: cython = None def test_str_bytes(): class Model(BaseModel): v: StrBytes = ... m = Model(v='s') assert m.v == 's' assert repr(m.__fields__['v']) == "ModelField(name='v', type=Union[str, bytes], required=True)" m = Model(v=b'b') assert m.v == 'b' with pytest.raises(ValidationError) as exc_info: Model(v=None) assert exc_info.value.errors() == [ {'loc': ('v',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'} ] def test_str_bytes_none(): class Model(BaseModel): v: NoneStrBytes = ... m = Model(v='s') assert m.v == 's' m = Model(v=b'b') assert m.v == 'b' m = Model(v=None) assert m.v is None def test_union_int_str(): class Model(BaseModel): v: Union[int, str] = ... 
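# Editor's sketch (not part of the original test suite): test_union_int_str, nearby,
# relies on pydantic v1's default left-to-right Union coercion, which turns '123'
# into 123 for Union[int, str]. A minimal sketch, assuming pydantic >= 1.9, of
# Config.smart_union, which prefers an exact type match before attempting any
# coercion; the model name below is illustrative only.
from typing import Union
from pydantic import BaseModel

class SmartUnionExample(BaseModel):
    v: Union[int, str]

    class Config:
        smart_union = True

assert SmartUnionExample(v='123').v == '123'  # exact str match wins, no int coercion
assert SmartUnionExample(v=123).v == 123      # exact int match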
m = Model(v=123) assert m.v == 123 m = Model(v='123') assert m.v == 123 m = Model(v=b'foobar') assert m.v == 'foobar' # here both validators work and it's impossible to work out which value "closer" m = Model(v=12.2) assert m.v == 12 with pytest.raises(ValidationError) as exc_info: Model(v=None) assert exc_info.value.errors() == [ {'loc': ('v',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'} ] def test_union_int_any(): class Model(BaseModel): v: Union[int, Any] m = Model(v=123) assert m.v == 123 m = Model(v='123') assert m.v == 123 m = Model(v='foobar') assert m.v == 'foobar' m = Model(v=None) assert m.v is None def test_union_priority(): class ModelOne(BaseModel): v: Union[int, str] = ... class ModelTwo(BaseModel): v: Union[str, int] = ... assert ModelOne(v='123').v == 123 assert ModelTwo(v='123').v == '123' def test_typed_list(): class Model(BaseModel): v: List[int] = ... m = Model(v=[1, 2, '3']) assert m.v == [1, 2, 3] with pytest.raises(ValidationError) as exc_info: Model(v=[1, 'x', 'y']) assert exc_info.value.errors() == [ {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] with pytest.raises(ValidationError) as exc_info: Model(v=1) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_typed_set(): class Model(BaseModel): v: Set[int] = ... assert Model(v={1, 2, '3'}).v == {1, 2, 3} assert Model(v=[1, 2, '3']).v == {1, 2, 3} with pytest.raises(ValidationError) as exc_info: Model(v=[1, 'x']) assert exc_info.value.errors() == [ {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_dict_dict(): class Model(BaseModel): v: Dict[str, int] = ... assert Model(v={'foo': 1}).dict() == {'v': {'foo': 1}} def test_none_list(): class Model(BaseModel): v = [None] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'v': {'title': 'V', 'default': [None], 'type': 'array', 'items': {}}}, } @pytest.mark.parametrize( 'value,result', [ ({'a': 2, 'b': 4}, {'a': 2, 'b': 4}), ({1: '2', 'b': 4}, {'1': 2, 'b': 4}), ([('a', 2), ('b', 4)], {'a': 2, 'b': 4}), ], ) def test_typed_dict(value, result): class Model(BaseModel): v: Dict[str, int] = ... assert Model(v=value).v == result @pytest.mark.parametrize( 'value,errors', [ (1, [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}]), ({'a': 'b'}, [{'loc': ('v', 'a'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]), ([1, 2, 3], [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}]), ], ) def test_typed_dict_error(value, errors): class Model(BaseModel): v: Dict[str, int] = ... with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == errors def test_dict_key_error(): class Model(BaseModel): v: Dict[int, int] = ... 
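# Editor's sketch (not part of the original test suite): test_dict_key_error, nearby,
# checks that a bad dict *key* is reported under the special '__key__' location.
# A minimal usage sketch of that behaviour; the model name is illustrative only.
from typing import Dict
from pydantic import BaseModel, ValidationError

class IntKeyed(BaseModel):
    v: Dict[int, int]

assert IntKeyed(v={'1': '2'}).v == {1: 2}  # keys and values are both coerced to int
try:
    IntKeyed(v={'not-a-number': 0})
except ValidationError as e:
    assert e.errors()[0]['loc'] == ('v', '__key__')  # the key failed, not the value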
assert Model(v={1: 2, '3': '4'}).v == {1: 2, 3: 4} with pytest.raises(ValidationError) as exc_info: Model(v={'foo': 2, '3': '4'}) assert exc_info.value.errors() == [ {'loc': ('v', '__key__'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_tuple(): class Model(BaseModel): v: Tuple[int, float, bool] m = Model(v=[1.2, '2.2', 'true']) assert m.v == (1, 2.2, True) def test_tuple_more(): class Model(BaseModel): empty_tuple: Tuple[()] simple_tuple: tuple = None tuple_of_different_types: Tuple[int, float, str, bool] = None tuple_of_single_tuples: Tuple[Tuple[int], ...] = () m = Model( empty_tuple=[], simple_tuple=[1, 2, 3, 4], tuple_of_different_types=[4, 3, 2, 1], tuple_of_single_tuples=(('1',), (2,)), ) assert m.dict() == { 'empty_tuple': (), 'simple_tuple': (1, 2, 3, 4), 'tuple_of_different_types': (4, 3.0, '2', True), 'tuple_of_single_tuples': ((1,), (2,)), } @pytest.mark.parametrize( 'dict_cls,frozenset_cls,list_cls,set_cls,tuple_cls,type_cls', [ (Dict, FrozenSet, List, Set, Tuple, Type), (dict, frozenset, list, set, tuple, type), ], ) @pytest.mark.skipif( sys.version_info < (3, 9) or compiled, reason='PEP585 generics only supported for python 3.9 and above' ) def test_pep585_generic_types(dict_cls, frozenset_cls, list_cls, set_cls, tuple_cls, type_cls): class Type1: pass class Type2: pass class Model(BaseModel, arbitrary_types_allowed=True): a: dict_cls a1: dict_cls[str, int] b: frozenset_cls b1: frozenset_cls[int] c: list_cls c1: list_cls[int] d: set_cls d1: set_cls[int] e: tuple_cls e1: tuple_cls[int] e2: tuple_cls[int, ...] e3: tuple_cls[()] f: type_cls f1: type_cls[Type1] default_model_kwargs = dict( a={}, a1={'a': '1'}, b=[], b1=('1',), c=[], c1=('1',), d=[], d1=['1'], e=[], e1=['1'], e2=['1', '2'], e3=[], f=Type1, f1=Type1, ) m = Model(**default_model_kwargs) assert m.a == {} assert m.a1 == {'a': 1} assert m.b == frozenset() assert m.b1 == frozenset({1}) assert m.c == [] assert m.c1 == [1] assert m.d == set() assert m.d1 == {1} assert m.e == () assert m.e1 == (1,) assert m.e2 == (1, 2) assert m.e3 == () assert m.f == Type1 assert m.f1 == Type1 with pytest.raises(ValidationError) as exc_info: Model(**(default_model_kwargs | {'e3': (1,)})) assert exc_info.value.errors() == [ { 'ctx': {'actual_length': 1, 'expected_length': 0}, 'loc': ('e3',), 'msg': 'wrong tuple length 1, expected 0', 'type': 'value_error.tuple.length', } ] Model(**(default_model_kwargs | {'f': Type2})) with pytest.raises(ValidationError) as exc_info: Model(**(default_model_kwargs | {'f1': Type2})) assert exc_info.value.errors() == [ { 'ctx': {'expected_class': 'Type1'}, 'loc': ('f1',), 'msg': 'subclass of Type1 expected', 'type': 'type_error.subclass', } ] def test_tuple_length_error(): class Model(BaseModel): v: Tuple[int, float, bool] w: Tuple[()] with pytest.raises(ValidationError) as exc_info: Model(v=[1, 2], w=[1]) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'wrong tuple length 2, expected 3', 'type': 'value_error.tuple.length', 'ctx': {'actual_length': 2, 'expected_length': 3}, }, { 'loc': ('w',), 'msg': 'wrong tuple length 1, expected 0', 'type': 'value_error.tuple.length', 'ctx': {'actual_length': 1, 'expected_length': 0}, }, ] def test_tuple_invalid(): class Model(BaseModel): v: Tuple[int, float, bool] with pytest.raises(ValidationError) as exc_info: Model(v='xxx') assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid tuple', 'type': 'type_error.tuple'}] def test_tuple_value_error(): class Model(BaseModel): v: Tuple[int, float, Decimal] 
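# Editor's sketch (not part of the original test suite): the tuple tests here cover
# fixed-shape tuples such as Tuple[int, float, Decimal]; pydantic v1 also validates
# variadic tuples, where every element is coerced to one declared type and any
# length is accepted. The model name is illustrative only.
from typing import Tuple
from pydantic import BaseModel

class VariadicTuple(BaseModel):
    v: Tuple[int, ...]

assert VariadicTuple(v=['1', 2, 3.0]).v == (1, 2, 3)  # coerced element-wise
assert VariadicTuple(v=[]).v == ()                    # zero length is allowed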
with pytest.raises(ValidationError) as exc_info: Model(v=['x', 'y', 'x']) assert exc_info.value.errors() == [ {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 1), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, {'loc': ('v', 2), 'msg': 'value is not a valid decimal', 'type': 'type_error.decimal'}, ] def test_recursive_list(): class SubModel(BaseModel): name: str = ... count: int = None class Model(BaseModel): v: List[SubModel] = [] m = Model(v=[]) assert m.v == [] m = Model(v=[{'name': 'testing', 'count': 4}]) assert repr(m) == "Model(v=[SubModel(name='testing', count=4)])" assert m.v[0].name == 'testing' assert m.v[0].count == 4 assert m.dict() == {'v': [{'count': 4, 'name': 'testing'}]} with pytest.raises(ValidationError) as exc_info: Model(v=['x']) assert exc_info.value.errors() == [{'loc': ('v', 0), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}] def test_recursive_list_error(): class SubModel(BaseModel): name: str = ... count: int = None class Model(BaseModel): v: List[SubModel] = [] with pytest.raises(ValidationError) as exc_info: Model(v=[{}]) assert exc_info.value.errors() == [ {'loc': ('v', 0, 'name'), 'msg': 'field required', 'type': 'value_error.missing'} ] def test_list_unions(): class Model(BaseModel): v: List[Union[int, str]] = ... assert Model(v=[123, '456', 'foobar']).v == [123, 456, 'foobar'] with pytest.raises(ValidationError) as exc_info: Model(v=[1, 2, None]) assert exc_info.value.errors() == [ {'loc': ('v', 2), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'} ] def test_recursive_lists(): class Model(BaseModel): v: List[List[Union[int, float]]] = ... assert Model(v=[[1, 2], [3, '4', '4.1']]).v == [[1, 2], [3, 4, 4.1]] assert Model.__fields__['v'].sub_fields[0].name == '_v' assert len(Model.__fields__['v'].sub_fields) == 1 assert Model.__fields__['v'].sub_fields[0].sub_fields[0].name == '__v' assert len(Model.__fields__['v'].sub_fields[0].sub_fields) == 1 assert Model.__fields__['v'].sub_fields[0].sub_fields[0].sub_fields[1].name == '__v_float' assert len(Model.__fields__['v'].sub_fields[0].sub_fields[0].sub_fields) == 2 class StrEnum(str, Enum): a = 'a10' b = 'b10' def test_str_enum(): class Model(BaseModel): v: StrEnum = ... assert Model(v='a10').v is StrEnum.a with pytest.raises(ValidationError): Model(v='different') def test_any_dict(): class Model(BaseModel): v: Dict[int, Any] = ... 
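# Editor's sketch (not part of the original test suite): the .dict() tests that
# follow exercise pydantic v1's nested include/exclude mappings, in which Ellipsis
# (...) means "the whole field" and integer keys select list indices. A minimal
# sketch of that syntax; the model names are illustrative only.
from typing import List
from pydantic import BaseModel

class Item(BaseModel):
    a: int
    b: int

class Box(BaseModel):
    items: List[Item]
    label: str

box = Box(items=[{'a': 1, 'b': 2}, {'a': 3, 'b': 4}], label='x')
assert box.dict(exclude={'label': ...}) == {'items': [{'a': 1, 'b': 2}, {'a': 3, 'b': 4}]}
assert box.dict(include={'items': {0: {'a'}}}) == {'items': [{'a': 1}]}  # only index 0, only field a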
assert Model(v={1: 'foobar'}).dict() == {'v': {1: 'foobar'}} assert Model(v={123: 456}).dict() == {'v': {123: 456}} assert Model(v={2: [1, 2, 3]}).dict() == {'v': {2: [1, 2, 3]}} def test_success_values_include(): class Model(BaseModel): a: int = 1 b: int = 2 c: int = 3 m = Model() assert m.dict() == {'a': 1, 'b': 2, 'c': 3} assert m.dict(include={'a'}) == {'a': 1} assert m.dict(exclude={'a'}) == {'b': 2, 'c': 3} assert m.dict(include={'a', 'b'}, exclude={'a'}) == {'b': 2} def test_include_exclude_unset(): class Model(BaseModel): a: int b: int c: int = 3 d: int = 4 e: int = 5 f: int = 6 m = Model(a=1, b=2, e=5, f=7) assert m.dict() == {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 7} assert m.__fields_set__ == {'a', 'b', 'e', 'f'} assert m.dict(exclude_unset=True) == {'a': 1, 'b': 2, 'e': 5, 'f': 7} assert m.dict(include={'a'}, exclude_unset=True) == {'a': 1} assert m.dict(include={'c'}, exclude_unset=True) == {} assert m.dict(exclude={'a'}, exclude_unset=True) == {'b': 2, 'e': 5, 'f': 7} assert m.dict(exclude={'c'}, exclude_unset=True) == {'a': 1, 'b': 2, 'e': 5, 'f': 7} assert m.dict(include={'a', 'b', 'c'}, exclude={'b'}, exclude_unset=True) == {'a': 1} assert m.dict(include={'a', 'b', 'c'}, exclude={'a', 'c'}, exclude_unset=True) == {'b': 2} def test_include_exclude_defaults(): class Model(BaseModel): a: int b: int c: int = 3 d: int = 4 e: int = 5 f: int = 6 m = Model(a=1, b=2, e=5, f=7) assert m.dict() == {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 7} assert m.__fields_set__ == {'a', 'b', 'e', 'f'} assert m.dict(exclude_defaults=True) == {'a': 1, 'b': 2, 'f': 7} assert m.dict(include={'a'}, exclude_defaults=True) == {'a': 1} assert m.dict(include={'c'}, exclude_defaults=True) == {} assert m.dict(exclude={'a'}, exclude_defaults=True) == {'b': 2, 'f': 7} assert m.dict(exclude={'c'}, exclude_defaults=True) == {'a': 1, 'b': 2, 'f': 7} assert m.dict(include={'a', 'b', 'c'}, exclude={'b'}, exclude_defaults=True) == {'a': 1} assert m.dict(include={'a', 'b', 'c'}, exclude={'a', 'c'}, exclude_defaults=True) == {'b': 2} # abstract set assert m.dict(include={'a': 1}.keys()) == {'a': 1} assert m.dict(exclude={'a': 1}.keys()) == {'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 7} assert m.dict(include={'a': 1}.keys(), exclude_unset=True) == {'a': 1} assert m.dict(exclude={'a': 1}.keys(), exclude_unset=True) == {'b': 2, 'e': 5, 'f': 7} def test_skip_defaults_deprecated(): class Model(BaseModel): x: int b: int = 2 m = Model(x=1) match = r'Model.dict\(\): "skip_defaults" is deprecated and replaced by "exclude_unset"' with pytest.warns(DeprecationWarning, match=match): assert m.dict(skip_defaults=True) == m.dict(exclude_unset=True) with pytest.warns(DeprecationWarning, match=match): assert m.dict(skip_defaults=False) == m.dict(exclude_unset=False) match = r'Model.json\(\): "skip_defaults" is deprecated and replaced by "exclude_unset"' with pytest.warns(DeprecationWarning, match=match): assert m.json(skip_defaults=True) == m.json(exclude_unset=True) with pytest.warns(DeprecationWarning, match=match): assert m.json(skip_defaults=False) == m.json(exclude_unset=False) def test_advanced_exclude(): class SubSubModel(BaseModel): a: str b: str class SubModel(BaseModel): c: str d: List[SubSubModel] class Model(BaseModel): e: str f: SubModel m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')])) assert m.dict(exclude={'f': {'c': ..., 'd': {-1: {'a'}}}}) == { 'e': 'e', 'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]}, } assert m.dict(exclude={'e': ..., 'f': {'d'}}) == {'f': {'c': 
'foo'}} def test_advanced_exclude_by_alias(): class SubSubModel(BaseModel): a: str aliased_b: str = Field(..., alias='b_alias') class SubModel(BaseModel): aliased_c: str = Field(..., alias='c_alias') aliased_d: List[SubSubModel] = Field(..., alias='d_alias') class Model(BaseModel): aliased_e: str = Field(..., alias='e_alias') aliased_f: SubModel = Field(..., alias='f_alias') m = Model( e_alias='e', f_alias=SubModel(c_alias='foo', d_alias=[SubSubModel(a='a', b_alias='b'), SubSubModel(a='c', b_alias='e')]), ) excludes = {'aliased_f': {'aliased_c': ..., 'aliased_d': {-1: {'a'}}}} assert m.dict(exclude=excludes, by_alias=True) == { 'e_alias': 'e', 'f_alias': {'d_alias': [{'a': 'a', 'b_alias': 'b'}, {'b_alias': 'e'}]}, } excludes = {'aliased_e': ..., 'aliased_f': {'aliased_d'}} assert m.dict(exclude=excludes, by_alias=True) == {'f_alias': {'c_alias': 'foo'}} def test_advanced_value_include(): class SubSubModel(BaseModel): a: str b: str class SubModel(BaseModel): c: str d: List[SubSubModel] class Model(BaseModel): e: str f: SubModel m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')])) assert m.dict(include={'f'}) == {'f': {'c': 'foo', 'd': [{'a': 'a', 'b': 'b'}, {'a': 'c', 'b': 'e'}]}} assert m.dict(include={'e'}) == {'e': 'e'} assert m.dict(include={'f': {'d': {0: ..., -1: {'b'}}}}) == {'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]}} def test_advanced_value_exclude_include(): class SubSubModel(BaseModel): a: str b: str class SubModel(BaseModel): c: str d: List[SubSubModel] class Model(BaseModel): e: str f: SubModel m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')])) assert m.dict(exclude={'f': {'c': ..., 'd': {-1: {'a'}}}}, include={'f'}) == { 'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]} } assert m.dict(exclude={'e': ..., 'f': {'d'}}, include={'e', 'f'}) == {'f': {'c': 'foo'}} assert m.dict(exclude={'f': {'d': {-1: {'a'}}}}, include={'f': {'d'}}) == { 'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]} } @pytest.mark.parametrize( 'exclude,expected', [ # Normal nested __all__ ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}}}}}, {'subs': [{'k': 1, 'subsubs': [{'j': 1}, {'j': 2}]}, {'k': 2, 'subsubs': [{'j': 3}]}]}, ), # Merge sub dicts ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}}}, 0: {'subsubs': {'__all__': {'j'}}}}}, {'subs': [{'k': 1, 'subsubs': [{}, {}]}, {'k': 2, 'subsubs': [{'j': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs': ...}, 0: {'subsubs': {'__all__': {'j'}}}}}, {'subs': [{'k': 1, 'subsubs': [{'i': 1}, {'i': 2}]}, {'k': 2}]}, ), ( {'subs': {'__all__': {'subsubs': {'__all__': {'j'}}}, 0: {'subsubs': ...}}}, {'subs': [{'k': 1}, {'k': 2, 'subsubs': [{'i': 3}]}]}, ), # Merge sub sets ( {'subs': {'__all__': {'subsubs': {0}}, 0: {'subsubs': {1}}}}, {'subs': [{'k': 1, 'subsubs': []}, {'k': 2, 'subsubs': []}]}, ), # Merge sub dict-set ( {'subs': {'__all__': {'subsubs': {0: {'i'}}}, 0: {'subsubs': {1}}}}, {'subs': [{'k': 1, 'subsubs': [{'j': 1}]}, {'k': 2, 'subsubs': [{'j': 3}]}]}, ), # Different keys ({'subs': {'__all__': {'subsubs'}, 0: {'k'}}}, {'subs': [{}, {'k': 2}]}), ({'subs': {'__all__': {'subsubs': ...}, 0: {'k'}}}, {'subs': [{}, {'k': 2}]}), ({'subs': {'__all__': {'subsubs'}, 0: {'k': ...}}}, {'subs': [{}, {'k': 2}]}), # Nested different keys ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}, 0: {'j'}}}}}, {'subs': [{'k': 1, 'subsubs': [{}, {'j': 2}]}, {'k': 2, 'subsubs': [{}]}]}, ), ( {'subs': {'__all__': {'subsubs': {'__all__': {'i': ...}, 0: {'j'}}}}}, {'subs': [{'k': 1, 'subsubs': 
[{}, {'j': 2}]}, {'k': 2, 'subsubs': [{}]}]}, ), ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}, 0: {'j': ...}}}}}, {'subs': [{'k': 1, 'subsubs': [{}, {'j': 2}]}, {'k': 2, 'subsubs': [{}]}]}, ), # Ignore __all__ for index with defined exclude ( {'subs': {'__all__': {'subsubs'}, 0: {'subsubs': {'__all__': {'j'}}}}}, {'subs': [{'k': 1, 'subsubs': [{'i': 1}, {'i': 2}]}, {'k': 2}]}, ), ({'subs': {'__all__': {'subsubs': {'__all__': {'j'}}}, 0: ...}}, {'subs': [{'k': 2, 'subsubs': [{'i': 3}]}]}), ({'subs': {'__all__': ..., 0: {'subsubs'}}}, {'subs': [{'k': 1}]}), ], ) def test_advanced_exclude_nested_lists(exclude, expected): class SubSubModel(BaseModel): i: int j: int class SubModel(BaseModel): k: int subsubs: List[SubSubModel] class Model(BaseModel): subs: List[SubModel] m = Model(subs=[dict(k=1, subsubs=[dict(i=1, j=1), dict(i=2, j=2)]), dict(k=2, subsubs=[dict(i=3, j=3)])]) assert m.dict(exclude=exclude) == expected @pytest.mark.parametrize( 'include,expected', [ # Normal nested __all__ ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}}}}}, {'subs': [{'subsubs': [{'i': 1}, {'i': 2}]}, {'subsubs': [{'i': 3}]}]}, ), # Merge sub dicts ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}}}, 0: {'subsubs': {'__all__': {'j'}}}}}, {'subs': [{'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'i': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs': ...}, 0: {'subsubs': {'__all__': {'j'}}}}}, {'subs': [{'subsubs': [{'j': 1}, {'j': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs': {'__all__': {'j'}}}, 0: {'subsubs': ...}}}, {'subs': [{'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'j': 3}]}]}, ), # Merge sub sets ( {'subs': {'__all__': {'subsubs': {0}}, 0: {'subsubs': {1}}}}, {'subs': [{'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), # Merge sub dict-set ( {'subs': {'__all__': {'subsubs': {0: {'i'}}}, 0: {'subsubs': {1}}}}, {'subs': [{'subsubs': [{'i': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'i': 3}]}]}, ), # Different keys ( {'subs': {'__all__': {'subsubs'}, 0: {'k'}}}, {'subs': [{'k': 1, 'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs': ...}, 0: {'k'}}}, {'subs': [{'k': 1, 'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs'}, 0: {'k': ...}}}, {'subs': [{'k': 1, 'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), # Nested different keys ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}, 0: {'j'}}}}}, {'subs': [{'subsubs': [{'i': 1, 'j': 1}, {'i': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs': {'__all__': {'i': ...}, 0: {'j'}}}}}, {'subs': [{'subsubs': [{'i': 1, 'j': 1}, {'i': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs': {'__all__': {'i'}, 0: {'j': ...}}}}}, {'subs': [{'subsubs': [{'i': 1, 'j': 1}, {'i': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), # Ignore __all__ for index with defined include ( {'subs': {'__all__': {'subsubs'}, 0: {'subsubs': {'__all__': {'j'}}}}}, {'subs': [{'subsubs': [{'j': 1}, {'j': 2}]}, {'subsubs': [{'i': 3, 'j': 3}]}]}, ), ( {'subs': {'__all__': {'subsubs': {'__all__': {'j'}}}, 0: ...}}, {'subs': [{'k': 1, 'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'subsubs': [{'j': 3}]}]}, ), ( {'subs': {'__all__': ..., 0: {'subsubs'}}}, {'subs': [{'subsubs': [{'i': 1, 'j': 1}, {'i': 2, 'j': 2}]}, {'k': 2, 'subsubs': [{'i': 3, 'j': 3}]}]}, ), 
], ) def test_advanced_include_nested_lists(include, expected): class SubSubModel(BaseModel): i: int j: int class SubModel(BaseModel): k: int subsubs: List[SubSubModel] class Model(BaseModel): subs: List[SubModel] m = Model(subs=[dict(k=1, subsubs=[dict(i=1, j=1), dict(i=2, j=2)]), dict(k=2, subsubs=[dict(i=3, j=3)])]) assert m.dict(include=include) == expected def test_field_set_ignore_extra(): class Model(BaseModel): a: int b: int c: int = 3 class Config: extra = Extra.ignore m = Model(a=1, b=2) assert m.dict() == {'a': 1, 'b': 2, 'c': 3} assert m.__fields_set__ == {'a', 'b'} assert m.dict(exclude_unset=True) == {'a': 1, 'b': 2} m2 = Model(a=1, b=2, d=4) assert m2.dict() == {'a': 1, 'b': 2, 'c': 3} assert m2.__fields_set__ == {'a', 'b'} assert m2.dict(exclude_unset=True) == {'a': 1, 'b': 2} def test_field_set_allow_extra(): class Model(BaseModel): a: int b: int c: int = 3 class Config: extra = Extra.allow m = Model(a=1, b=2) assert m.dict() == {'a': 1, 'b': 2, 'c': 3} assert m.__fields_set__ == {'a', 'b'} assert m.dict(exclude_unset=True) == {'a': 1, 'b': 2} m2 = Model(a=1, b=2, d=4) assert m2.dict() == {'a': 1, 'b': 2, 'c': 3, 'd': 4} assert m2.__fields_set__ == {'a', 'b', 'd'} assert m2.dict(exclude_unset=True) == {'a': 1, 'b': 2, 'd': 4} def test_field_set_field_name(): class Model(BaseModel): a: int field_set: int b: int = 3 assert Model(a=1, field_set=2).dict() == {'a': 1, 'field_set': 2, 'b': 3} assert Model(a=1, field_set=2).dict(exclude_unset=True) == {'a': 1, 'field_set': 2} assert Model.construct(a=1, field_set=3).dict() == {'a': 1, 'field_set': 3, 'b': 3} def test_values_order(): class Model(BaseModel): a: int = 1 b: int = 2 c: int = 3 m = Model(c=30, b=20, a=10) assert list(m) == [('a', 10), ('b', 20), ('c', 30)] def test_inheritance(): class Foo(BaseModel): a: float = ... 
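# Editor's sketch (not part of the original test suite): test_inheritance and
# test_annotation_inheritance, nearby, exercise the v1 rules this sketch
# summarises: a subclass may re-assign a default without an annotation as long
# as the type is unchanged; changing the type requires a new annotation. The
# class names below are illustrative only.
from pydantic import BaseModel

class BaseThing(BaseModel):
    n: int = 1

class ChildThing(BaseThing):
    n = 2  # same type, new default: no annotation needed

assert ChildThing().n == 2
assert ChildThing.__fields__['n'].type_ == int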
class Bar(Foo): x: float = 12.3 a = 123.0 assert Bar().dict() == {'x': 12.3, 'a': 123.0} def test_inheritance_subclass_default(): class MyStr(str): pass # Confirm hint supports a subclass default class Simple(BaseModel): x: str = MyStr('test') # Confirm hint on a base can be overridden with a subclass default on a subclass class Base(BaseModel): x: str y: str class Sub(Base): x = MyStr('test') y: MyStr = MyStr('test') # force subtype assert Sub.__fields__['x'].type_ == str assert Sub.__fields__['y'].type_ == MyStr def test_invalid_type(): with pytest.raises(RuntimeError) as exc_info: class Model(BaseModel): x: 43 = 123 assert 'error checking inheritance of 43 (type: int)' in exc_info.value.args[0] class CustomStr(str): def foobar(self): return 7 @pytest.mark.parametrize( 'value,expected', [ ('a string', 'a string'), (b'some bytes', 'some bytes'), (bytearray('foobar', encoding='utf8'), 'foobar'), (123, '123'), (123.45, '123.45'), (Decimal('12.45'), '12.45'), (True, 'True'), (False, 'False'), (StrEnum.a, 'a10'), (CustomStr('whatever'), 'whatever'), ], ) def test_valid_string_types(value, expected): class Model(BaseModel): v: str assert Model(v=value).v == expected @pytest.mark.parametrize( 'value,errors', [ ({'foo': 'bar'}, [{'loc': ('v',), 'msg': 'str type expected', 'type': 'type_error.str'}]), ([1, 2, 3], [{'loc': ('v',), 'msg': 'str type expected', 'type': 'type_error.str'}]), ], ) def test_invalid_string_types(value, errors): class Model(BaseModel): v: str with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == errors def test_inheritance_config(): class Parent(BaseModel): a: int class Child(Parent): b: str class Config: fields = {'a': 'aaa', 'b': 'bbb'} m = Child(aaa=1, bbb='s') assert repr(m) == "Child(a=1, b='s')" def test_partial_inheritance_config(): class Parent(BaseModel): a: int class Config: fields = {'a': 'aaa'} class Child(Parent): b: str class Config: fields = {'b': 'bbb'} m = Child(aaa=1, bbb='s') assert repr(m) == "Child(a=1, b='s')" def test_annotation_inheritance(): class A(BaseModel): integer: int = 1 class B(A): integer = 2 if sys.version_info < (3, 10): assert B.__annotations__['integer'] == int else: assert B.__annotations__ == {} assert B.__fields__['integer'].type_ == int class C(A): integer: str = 'G' assert C.__annotations__['integer'] == str assert C.__fields__['integer'].type_ == str with pytest.raises(TypeError) as exc_info: class D(A): integer = 'G' assert str(exc_info.value) == ( 'The type of D.integer differs from the new default value; ' 'if you wish to change the type of this field, please use a type annotation' ) def test_string_none(): class Model(BaseModel): a: constr(min_length=20, max_length=1000) = ... 
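# Editor's sketch (not part of the original test suite): constr(), used by
# test_string_none nearby, builds a constrained str type inline; min_length,
# max_length and regex are the pydantic v1 keyword names. A minimal sketch with
# an illustrative model name.
from pydantic import BaseModel, ValidationError, constr

class Tag(BaseModel):
    name: constr(min_length=2, max_length=5)

assert Tag(name='abc').name == 'abc'
try:
    Tag(name='x')  # too short
except ValidationError as e:
    assert e.errors()[0]['type'] == 'value_error.any_str.min_length'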
class Config: extra = Extra.ignore with pytest.raises(ValidationError) as exc_info: Model(a=None) assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'} ] def test_return_errors_ok(): class Model(BaseModel): foo: int bar: List[int] assert validate_model(Model, {'foo': '123', 'bar': (1, 2, 3)}) == ( {'foo': 123, 'bar': [1, 2, 3]}, {'foo', 'bar'}, None, ) d, f, e = validate_model(Model, {'foo': '123', 'bar': (1, 2, 3)}, False) assert d == {'foo': 123, 'bar': [1, 2, 3]} assert f == {'foo', 'bar'} assert e is None def test_return_errors_error(): class Model(BaseModel): foo: int bar: List[int] d, f, e = validate_model(Model, {'foo': '123', 'bar': (1, 2, 'x')}, False) assert d == {'foo': 123} assert f == {'foo', 'bar'} assert e.errors() == [{'loc': ('bar', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}] d, f, e = validate_model(Model, {'bar': (1, 2, 3)}, False) assert d == {'bar': [1, 2, 3]} assert f == {'bar'} assert e.errors() == [{'loc': ('foo',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_optional_required(): class Model(BaseModel): bar: Optional[int] assert Model(bar=123).dict() == {'bar': 123} assert Model().dict() == {'bar': None} assert Model(bar=None).dict() == {'bar': None} def test_invalid_validator(): class InvalidValidator: @classmethod def __get_validators__(cls): yield cls.has_wrong_arguments @classmethod def has_wrong_arguments(cls, value, bar): pass with pytest.raises(errors.ConfigError) as exc_info: class InvalidValidatorModel(BaseModel): x: InvalidValidator = ... assert exc_info.value.args[0].startswith('Invalid signature for validator') def test_unable_to_infer(): with pytest.raises(errors.ConfigError) as exc_info: class InvalidDefinitionModel(BaseModel): x = None assert exc_info.value.args[0] == 'unable to infer type for attribute "x"' def test_multiple_errors(): class Model(BaseModel): a: Union[None, int, float, Decimal] with pytest.raises(ValidationError) as exc_info: Model(a='foobar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('a',), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, {'loc': ('a',), 'msg': 'value is not a valid decimal', 'type': 'type_error.decimal'}, ] assert Model().a is None assert Model(a=None).a is None def test_validate_all(): class Model(BaseModel): a: int b: int class Config: validate_all = True with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_force_extra(): class Model(BaseModel): foo: int class Config: extra = 'ignore' assert Model.__config__.extra is Extra.ignore def test_illegal_extra_value(): with pytest.raises(ValueError, match='is not a valid value for "extra"'): class Model(BaseModel): foo: int class Config: extra = 'foo' def test_multiple_inheritance_config(): class Parent(BaseModel): class Config: allow_mutation = False extra = Extra.forbid class Mixin(BaseModel): class Config: use_enum_values = True class Child(Mixin, Parent): class Config: allow_population_by_field_name = True assert BaseModel.__config__.allow_mutation is True assert BaseModel.__config__.allow_population_by_field_name is False assert BaseModel.__config__.extra is Extra.ignore assert BaseModel.__config__.use_enum_values is False assert 
Parent.__config__.allow_mutation is False assert Parent.__config__.allow_population_by_field_name is False assert Parent.__config__.extra is Extra.forbid assert Parent.__config__.use_enum_values is False assert Mixin.__config__.allow_mutation is True assert Mixin.__config__.allow_population_by_field_name is False assert Mixin.__config__.extra is Extra.ignore assert Mixin.__config__.use_enum_values is True assert Child.__config__.allow_mutation is False assert Child.__config__.allow_population_by_field_name is True assert Child.__config__.extra is Extra.forbid assert Child.__config__.use_enum_values is True def test_submodel_different_type(): class Foo(BaseModel): a: int class Bar(BaseModel): b: int class Spam(BaseModel): c: Foo assert Spam(c={'a': '123'}).dict() == {'c': {'a': 123}} with pytest.raises(ValidationError): Spam(c={'b': '123'}) assert Spam(c=Foo(a='123')).dict() == {'c': {'a': 123}} with pytest.raises(ValidationError): Spam(c=Bar(b='123')) def test_self(): class Model(BaseModel): self: str m = Model.parse_obj(dict(self='some value')) assert m.dict() == {'self': 'some value'} assert m.self == 'some value' assert m.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'self': {'title': 'Self', 'type': 'string'}}, 'required': ['self'], } @pytest.mark.parametrize('model', [BaseModel, BaseSettings]) def test_self_recursive(model): class SubModel(model): self: int class Model(model): sm: SubModel m = Model.parse_obj({'sm': {'self': '123'}}) assert m.dict() == {'sm': {'self': 123}} @pytest.mark.parametrize('model', [BaseModel, BaseSettings]) def test_nested_init(model): class NestedModel(model): self: str modified_number: int = 1 def __init__(someinit, **kwargs): super().__init__(**kwargs) someinit.modified_number += 1 class TopModel(model): self: str nest: NestedModel m = TopModel.parse_obj(dict(self='Top Model', nest=dict(self='Nested Model', modified_number=0))) assert m.self == 'Top Model' assert m.nest.self == 'Nested Model' assert m.nest.modified_number == 1 def test_init_inspection(): class Foobar(BaseModel): x: int def __init__(self, **data) -> None: with pytest.raises(AttributeError): assert self.x super().__init__(**data) Foobar(x=1) def test_type_on_annotation(): class FooBar: pass class Model(BaseModel): a: int = int b: Type[int] c: Type[int] = int d: FooBar = FooBar e: Type[FooBar] f: Type[FooBar] = FooBar g: Sequence[Type[FooBar]] = [FooBar] h: Union[Type[FooBar], Sequence[Type[FooBar]]] = FooBar i: Union[Type[FooBar], Sequence[Type[FooBar]]] = [FooBar] assert Model.__fields__.keys() == {'b', 'c', 'e', 'f', 'g', 'h', 'i'} def test_assign_type(): class Parent: def echo(self): return 'parent' class Child(Parent): def echo(self): return 'child' class Different: def echo(self): return 'different' class Model(BaseModel): v: Type[Parent] = Parent assert Model(v=Parent).v().echo() == 'parent' assert Model().v().echo() == 'parent' assert Model(v=Child).v().echo() == 'child' with pytest.raises(ValidationError) as exc_info: Model(v=Different) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'subclass of Parent expected', 'type': 'type_error.subclass', 'ctx': {'expected_class': 'Parent'}, } ] def test_optional_subfields(): class Model(BaseModel): a: Optional[int] assert Model.__fields__['a'].sub_fields is None assert Model.__fields__['a'].allow_none is True with pytest.raises(ValidationError) as exc_info: Model(a='foobar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert Model().a 
is None assert Model(a=None).a is None assert Model(a=12).a == 12 def test_not_optional_subfields(): class Model(BaseModel): a: Optional[int] @validator('a') def check_a(cls, v): return v assert Model.__fields__['a'].sub_fields is None # assert Model.__fields__['a'].required is True assert Model.__fields__['a'].allow_none is True with pytest.raises(ValidationError) as exc_info: Model(a='foobar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert Model().a is None assert Model(a=None).a is None assert Model(a=12).a == 12 def test_optional_field_constraints(): class MyModel(BaseModel): my_int: Optional[int] = Field(..., ge=3) with pytest.raises(ValidationError) as exc_info: MyModel(my_int=2) assert exc_info.value.errors() == [ { 'loc': ('my_int',), 'msg': 'ensure this value is greater than or equal to 3', 'type': 'value_error.number.not_ge', 'ctx': {'limit_value': 3}, } ] def test_field_str_shape(): class Model(BaseModel): a: List[int] assert repr(Model.__fields__['a']) == "ModelField(name='a', type=List[int], required=True)" assert str(Model.__fields__['a']) == "name='a' type=List[int] required=True" T1 = TypeVar('T1') T2 = TypeVar('T2') class DisplayGen(Generic[T1, T2]): def __init__(self, t1: T1, t2: T2): self.t1 = t1 self.t2 = t2 @classmethod def __get_validators__(cls): def validator(v): return v yield validator @pytest.mark.parametrize( 'type_,expected', [ (int, 'int'), (Optional[int], 'Optional[int]'), (Union[None, int, str], 'Union[NoneType, int, str]'), (Union[int, str, bytes], 'Union[int, str, bytes]'), (List[int], 'List[int]'), (Tuple[int, str, bytes], 'Tuple[int, str, bytes]'), (Union[List[int], Set[bytes]], 'Union[List[int], Set[bytes]]'), (List[Tuple[int, int]], 'List[Tuple[int, int]]'), (Dict[int, str], 'Mapping[int, str]'), (FrozenSet[int], 'FrozenSet[int]'), (Tuple[int, ...], 'Tuple[int, ...]'), (Optional[List[int]], 'Optional[List[int]]'), (dict, 'dict'), (DisplayGen[bool, str], 'DisplayGen[bool, str]'), ], ) def test_field_type_display(type_, expected): class Model(BaseModel): a: type_ assert Model.__fields__['a']._type_display() == expected def test_any_none(): class MyModel(BaseModel): foo: Any m = MyModel(foo=None) assert dict(m) == {'foo': None} def test_type_var_any(): Foobar = TypeVar('Foobar') class MyModel(BaseModel): foo: Foobar assert MyModel.schema() == {'title': 'MyModel', 'type': 'object', 'properties': {'foo': {'title': 'Foo'}}} assert MyModel(foo=None).foo is None assert MyModel(foo='x').foo == 'x' assert MyModel(foo=123).foo == 123 def test_type_var_constraint(): Foobar = TypeVar('Foobar', int, str) class MyModel(BaseModel): foo: Foobar assert MyModel.schema() == { 'title': 'MyModel', 'type': 'object', 'properties': {'foo': {'title': 'Foo', 'anyOf': [{'type': 'integer'}, {'type': 'string'}]}}, 'required': ['foo'], } with pytest.raises(ValidationError, match='none is not an allowed value'): MyModel(foo=None) with pytest.raises(ValidationError, match='value is not a valid integer'): MyModel(foo=[1, 2, 3]) assert MyModel(foo='x').foo == 'x' assert MyModel(foo=123).foo == 123 def test_type_var_bound(): Foobar = TypeVar('Foobar', bound=int) class MyModel(BaseModel): foo: Foobar assert MyModel.schema() == { 'title': 'MyModel', 'type': 'object', 'properties': {'foo': {'title': 'Foo', 'type': 'integer'}}, 'required': ['foo'], } with pytest.raises(ValidationError, match='none is not an allowed value'): MyModel(foo=None) with pytest.raises(ValidationError, match='value is not a valid 
integer'): MyModel(foo='x') assert MyModel(foo=123).foo == 123 def test_dict_bare(): class MyModel(BaseModel): foo: Dict m = MyModel(foo={'x': 'a', 'y': None}) assert m.foo == {'x': 'a', 'y': None} def test_list_bare(): class MyModel(BaseModel): foo: List m = MyModel(foo=[1, 2, None]) assert m.foo == [1, 2, None] def test_dict_any(): class MyModel(BaseModel): foo: Dict[str, Any] m = MyModel(foo={'x': 'a', 'y': None}) assert m.foo == {'x': 'a', 'y': None} def test_modify_fields(): class Foo(BaseModel): foo: List[List[int]] @validator('foo') def check_something(cls, value): return value class Bar(Foo): pass assert repr(Foo.__fields__['foo']) == "ModelField(name='foo', type=List[List[int]], required=True)" assert repr(Bar.__fields__['foo']) == "ModelField(name='foo', type=List[List[int]], required=True)" assert Foo(foo=[[0, 1]]).foo == [[0, 1]] assert Bar(foo=[[0, 1]]).foo == [[0, 1]] def test_exclude_none(): class MyModel(BaseModel): a: Optional[int] = None b: int = 2 m = MyModel(a=5) assert m.dict(exclude_none=True) == {'a': 5, 'b': 2} m = MyModel(b=3) assert m.dict(exclude_none=True) == {'b': 3} assert m.json(exclude_none=True) == '{"b": 3}' def test_exclude_none_recursive(): class ModelA(BaseModel): a: Optional[int] = None b: int = 1 class ModelB(BaseModel): c: int d: int = 2 e: ModelA f: Optional[str] = None m = ModelB(c=5, e={'a': 0}) assert m.dict() == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}, 'f': None} assert m.dict(exclude_none=True) == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}} assert dict(m) == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}, 'f': None} m = ModelB(c=5, e={'b': 20}, f='test') assert m.dict() == {'c': 5, 'd': 2, 'e': {'a': None, 'b': 20}, 'f': 'test'} assert m.dict(exclude_none=True) == {'c': 5, 'd': 2, 'e': {'b': 20}, 'f': 'test'} assert dict(m) == {'c': 5, 'd': 2, 'e': {'a': None, 'b': 20}, 'f': 'test'} def test_exclude_none_with_extra(): class MyModel(BaseModel): a: str = 'default' b: Optional[str] = None class Config: extra = 'allow' m = MyModel(a='a', c='c') assert m.dict(exclude_none=True) == {'a': 'a', 'c': 'c'} assert m.dict() == {'a': 'a', 'b': None, 'c': 'c'} m = MyModel(a='a', b='b', c=None) assert m.dict(exclude_none=True) == {'a': 'a', 'b': 'b'} assert m.dict() == {'a': 'a', 'b': 'b', 'c': None} def test_str_method_inheritance(): import pydantic class Foo(pydantic.BaseModel): x: int = 3 y: int = 4 def __str__(self): return str(self.y + self.x) class Bar(Foo): z: bool = False assert str(Foo()) == '7' assert str(Bar()) == '7' def test_repr_method_inheritance(): import pydantic class Foo(pydantic.BaseModel): x: int = 3 y: int = 4 def __repr__(self): return repr(self.y + self.x) class Bar(Foo): z: bool = False assert repr(Foo()) == '7' assert repr(Bar()) == '7' def test_optional_validator(): val_calls = [] class Model(BaseModel): something: Optional[str] @validator('something') def check_something(cls, v): val_calls.append(v) return v assert Model().dict() == {'something': None} assert Model(something=None).dict() == {'something': None} assert Model(something='hello').dict() == {'something': 'hello'} assert val_calls == [None, 'hello'] def test_required_optional(): class Model(BaseModel): nullable1: Optional[int] = ... nullable2: Optional[int] = Field(...) 
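# Editorial note: pydantic v1 treats a bare `Optional[X]` annotation as having an
# implicit default of None, while an explicit `...` or `Field(...)` default makes
# the field required *and* nullable, which is what this test pins down. A minimal
# sketch of the distinction (hypothetical model, illustrative only):
#
#     class Sketch(BaseModel):
#         implicit: Optional[int]          # not required, defaults to None
#         explicit: Optional[int] = ...    # required, but None is accepted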
with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [ {'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}, ] with pytest.raises(ValidationError) as exc_info: Model(nullable1=1) assert exc_info.value.errors() == [{'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}] with pytest.raises(ValidationError) as exc_info: Model(nullable2=2) assert exc_info.value.errors() == [{'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}] assert Model(nullable1=None, nullable2=None).dict() == {'nullable1': None, 'nullable2': None} assert Model(nullable1=1, nullable2=2).dict() == {'nullable1': 1, 'nullable2': 2} with pytest.raises(ValidationError) as exc_info: Model(nullable1='some text') assert exc_info.value.errors() == [ {'loc': ('nullable1',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_required_any(): class Model(BaseModel): optional1: Any optional2: Any = None nullable1: Any = ... nullable2: Any = Field(...) with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [ {'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}, ] with pytest.raises(ValidationError) as exc_info: Model(nullable1='a') assert exc_info.value.errors() == [{'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}] with pytest.raises(ValidationError) as exc_info: Model(nullable2=False) assert exc_info.value.errors() == [{'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}] assert Model(nullable1=None, nullable2=None).dict() == { 'optional1': None, 'optional2': None, 'nullable1': None, 'nullable2': None, } assert Model(nullable1=1, nullable2='two').dict() == { 'optional1': None, 'optional2': None, 'nullable1': 1, 'nullable2': 'two', } assert Model(optional1='op1', optional2=False, nullable1=1, nullable2='two').dict() == { 'optional1': 'op1', 'optional2': False, 'nullable1': 1, 'nullable2': 'two', } def test_custom_generic_validators(): T1 = TypeVar('T1') T2 = TypeVar('T2') class MyGen(Generic[T1, T2]): def __init__(self, t1: T1, t2: T2): self.t1 = t1 self.t2 = t2 @classmethod def __get_validators__(cls): yield cls.validate @classmethod def validate(cls, v, field): if not isinstance(v, cls): raise TypeError('Invalid value') if not field.sub_fields: return v t1_f = field.sub_fields[0] t2_f = field.sub_fields[1] errors = [] _, error = t1_f.validate(v.t1, {}, loc='t1') if error: errors.append(error) _, error = t2_f.validate(v.t2, {}, loc='t2') if error: errors.append(error) if errors: raise ValidationError(errors, cls) return v class Model(BaseModel): a: str gen: MyGen[str, bool] gen2: MyGen with pytest.raises(ValidationError) as exc_info: Model(a='foo', gen='invalid', gen2='invalid') assert exc_info.value.errors() == [ {'loc': ('gen',), 'msg': 'Invalid value', 'type': 'type_error'}, {'loc': ('gen2',), 'msg': 'Invalid value', 'type': 'type_error'}, ] with pytest.raises(ValidationError) as exc_info: Model(a='foo', gen=MyGen(t1='bar', t2='baz'), gen2=MyGen(t1='bar', t2='baz')) assert exc_info.value.errors() == [ {'loc': ('gen', 't2'), 'msg': 'value could not be parsed to a boolean', 'type': 'type_error.bool'} ] m = Model(a='foo', 
gen=MyGen(t1='bar', t2=True), gen2=MyGen(t1=1, t2=2)) assert m.a == 'foo' assert m.gen.t1 == 'bar' assert m.gen.t2 is True assert m.gen2.t1 == 1 assert m.gen2.t2 == 2 def test_custom_generic_arbitrary_allowed(): T1 = TypeVar('T1') T2 = TypeVar('T2') class MyGen(Generic[T1, T2]): def __init__(self, t1: T1, t2: T2): self.t1 = t1 self.t2 = t2 class Model(BaseModel): a: str gen: MyGen[str, bool] class Config: arbitrary_types_allowed = True with pytest.raises(ValidationError) as exc_info: Model(a='foo', gen='invalid') assert exc_info.value.errors() == [ { 'loc': ('gen',), 'msg': 'instance of MyGen expected', 'type': 'type_error.arbitrary_type', 'ctx': {'expected_arbitrary_type': 'MyGen'}, } ] # No validation, no exception m = Model(a='foo', gen=MyGen(t1='bar', t2='baz')) assert m.a == 'foo' assert m.gen.t1 == 'bar' assert m.gen.t2 == 'baz' m = Model(a='foo', gen=MyGen(t1='bar', t2=True)) assert m.a == 'foo' assert m.gen.t1 == 'bar' assert m.gen.t2 is True def test_custom_generic_disallowed(): T1 = TypeVar('T1') T2 = TypeVar('T2') class MyGen(Generic[T1, T2]): def __init__(self, t1: T1, t2: T2): self.t1 = t1 self.t2 = t2 match = r'Fields of type(.*)are not supported.' with pytest.raises(TypeError, match=match): class Model(BaseModel): a: str gen: MyGen[str, bool] def test_hashable_required(): class Model(BaseModel): v: Hashable Model(v=None) with pytest.raises(ValidationError) as exc_info: Model(v=[]) assert exc_info.value.errors() == [ {'loc': ('v',), 'msg': 'value is not a valid hashable', 'type': 'type_error.hashable'} ] with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'field required', 'type': 'value_error.missing'}] @pytest.mark.parametrize('default', [1, None]) def test_hashable_optional(default): class Model(BaseModel): v: Hashable = default Model(v=None) Model() def test_default_factory_called_once(): """It should never call `default_factory` more than once even when `validate_all` is set""" v = 0 def factory() -> int: nonlocal v v += 1 return v class MyModel(BaseModel): id: int = Field(default_factory=factory) class Config: validate_all = True m1 = MyModel() assert m1.id == 1 class MyBadModel(BaseModel): id: List[str] = Field(default_factory=factory) class Config: validate_all = True with pytest.raises(ValidationError) as exc_info: MyBadModel() assert v == 2 # `factory` has been called to run validation assert exc_info.value.errors() == [ {'loc': ('id',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}, ] def test_default_factory_validator_child(): class Parent(BaseModel): foo: List[str] = Field(default_factory=list) @validator('foo', pre=True, each_item=True) def mutate_foo(cls, v): return f'{v}-1' assert Parent(foo=['a', 'b']).foo == ['a-1', 'b-1'] class Child(Parent): pass assert Child(foo=['a', 'b']).foo == ['a-1', 'b-1'] @pytest.mark.skipif(cython is None, reason='cython not installed') def test_cython_function_untouched(): Model = cython.inline( # language=Python """ from pydantic import BaseModel class Model(BaseModel): a = 0.0 b = 10 def get_double_a(self) -> float: return self.a + self.b return Model """ ) model = Model(a=10.2) assert model.a == 10.2 assert model.b == 10 assert model.get_double_a() == 20.2 def test_resolve_annotations_module_missing(tmp_path): # see https://github.com/pydantic/pydantic/issues/2363 file_path = tmp_path / 'module_to_load.py' # language=Python file_path.write_text( """ from pydantic import BaseModel class User(BaseModel): id: int name = 'Jane Doe' """ ) spec = 
importlib.util.spec_from_file_location('my_test_module', file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) assert module.User(id=12).dict() == {'id': 12, 'name': 'Jane Doe'} def test_iter_coverage(): class MyModel(BaseModel): x: int = 1 y: str = 'a' assert list(MyModel()._iter(by_alias=True)) == [('x', 1), ('y', 'a')] def test_config_field_info(): class Foo(BaseModel): a: str = Field(...) class Config: fields = {'a': {'description': 'descr'}} assert Foo.schema(by_alias=True)['properties'] == {'a': {'title': 'A', 'description': 'descr', 'type': 'string'}} def test_config_field_info_alias(): class Foo(BaseModel): a: str = Field(...) class Config: fields = {'a': {'alias': 'b'}} assert Foo.schema(by_alias=True)['properties'] == {'b': {'title': 'B', 'type': 'string'}} def test_config_field_info_merge(): class Foo(BaseModel): a: str = Field(..., foo='Foo') class Config: fields = {'a': {'bar': 'Bar'}} assert Foo.schema(by_alias=True)['properties'] == { 'a': {'bar': 'Bar', 'foo': 'Foo', 'title': 'A', 'type': 'string'} } def test_config_field_info_allow_mutation(): class Foo(BaseModel): a: str = Field(...) class Config: validate_assignment = True assert Foo.__fields__['a'].field_info.allow_mutation is True f = Foo(a='x') f.a = 'y' assert f.dict() == {'a': 'y'} class Bar(BaseModel): a: str = Field(...) class Config: fields = {'a': {'allow_mutation': False}} validate_assignment = True assert Bar.__fields__['a'].field_info.allow_mutation is False b = Bar(a='x') with pytest.raises(TypeError): b.a = 'y' assert b.dict() == {'a': 'x'} def test_arbitrary_types_allowed_custom_eq(): class Foo: def __eq__(self, other): if other.__class__ is not Foo: raise TypeError(f'Cannot interpret {other.__class__.__name__!r} as a valid type') return True class Model(BaseModel): x: Foo = Foo() class Config: arbitrary_types_allowed = True assert Model().x == Foo() def test_bytes_subclass(): class MyModel(BaseModel): my_bytes: bytes class BytesSubclass(bytes): def __new__(cls, data: bytes): self = bytes.__new__(cls, data) return self m = MyModel(my_bytes=BytesSubclass(b'foobar')) assert m.my_bytes.__class__ == BytesSubclass def test_int_subclass(): class MyModel(BaseModel): my_int: int class IntSubclass(int): def __new__(cls, data: int): self = int.__new__(cls, data) return self m = MyModel(my_int=IntSubclass(123)) assert m.my_int.__class__ == IntSubclass def test_model_issubclass(): assert not issubclass(int, BaseModel) class MyModel(BaseModel): x: int assert issubclass(MyModel, BaseModel) class Custom: __fields__ = True assert not issubclass(Custom, BaseModel) def test_long_int(): """ see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778 """ class Model(BaseModel): x: int assert Model(x='1' * 4_300).x == int('1' * 4_300) assert Model(x=b'1' * 4_300).x == int('1' * 4_300) assert Model(x=bytearray(b'1' * 4_300)).x == int('1' * 4_300) too_long = '1' * 4_301 with pytest.raises(ValidationError) as exc_info: Model(x=too_long) assert exc_info.value.errors() == [ { 'loc': ('x',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer', }, ] too_long_b = too_long.encode('utf-8') with pytest.raises(ValidationError): Model(x=too_long_b) with pytest.raises(ValidationError): Model(x=bytearray(too_long_b)) # this used to hang indefinitely with pytest.raises(ValidationError): Model(x='1' * (10**7)) pydantic-1.10.14/tests/test_errors.py000066400000000000000000000250641455251250200175700ustar00rootroot00000000000000import pickle 
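# Editorial note: the tests below exercise pydantic v1's error machinery. As a
# minimal sketch (the class name here is illustrative, not part of this suite),
# a custom error subclasses PydanticTypeError, sets `code` and `msg_template`,
# and renders keyword context into the template:
#
#     class NotAFruitError(PydanticTypeError):
#         code = 'not_a_fruit'
#         msg_template = '{value} is not a fruit'
#
#     str(NotAFruitError(value='rock'))  # -> 'rock is not a fruit'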
from typing import Dict, List, Optional, Union from uuid import UUID, uuid4 import pytest from typing_extensions import Literal from pydantic import UUID1, BaseConfig, BaseModel, PydanticTypeError, ValidationError, conint, errors, validator from pydantic.error_wrappers import flatten_errors, get_exc_type from pydantic.errors import StrRegexError def test_pydantic_error(): class TestError(PydanticTypeError): code = 'test_code' msg_template = 'test message template "{test_ctx}"' def __init__(self, *, test_ctx: int) -> None: super().__init__(test_ctx=test_ctx) with pytest.raises(TestError) as exc_info: raise TestError(test_ctx='test_value') assert str(exc_info.value) == 'test message template "test_value"' def test_pydantic_error_pickable(): """ Pydantic errors should be (un)pickable. (this test does not create a custom local error as we can't pickle local objects) """ p = pickle.dumps(StrRegexError(pattern='pika')) error = pickle.loads(p) assert isinstance(error, StrRegexError) assert error.pattern == 'pika' def test_interval_validation_error(): class Foo(BaseModel): model_type: Literal['foo'] f: int class Bar(BaseModel): model_type: Literal['bar'] b: int class MyModel(BaseModel): foobar: Union[Foo, Bar] @validator('foobar', pre=True) def check_action(cls, v): if isinstance(v, dict): model_type = v.get('model_type') if model_type == 'foo': return Foo(**v) if model_type == 'bar': return Bar(**v) raise ValueError('not valid Foo or Bar') m1 = MyModel(foobar={'model_type': 'foo', 'f': '1'}) assert m1.foobar.f == 1 assert isinstance(m1.foobar, Foo) m2 = MyModel(foobar={'model_type': 'bar', 'b': '2'}) assert m2.foobar.b == 2 assert isinstance(m2.foobar, BaseModel) with pytest.raises(ValidationError) as exc_info: MyModel(foobar={'model_type': 'foo', 'f': 'x'}) assert exc_info.value.errors() == [ {'loc': ('foobar', 'f'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_error_on_optional(): class Foobar(BaseModel): foo: Optional[str] = None @validator('foo', always=True, pre=True) def check_foo(cls, v): raise ValueError('custom error') with pytest.raises(ValidationError) as exc_info: Foobar(foo='x') assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'custom error', 'type': 'value_error'}] assert repr(exc_info.value.raw_errors[0]) == "ErrorWrapper(exc=ValueError('custom error'), loc=('foo',))" with pytest.raises(ValidationError) as exc_info: Foobar(foo=None) assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'custom error', 'type': 'value_error'}] @pytest.mark.parametrize( 'result,expected', ( ( 'errors', [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('b', 'x'), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('b', 'z'), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('c', 0, 'x'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('d',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('d',), 'msg': 'value is not a valid uuid', 'type': 'type_error.uuid'}, {'loc': ('e', '__key__'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('f', 0), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}, { 'loc': ('g',), 'msg': 'uuid version 1 expected', 'type': 'value_error.uuid.version', 'ctx': {'required_version': 1}, }, { 'loc': ('h',), 'msg': 'yet another error message template 42', 'type': 'value_error.number.not_gt', 'ctx': {'limit_value': 42}, }, ], ), ( 'json', 
"""\ [ { "loc": [ "a" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "b", "x" ], "msg": "field required", "type": "value_error.missing" }, { "loc": [ "b", "z" ], "msg": "field required", "type": "value_error.missing" }, { "loc": [ "c", 0, "x" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "d" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "d" ], "msg": "value is not a valid uuid", "type": "type_error.uuid" }, { "loc": [ "e", "__key__" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "f", 0 ], "msg": "none is not an allowed value", "type": "type_error.none.not_allowed" }, { "loc": [ "g" ], "msg": "uuid version 1 expected", "type": "value_error.uuid.version", "ctx": { "required_version": 1 } }, { "loc": [ "h" ], "msg": "yet another error message template 42", "type": "value_error.number.not_gt", "ctx": { "limit_value": 42 } } ]""", ), ( '__str__', """\ 10 validation errors for Model a value is not a valid integer (type=type_error.integer) b -> x field required (type=value_error.missing) b -> z field required (type=value_error.missing) c -> 0 -> x value is not a valid integer (type=type_error.integer) d value is not a valid integer (type=type_error.integer) d value is not a valid uuid (type=type_error.uuid) e -> __key__ value is not a valid integer (type=type_error.integer) f -> 0 none is not an allowed value (type=type_error.none.not_allowed) g uuid version 1 expected (type=value_error.uuid.version; required_version=1) h yet another error message template 42 (type=value_error.number.not_gt; limit_value=42)""", ), ), ) def test_validation_error(result, expected): class SubModel(BaseModel): x: int y: int z: str class Model(BaseModel): a: int b: SubModel c: List[SubModel] d: Union[int, UUID] e: Dict[int, str] f: List[Union[int, str]] g: UUID1 h: conint(gt=42) class Config: error_msg_templates = {'value_error.number.not_gt': 'yet another error message template {limit_value}'} with pytest.raises(ValidationError) as exc_info: Model.parse_obj( { 'a': 'not_int', 'b': {'y': 42}, 'c': [{'x': 'not_int', 'y': 42, 'z': 'string'}], 'd': 'string', 'e': {'not_int': 'string'}, 'f': [None], 'g': uuid4(), 'h': 21, } ) assert getattr(exc_info.value, result)() == expected def test_errors_unknown_error_object(): with pytest.raises(RuntimeError): list(flatten_errors([object], BaseConfig)) @pytest.mark.parametrize( 'exc,type_', ( (TypeError(), 'type_error'), (ValueError(), 'value_error'), (AssertionError(), 'assertion_error'), (errors.DecimalIsNotFiniteError(), 'value_error.decimal.not_finite'), ), ) def test_get_exc_type(exc, type_): if isinstance(type_, str): assert get_exc_type(type(exc)) == type_ else: with pytest.raises(type_) as exc_info: get_exc_type(type(exc)) assert isinstance(exc_info.value, type_) def test_single_error(): class Model(BaseModel): x: int with pytest.raises(ValidationError) as exc_info: Model(x='x') expected = """\ 1 validation error for Model x value is not a valid integer (type=type_error.integer)""" assert str(exc_info.value) == expected assert str(exc_info.value) == expected # to check lru cache doesn't break anything with pytest.raises(ValidationError) as exc_info: Model() assert ( str(exc_info.value) == """\ 1 validation error for Model x field required (type=value_error.missing)""" ) def test_nested_error(): class NestedModel3(BaseModel): x: str class NestedModel2(BaseModel): data2: List[NestedModel3] class NestedModel1(BaseModel): 
data1: List[NestedModel2] with pytest.raises(ValidationError) as exc_info: NestedModel1(data1=[{'data2': [{'y': 1}]}]) expected = [{'loc': ('data1', 0, 'data2', 0, 'x'), 'msg': 'field required', 'type': 'value_error.missing'}] assert exc_info.value.errors() == expected def test_validate_assignment_error(): class Model(BaseModel): x: int class Config: validate_assignment = True model = Model(x=1) with pytest.raises(ValidationError) as exc_info: model.x = 'a' assert ( str(exc_info.value) == '1 validation error for Model\nx\n value is not a valid integer (type=type_error.integer)' ) def test_submodel_override_validation_error(): class SubmodelA(BaseModel): x: str class SubmodelB(SubmodelA): x: int class Model(BaseModel): submodel: SubmodelB submodel = SubmodelA(x='a') with pytest.raises(ValidationError) as exc_info: Model(submodel=submodel) assert exc_info.value.errors() == [ {'loc': ('submodel', 'x'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_validation_error_methods(): class Model(BaseModel): x: int with pytest.raises(ValidationError) as exc_info: Model(x='x') e = exc_info.value assert ( str(e) == """\ 1 validation error for Model x value is not a valid integer (type=type_error.integer)""" ) assert e.errors() == [{'loc': ('x',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}] assert e.json(indent=None) == ( '[{"loc": ["x"], "msg": "value is not a valid integer", "type": "type_error.integer"}]' ) assert repr(e) == ( "ValidationError(model='Model', errors=[{'loc': ('x',), 'msg': 'value is not a valid integer', " "'type': 'type_error.integer'}])" ) pydantic-1.10.14/tests/test_fastapi.sh000077500000000000000000000004731455251250200176650ustar00rootroot00000000000000#! /usr/bin/env bash set -x set -e cd fastapi git fetch --tags git checkout 0.99.1 # temp fix for flask dependency issue # see: https://stackoverflow.com/questions/77213053/importerror-cannot-import-name-url-quote-from-werkzeug-urls pip install Werkzeug==2.2.2 pip install -r requirements.txt ./scripts/test.sh pydantic-1.10.14/tests/test_forward_ref.py000066400000000000000000000454021455251250200205520ustar00rootroot00000000000000import sys from typing import Dict, ForwardRef, List, Optional, Tuple import pytest from pydantic import BaseModel, ConfigError, ValidationError def test_postponed_annotations(create_module): module = create_module( # language=Python """ from __future__ import annotations from pydantic import BaseModel class Model(BaseModel): a: int """ ) m = module.Model(a='123') assert m.dict() == {'a': 123} def test_postponed_annotations_optional(create_module): module = create_module( # language=Python """ from __future__ import annotations from typing import Optional from pydantic import BaseModel class Model(BaseModel): a: Optional[int] """ ) assert module.Model(a='123').dict() == {'a': 123} assert module.Model().dict() == {'a': None} def test_postponed_annotations_auto_update_forward_refs(create_module): module = create_module( # language=Python """ from __future__ import annotations from pydantic import BaseModel class Model(BaseModel): a: Model """ ) assert module.Model.__fields__['a'].type_ is module.Model def test_forward_ref_auto_update_no_model(create_module): module = create_module( # language=Python """ from pydantic import BaseModel class Foo(BaseModel): a: 'Bar' class Bar(BaseModel): b: 'Foo' """ ) assert module.Foo.__fields__['a'].type_ == ForwardRef('Bar') assert module.Bar.__fields__['b'].type_ is module.Foo def 
test_forward_ref_one_of_fields_not_defined(create_module): @create_module def module(): from pydantic import BaseModel class Foo(BaseModel): foo: 'Foo' bar: 'Bar' # noqa: F821 assert module.Foo.__fields__['bar'].type_ == ForwardRef('Bar') assert module.Foo.__fields__['foo'].type_ is module.Foo def test_basic_forward_ref(create_module): @create_module def module(): from typing import ForwardRef, Optional from pydantic import BaseModel class Foo(BaseModel): a: int FooRef = ForwardRef('Foo') class Bar(BaseModel): b: Optional[FooRef] assert module.Bar().dict() == {'b': None} assert module.Bar(b={'a': '123'}).dict() == {'b': {'a': 123}} def test_self_forward_ref_module(create_module): @create_module def module(): from typing import ForwardRef from pydantic import BaseModel Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: 'Foo' = None Foo.update_forward_refs() assert module.Foo().dict() == {'a': 123, 'b': None} assert module.Foo(b={'a': '321'}).dict() == {'a': 123, 'b': {'a': 321, 'b': None}} def test_self_forward_ref_collection(create_module): @create_module def module(): from typing import Dict, List from pydantic import BaseModel class Foo(BaseModel): a: int = 123 b: 'Foo' = None c: 'List[Foo]' = [] d: 'Dict[str, Foo]' = {} Foo.update_forward_refs() assert module.Foo().dict() == {'a': 123, 'b': None, 'c': [], 'd': {}} assert module.Foo(b={'a': '321'}, c=[{'a': 234}], d={'bar': {'a': 345}}).dict() == { 'a': 123, 'b': {'a': 321, 'b': None, 'c': [], 'd': {}}, 'c': [{'a': 234, 'b': None, 'c': [], 'd': {}}], 'd': {'bar': {'a': 345, 'b': None, 'c': [], 'd': {}}}, } with pytest.raises(ValidationError) as exc_info: module.Foo(b={'a': '321'}, c=[{'b': 234}], d={'bar': {'a': 345}}) assert exc_info.value.errors() == [ {'loc': ('c', 0, 'b'), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'} ] assert module.Foo.__fields__['a'].type_ is int assert module.Foo.__fields__['b'].type_ is module.Foo assert module.Foo.__fields__['b'].outer_type_ is module.Foo assert module.Foo.__fields__['c'].type_ is module.Foo assert module.Foo.__fields__['c'].outer_type_ == List[module.Foo] assert module.Foo.__fields__['d'].type_ is module.Foo assert module.Foo.__fields__['d'].outer_type_ == Dict[str, module.Foo] def test_self_forward_ref_local(create_module): @create_module def module(): from typing import ForwardRef from pydantic import BaseModel def main(): Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: Foo = None Foo.update_forward_refs() return Foo Foo = module.main() assert Foo().dict() == {'a': 123, 'b': None} assert Foo(b={'a': '321'}).dict() == {'a': 123, 'b': {'a': 321, 'b': None}} def test_missing_update_forward_refs(create_module): @create_module def module(): from typing import ForwardRef from pydantic import BaseModel Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: Foo = None with pytest.raises(ConfigError) as exc_info: module.Foo(b=123) assert str(exc_info.value).startswith('field "b" not yet prepared so type is still a ForwardRef') def test_forward_ref_dataclass(create_module): @create_module def module(): from pydantic import AnyUrl from pydantic.dataclasses import dataclass @dataclass class Dataclass: url: AnyUrl m = module.Dataclass('http://example.com ') assert m.url == 'http://example.com' def test_forward_ref_dataclass_with_future_annotations(create_module): module = create_module( # language=Python """ from __future__ import annotations from pydantic import AnyUrl from pydantic.dataclasses import dataclass @dataclass class Dataclass: url: AnyUrl """ 
) m = module.Dataclass('http://example.com ') assert m.url == 'http://example.com' def test_forward_ref_sub_types(create_module): @create_module def module(): from typing import ForwardRef, Union from pydantic import BaseModel class Leaf(BaseModel): a: str TreeType = Union[ForwardRef('Node'), Leaf] class Node(BaseModel): value: int left: TreeType right: TreeType Node.update_forward_refs() Node = module.Node Leaf = module.Leaf data = {'value': 3, 'left': {'a': 'foo'}, 'right': {'value': 5, 'left': {'a': 'bar'}, 'right': {'a': 'buzz'}}} node = Node(**data) assert isinstance(node.left, Leaf) assert isinstance(node.right, Node) def test_forward_ref_nested_sub_types(create_module): @create_module def module(): from typing import ForwardRef, Tuple, Union from pydantic import BaseModel class Leaf(BaseModel): a: str TreeType = Union[Union[Tuple[ForwardRef('Node'), str], int], Leaf] class Node(BaseModel): value: int left: TreeType right: TreeType Node.update_forward_refs() Node = module.Node Leaf = module.Leaf data = { 'value': 3, 'left': {'a': 'foo'}, 'right': [{'value': 5, 'left': {'a': 'bar'}, 'right': {'a': 'buzz'}}, 'test'], } node = Node(**data) assert isinstance(node.left, Leaf) assert isinstance(node.right[0], Node) def test_self_reference_json_schema(create_module): @create_module def module(): from typing import List from pydantic import BaseModel class Account(BaseModel): name: str subaccounts: List['Account'] = [] Account.update_forward_refs() Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name'], } }, } def test_self_reference_json_schema_with_future_annotations(create_module): module = create_module( # language=Python """ from __future__ import annotations from typing import List from pydantic import BaseModel class Account(BaseModel): name: str subaccounts: List[Account] = [] Account.update_forward_refs() """ ) Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name'], } }, } def test_circular_reference_json_schema(create_module): @create_module def module(): from typing import List from pydantic import BaseModel class Owner(BaseModel): account: 'Account' class Account(BaseModel): name: str owner: 'Owner' subaccounts: List['Account'] = [] Account.update_forward_refs() Owner.update_forward_refs() Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'owner': {'$ref': '#/definitions/Owner'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name', 'owner'], }, 'Owner': { 'title': 'Owner', 'type': 'object', 'properties': {'account': {'$ref': '#/definitions/Account'}}, 'required': ['account'], }, }, } def test_circular_reference_json_schema_with_future_annotations(create_module): module = create_module( # language=Python """ from __future__ import 
annotations from typing import List from pydantic import BaseModel class Owner(BaseModel): account: Account class Account(BaseModel): name: str owner: Owner subaccounts: List[Account] = [] Account.update_forward_refs() Owner.update_forward_refs() """ ) Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'owner': {'$ref': '#/definitions/Owner'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name', 'owner'], }, 'Owner': { 'title': 'Owner', 'type': 'object', 'properties': {'account': {'$ref': '#/definitions/Account'}}, 'required': ['account'], }, }, } def test_forward_ref_with_field(create_module): @create_module def module(): from typing import ForwardRef, List import pytest from pydantic import BaseModel, Field Foo = ForwardRef('Foo') with pytest.raises( ValueError, match='On field "c" the following field constraints are set but not enforced: gt.' ): class Foo(BaseModel): c: List[Foo] = Field(..., gt=0) def test_forward_ref_optional(create_module): module = create_module( # language=Python """ from __future__ import annotations from pydantic import BaseModel, Field from typing import List, Optional class Spec(BaseModel): spec_fields: List[str] = Field(..., alias="fields") filter: Optional[str] sort: Optional[str] class PSpec(Spec): g: Optional[GSpec] class GSpec(Spec): p: Optional[PSpec] PSpec.update_forward_refs() class Filter(BaseModel): g: Optional[GSpec] p: Optional[PSpec] """ ) Filter = module.Filter assert isinstance(Filter(p={'sort': 'some_field:asc', 'fields': []}), Filter) def test_forward_ref_with_create_model(create_module): @create_module def module(): import pydantic Sub = pydantic.create_model('Sub', foo='bar', __module__=__name__) assert Sub # get rid of "local variable 'Sub' is assigned to but never used" Main = pydantic.create_model('Main', sub=('Sub', ...), __module__=__name__) instance = Main(sub={}) assert instance.sub.dict() == {'foo': 'bar'} def test_resolve_forward_ref_dataclass(create_module): module = create_module( # language=Python """ from __future__ import annotations from dataclasses import dataclass from pydantic import BaseModel from pydantic.typing import Literal @dataclass class Base: literal: Literal[1, 2] class What(BaseModel): base: Base """ ) m = module.What(base=module.Base(literal=1)) assert m.base.literal == 1 def test_nested_forward_ref(): class NestedTuple(BaseModel): x: Tuple[int, Optional['NestedTuple']] # noqa: F821 obj = NestedTuple.parse_obj({'x': ('1', {'x': ('2', {'x': ('3', None)})})}) assert obj.dict() == {'x': (1, {'x': (2, {'x': (3, None)})})} def test_discriminated_union_forward_ref(create_module): @create_module def module(): from typing import Union from typing_extensions import Literal from pydantic import BaseModel, Field class Pet(BaseModel): __root__: Union['Cat', 'Dog'] = Field(..., discriminator='type') # noqa: F821 class Cat(BaseModel): type: Literal['cat'] class Dog(BaseModel): type: Literal['dog'] with pytest.raises(ConfigError, match='you might need to call Pet.update_forward_refs()'): module.Pet.parse_obj({'type': 'pika'}) module.Pet.update_forward_refs() with pytest.raises(ValidationError, match="No match for discriminator 'type' and value 'pika'"): module.Pet.parse_obj({'type': 'pika'}) assert module.Pet.schema() == { 'title': 'Pet', 'discriminator': {'propertyName': 
'type', 'mapping': {'cat': '#/definitions/Cat', 'dog': '#/definitions/Dog'}}, 'oneOf': [{'$ref': '#/definitions/Cat'}, {'$ref': '#/definitions/Dog'}], 'definitions': { 'Cat': { 'title': 'Cat', 'type': 'object', 'properties': {'type': {'title': 'Type', 'enum': ['cat'], 'type': 'string'}}, 'required': ['type'], }, 'Dog': { 'title': 'Dog', 'type': 'object', 'properties': {'type': {'title': 'Type', 'enum': ['dog'], 'type': 'string'}}, 'required': ['type'], }, }, } def test_class_var_as_string(create_module): module = create_module( # language=Python """ from __future__ import annotations from typing import ClassVar from pydantic import BaseModel class Model(BaseModel): a: ClassVar[int] """ ) assert module.Model.__class_vars__ == {'a'} def test_json_encoder_str(create_module): module = create_module( # language=Python """ from pydantic import BaseModel class User(BaseModel): x: str FooUser = User class User(BaseModel): y: str class Model(BaseModel): foo_user: FooUser user: User class Config: json_encoders = { 'User': lambda v: f'User({v.y})', } """ ) m = module.Model(foo_user={'x': 'user1'}, user={'y': 'user2'}) assert m.json(models_as_dict=False) == '{"foo_user": {"x": "user1"}, "user": "User(user2)"}' def test_json_encoder_forward_ref(create_module): module = create_module( # language=Python """ from pydantic import BaseModel from typing import ForwardRef, List, Optional class User(BaseModel): name: str friends: Optional[List['User']] = None class Config: json_encoders = { ForwardRef('User'): lambda v: f'User({v.name})', } """ ) m = module.User(name='anne', friends=[{'name': 'ben'}, {'name': 'charlie'}]) assert m.json(models_as_dict=False) == '{"name": "anne", "friends": ["User(ben)", "User(charlie)"]}' skip_pep585 = pytest.mark.skipif( sys.version_info < (3, 9), reason='PEP585 generics only supported for python 3.9 and above' ) @skip_pep585 def test_pep585_self_referencing_generics(): class SelfReferencing(BaseModel): names: list['SelfReferencing'] # noqa: F821 SelfReferencing.update_forward_refs() # will raise an exception if the forward ref isn't resolvable # test the class assert SelfReferencing.__fields__['names'].type_ is SelfReferencing # NOTE: outer_type_ is not converted assert SelfReferencing.__fields__['names'].outer_type_ == list['SelfReferencing'] # test that object creation works obj = SelfReferencing(names=[SelfReferencing(names=[])]) assert obj.names == [SelfReferencing(names=[])] @skip_pep585 def test_pep585_recursive_generics(create_module): @create_module def module(): from pydantic import BaseModel class Team(BaseModel): name: str heroes: list['Hero'] # noqa: F821 class Hero(BaseModel): name: str teams: list[Team] Team.update_forward_refs() assert module.Team.__fields__['heroes'].type_ is module.Hero assert module.Hero.__fields__['teams'].type_ is module.Team module.Hero(name='Ivan', teams=[module.Team(name='TheBest', heroes=[])]) @pytest.mark.skipif(sys.version_info < (3, 9), reason='needs 3.9 or newer') def test_class_var_forward_ref(create_module): # see #3679 create_module( # language=Python """ from __future__ import annotations from typing import ClassVar from pydantic import BaseModel class WithClassVar(BaseModel): Instances: ClassVar[dict[str, WithClassVar]] = {} """ ) pydantic-1.10.14/tests/test_generics.py000066400000000000000000001233441455251250200200530ustar00rootroot00000000000000import gc import itertools import json import sys from enum import Enum from typing import ( Any, Callable, ClassVar, Dict, FrozenSet, Generic, Iterable, List, Mapping, Optional, 
Sequence, Set, Tuple, Type, TypeVar, Union, ) import pytest from typing_extensions import Annotated, Literal from pydantic import BaseModel, Field, Json, ValidationError, create_model, root_validator, validator from pydantic.generics import ( GenericModel, _assigned_parameters, _generic_types_cache, iter_contained_typevars, replace_types, ) @pytest.fixture(autouse=True) def clean_cache(): gc.collect() # cleans up _generic_types_cache for checking item counts in the cache def test_generic_name(): data_type = TypeVar('data_type') class Result(GenericModel, Generic[data_type]): data: data_type if sys.version_info >= (3, 9): assert Result[list[int]].__name__ == 'Result[list[int]]' assert Result[List[int]].__name__ == 'Result[List[int]]' assert Result[int].__name__ == 'Result[int]' def test_double_parameterize_error(): data_type = TypeVar('data_type') class Result(GenericModel, Generic[data_type]): data: data_type with pytest.raises(TypeError) as exc_info: Result[int][int] assert str(exc_info.value) == 'Cannot parameterize a concrete instantiation of a generic model' def test_value_validation(): T = TypeVar('T') class Response(GenericModel, Generic[T]): data: T @validator('data', each_item=True) def validate_value_nonzero(cls, v): if v == 0: raise ValueError('value is zero') return v @root_validator() def validate_sum(cls, values): if sum(values.get('data', {}).values()) > 5: raise ValueError('sum too large') return values assert Response[Dict[int, int]](data={1: '4'}).dict() == {'data': {1: 4}} with pytest.raises(ValidationError) as exc_info: Response[Dict[int, int]](data={1: 'a'}) assert exc_info.value.errors() == [ {'loc': ('data', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] with pytest.raises(ValidationError) as exc_info: Response[Dict[int, int]](data={1: 0}) assert exc_info.value.errors() == [{'loc': ('data', 1), 'msg': 'value is zero', 'type': 'value_error'}] with pytest.raises(ValidationError) as exc_info: Response[Dict[int, int]](data={1: 3, 2: 6}) assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'sum too large', 'type': 'value_error'}] def test_methods_are_inherited(): class CustomGenericModel(GenericModel): def method(self): return self.data T = TypeVar('T') class Model(CustomGenericModel, Generic[T]): data: T instance = Model[int](data=1) assert instance.method() == 1 def test_config_is_inherited(): class CustomGenericModel(GenericModel): class Config: allow_mutation = False T = TypeVar('T') class Model(CustomGenericModel, Generic[T]): data: T instance = Model[int](data=1) with pytest.raises(TypeError) as exc_info: instance.data = 2 assert str(exc_info.value) == '"Model[int]" is immutable and does not support item assignment' def test_default_argument(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T other: bool = True result = Result[int](data=1) assert result.other is True def test_default_argument_for_typevar(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T = 4 result = Result[int]() assert result.data == 4 result = Result[float]() assert result.data == 4 result = Result[int](data=1) assert result.data == 1 def test_classvar(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T other: ClassVar[int] = 1 assert Result.other == 1 assert Result[int].other == 1 assert Result[int](data=1).other == 1 assert 'other' not in Result.__fields__ def test_non_annotated_field(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T other = True assert 'other' in Result.__fields__ assert 
'other' in Result[int].__fields__ result = Result[int](data=1) assert result.other is True def test_must_inherit_from_generic(): with pytest.raises(TypeError) as exc_info: class Result(GenericModel): pass Result[int] assert str(exc_info.value) == 'Type Result must inherit from typing.Generic before being parameterized' def test_parameters_placed_on_generic(): T = TypeVar('T') with pytest.raises(TypeError, match='Type parameters should be placed on typing.Generic, not GenericModel'): class Result(GenericModel[T]): pass def test_parameters_must_be_typevar(): with pytest.raises(TypeError, match='Type GenericModel must inherit from typing.Generic before being '): class Result(GenericModel[int]): pass def test_subclass_can_be_genericized(): T = TypeVar('T') class Result(GenericModel, Generic[T]): pass Result[T] def test_parameter_count(): T = TypeVar('T') S = TypeVar('S') class Model(GenericModel, Generic[T, S]): x: T y: S with pytest.raises(TypeError) as exc_info: Model[int, int, int] assert str(exc_info.value) == 'Too many parameters for Model; actual 3, expected 2' with pytest.raises(TypeError) as exc_info: Model[int] assert str(exc_info.value) == 'Too few parameters for Model; actual 1, expected 2' def test_cover_cache(): cache_size = len(_generic_types_cache) T = TypeVar('T') class Model(GenericModel, Generic[T]): x: T models = [] # keep references to models to get cache size models.append(Model[int]) # adds both with-tuple and without-tuple version to cache assert len(_generic_types_cache) == cache_size + 2 models.append(Model[int]) # uses the cache assert len(_generic_types_cache) == cache_size + 2 del models def test_cache_keys_are_hashable(): cache_size = len(_generic_types_cache) T = TypeVar('T') C = Callable[[str, Dict[str, Any]], Iterable[str]] class MyGenericModel(GenericModel, Generic[T]): t: T # Callable's first params get converted to a list, which is not hashable. # Make sure we can handle that special case Simple = MyGenericModel[Callable[[int], str]] models = [] # keep references to models to get cache size models.append(Simple) assert len(_generic_types_cache) == cache_size + 2 # Nested Callables models.append(MyGenericModel[Callable[[C], Iterable[str]]]) assert len(_generic_types_cache) == cache_size + 4 models.append(MyGenericModel[Callable[[Simple], Iterable[int]]]) assert len(_generic_types_cache) == cache_size + 6 models.append(MyGenericModel[Callable[[MyGenericModel[C]], Iterable[int]]]) assert len(_generic_types_cache) == cache_size + 10 class Model(BaseModel): x: MyGenericModel[Callable[[C], Iterable[str]]] = Field(...) 
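# Editorial note: pydantic v1 memoizes concrete generics in _generic_types_cache,
# keyed (roughly) by the generic model class together with its parameter tuple,
# which is why every parameter must be hashable and why plain Callable argument
# lists need the special handling checked above. A hedged sketch of the lookup
# shape (illustrative only, not the exact internal code):
#
#     key = (MyGenericModel, (int,))           # class + parameters
#     cached = _generic_types_cache.get(key)   # hit -> reuse the concrete model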
models.append(Model) assert len(_generic_types_cache) == cache_size + 10 del models def test_caches_get_cleaned_up(): types_cache_size = len(_generic_types_cache) params_cache_size = len(_assigned_parameters) T = TypeVar('T') class MyGenericModel(GenericModel, Generic[T]): x: T Model = MyGenericModel[int] assert len(_generic_types_cache) == types_cache_size + 2 assert len(_assigned_parameters) == params_cache_size + 1 del Model gc.collect() assert len(_generic_types_cache) == types_cache_size assert len(_assigned_parameters) == params_cache_size def test_generics_work_with_many_parametrized_base_models(): cache_size = len(_generic_types_cache) params_size = len(_assigned_parameters) count_create_models = 1000 T = TypeVar('T') C = TypeVar('C') class A(GenericModel, Generic[T, C]): x: T y: C class B(A[int, C], GenericModel, Generic[C]): pass models = [create_model(f'M{i}') for i in range(count_create_models)] generics = [] for m in models: Working = B[m] generics.append(Working) assert len(_generic_types_cache) == cache_size + count_create_models * 5 + 1 assert len(_assigned_parameters) == params_size + count_create_models * 3 + 1 del models del generics def test_generic_config(): data_type = TypeVar('data_type') class Result(GenericModel, Generic[data_type]): data: data_type class Config: allow_mutation = False result = Result[int](data=1) assert result.data == 1 with pytest.raises(TypeError): result.data = 2 def test_enum_generic(): T = TypeVar('T') class MyEnum(Enum): x = 1 y = 2 class Model(GenericModel, Generic[T]): enum: T Model[MyEnum](enum=MyEnum.x) Model[MyEnum](enum=2) def test_generic(): data_type = TypeVar('data_type') error_type = TypeVar('error_type') class Result(GenericModel, Generic[data_type, error_type]): data: Optional[List[data_type]] error: Optional[error_type] positive_number: int @validator('error', always=True) def validate_error(cls, v: Optional[error_type], values: Dict[str, Any]) -> Optional[error_type]: if values.get('data', None) is None and v is None: raise ValueError('Must provide data or error') if values.get('data', None) is not None and v is not None: raise ValueError('Must not provide both data and error') return v @validator('positive_number') def validate_positive_number(cls, v: int) -> int: if v < 0: raise ValueError return v class Error(BaseModel): message: str class Data(BaseModel): number: int text: str success1 = Result[Data, Error](data=[Data(number=1, text='a')], positive_number=1) assert success1.dict() == {'data': [{'number': 1, 'text': 'a'}], 'error': None, 'positive_number': 1} assert repr(success1) == "Result[Data, Error](data=[Data(number=1, text='a')], error=None, positive_number=1)" success2 = Result[Data, Error](error=Error(message='error'), positive_number=1) assert success2.dict() == {'data': None, 'error': {'message': 'error'}, 'positive_number': 1} assert repr(success2) == "Result[Data, Error](data=None, error=Error(message='error'), positive_number=1)" with pytest.raises(ValidationError) as exc_info: Result[Data, Error](error=Error(message='error'), positive_number=-1) assert exc_info.value.errors() == [{'loc': ('positive_number',), 'msg': '', 'type': 'value_error'}] with pytest.raises(ValidationError) as exc_info: Result[Data, Error](data=[Data(number=1, text='a')], error=Error(message='error'), positive_number=1) assert exc_info.value.errors() == [ {'loc': ('error',), 'msg': 'Must not provide both data and error', 'type': 'value_error'} ] with pytest.raises(ValidationError) as exc_info: Result[Data, Error](data=[Data(number=1, 
text='a')], error=Error(message='error'), positive_number=1) assert exc_info.value.errors() == [ {'loc': ('error',), 'msg': 'Must not provide both data and error', 'type': 'value_error'} ] def test_alongside_concrete_generics(): from pydantic.generics import GenericModel T = TypeVar('T') class MyModel(GenericModel, Generic[T]): item: T metadata: Dict[str, Any] model = MyModel[int](item=1, metadata={}) assert model.item == 1 assert model.metadata == {} def test_complex_nesting(): from pydantic.generics import GenericModel T = TypeVar('T') class MyModel(GenericModel, Generic[T]): item: List[Dict[Union[int, T], str]] item = [{1: 'a', 'a': 'a'}] model = MyModel[str](item=item) assert model.item == item def test_required_value(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): a: int with pytest.raises(ValidationError) as exc_info: MyModel[int]() assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_optional_value(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): a: Optional[int] = 1 model = MyModel[int]() assert model.dict() == {'a': 1} def test_custom_schema(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): a: int = Field(1, description='Custom') schema = MyModel[int].schema() assert schema['properties']['a'].get('description') == 'Custom' def test_child_schema(): T = TypeVar('T') class Model(GenericModel, Generic[T]): a: T class Child(Model[T], Generic[T]): pass schema = Child[int].schema() assert schema == { 'title': 'Child[int]', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'integer'}}, 'required': ['a'], } def test_custom_generic_naming(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): value: Optional[T] @classmethod def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: param_names = [param.__name__ if hasattr(param, '__name__') else str(param) for param in params] title = param_names[0].title() return f'Optional{title}Wrapper' assert repr(MyModel[int](value=1)) == 'OptionalIntWrapper(value=1)' assert repr(MyModel[str](value=None)) == 'OptionalStrWrapper(value=None)' def test_nested(): AT = TypeVar('AT') class InnerT(GenericModel, Generic[AT]): a: AT inner_int = InnerT[int](a=8) inner_str = InnerT[str](a='ate') inner_dict_any = InnerT[Any](a={}) inner_int_any = InnerT[Any](a=7) class OuterT_SameType(GenericModel, Generic[AT]): i: InnerT[AT] OuterT_SameType[int](i=inner_int) OuterT_SameType[str](i=inner_str) OuterT_SameType[int](i=inner_int_any) # ensure parsing the broader inner type works with pytest.raises(ValidationError) as exc_info: OuterT_SameType[int](i=inner_str) assert exc_info.value.errors() == [ {'loc': ('i', 'a'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] with pytest.raises(ValidationError) as exc_info: OuterT_SameType[int](i=inner_dict_any) assert exc_info.value.errors() == [ {'loc': ('i', 'a'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_partial_specification(): AT = TypeVar('AT') BT = TypeVar('BT') class Model(GenericModel, Generic[AT, BT]): a: AT b: BT partial_model = Model[int, BT] concrete_model = partial_model[str] concrete_model(a=1, b='abc') with pytest.raises(ValidationError) as exc_info: concrete_model(a='abc', b=None) assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('b',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}, ] def 
test_partial_specification_with_inner_typevar(): AT = TypeVar('AT') BT = TypeVar('BT') class Model(GenericModel, Generic[AT, BT]): a: List[AT] b: List[BT] partial_model = Model[str, BT] assert partial_model.__concrete__ is False concrete_model = partial_model[int] assert concrete_model.__concrete__ is True # nested resolution of partial models should work as expected nested_resolved = concrete_model(a=[123], b=['456']) assert nested_resolved.a == ['123'] assert nested_resolved.b == [456] def test_partial_specification_name(): AT = TypeVar('AT') BT = TypeVar('BT') class Model(GenericModel, Generic[AT, BT]): a: AT b: BT partial_model = Model[int, BT] assert partial_model.__name__ == 'Model[int, BT]' concrete_model = partial_model[str] assert concrete_model.__name__ == 'Model[int, BT][str]' def test_partial_specification_instantiation(): AT = TypeVar('AT') BT = TypeVar('BT') class Model(GenericModel, Generic[AT, BT]): a: AT b: BT partial_model = Model[int, BT] partial_model(a=1, b=2) partial_model(a=1, b='a') with pytest.raises(ValidationError) as exc_info: partial_model(a='a', b=2) assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_partial_specification_instantiation_bounded(): AT = TypeVar('AT') BT = TypeVar('BT', bound=int) class Model(GenericModel, Generic[AT, BT]): a: AT b: BT Model(a=1, b=1) with pytest.raises(ValidationError) as exc_info: Model(a=1, b='a') assert exc_info.value.errors() == [ {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] partial_model = Model[int, BT] partial_model(a=1, b=1) with pytest.raises(ValidationError) as exc_info: partial_model(a=1, b='a') assert exc_info.value.errors() == [ {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_typevar_parametrization(): AT = TypeVar('AT') BT = TypeVar('BT') class Model(GenericModel, Generic[AT, BT]): a: AT b: BT CT = TypeVar('CT', bound=int) DT = TypeVar('DT', bound=int) with pytest.raises(ValidationError) as exc_info: Model[CT, DT](a='a', b='b') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] def test_multiple_specification(): AT = TypeVar('AT') BT = TypeVar('BT') class Model(GenericModel, Generic[AT, BT]): a: AT b: BT CT = TypeVar('CT') partial_model = Model[CT, CT] concrete_model = partial_model[str] with pytest.raises(ValidationError) as exc_info: concrete_model(a=None, b=None) assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}, {'loc': ('b',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}, ] def test_generic_subclass_of_concrete_generic(): T = TypeVar('T') U = TypeVar('U') class GenericBaseModel(GenericModel, Generic[T]): data: T class GenericSub(GenericBaseModel[int], Generic[U]): extra: U ConcreteSub = GenericSub[int] with pytest.raises(ValidationError): ConcreteSub(data=2, extra='wrong') with pytest.raises(ValidationError): ConcreteSub(data='wrong', extra=2) ConcreteSub(data=2, extra=3) def test_generic_model_pickle(create_module): # Using create_module because pickle doesn't support # objects with <locals> in their __qualname__ (e.g. 
defined in function) @create_module def module(): import pickle from typing import Generic, TypeVar from pydantic import BaseModel from pydantic.generics import GenericModel t = TypeVar('t') class Model(BaseModel): a: float b: int = 10 class MyGeneric(GenericModel, Generic[t]): value: t original = MyGeneric[Model](value=Model(a='24')) dumped = pickle.dumps(original) loaded = pickle.loads(dumped) assert loaded.value.a == original.value.a == 24 assert loaded.value.b == original.value.b == 10 assert loaded == original def test_generic_model_from_function_pickle_fail(create_module): @create_module def module(): import pickle from typing import Generic, TypeVar import pytest from pydantic import BaseModel from pydantic.generics import GenericModel t = TypeVar('t') class Model(BaseModel): a: float b: int = 10 class MyGeneric(GenericModel, Generic[t]): value: t def get_generic(t): return MyGeneric[t] original = get_generic(Model)(value=Model(a='24')) with pytest.raises(pickle.PicklingError): pickle.dumps(original) def test_generic_model_redefined_without_cache_fail(create_module, monkeypatch): # match identity checker otherwise we never get to the redefinition check monkeypatch.setattr('pydantic.generics.all_identical', lambda left, right: False) @create_module def module(): from typing import Generic, TypeVar from pydantic import BaseModel from pydantic.generics import GenericModel, _generic_types_cache t = TypeVar('t') class MyGeneric(GenericModel, Generic[t]): value: t class Model(BaseModel): ... concrete = MyGeneric[Model] _generic_types_cache.clear() second_concrete = MyGeneric[Model] class Model(BaseModel): # same name, but type different, so it's not in cache ... third_concrete = MyGeneric[Model] assert concrete is not second_concrete assert concrete is not third_concrete assert second_concrete is not third_concrete assert globals()['MyGeneric[Model]'] is concrete assert globals()['MyGeneric[Model]_'] is second_concrete assert globals()['MyGeneric[Model]__'] is third_concrete def test_generic_model_caching_detect_order_of_union_args_basic(create_module): # Basic variant of https://github.com/pydantic/pydantic/issues/4474 @create_module def module(): from typing import Generic, TypeVar, Union from pydantic.generics import GenericModel t = TypeVar('t') class Model(GenericModel, Generic[t]): data: t int_or_float_model = Model[Union[int, float]] float_or_int_model = Model[Union[float, int]] assert type(int_or_float_model(data='1').data) is int assert type(float_or_int_model(data='1').data) is float @pytest.mark.skipif(sys.version_info < (3, 10), reason='pep-604 syntax (Ex.: list | int) was added in python3.10') def test_generic_model_caching_detect_order_of_union_args_basic_with_pep_604_syntax(create_module): # Basic variant of https://github.com/pydantic/pydantic/issues/4474 with pep-604 syntax. @create_module def module(): from typing import Generic, TypeVar from pydantic.generics import GenericModel t = TypeVar('t') class Model(GenericModel, Generic[t]): data: t int_or_float_model = Model[int | float] float_or_int_model = Model[float | int] assert type(int_or_float_model(data='1').data) is int assert type(float_or_int_model(data='1').data) is float @pytest.mark.skip( reason=""" Depends on similar issue in CPython itself: https://github.com/python/cpython/issues/86483 Documented and skipped for possible fix later. 
""" ) def test_generic_model_caching_detect_order_of_union_args_nested(create_module): # Nested variant of https://github.com/pydantic/pydantic/issues/4474 @create_module def module(): from typing import Generic, List, TypeVar, Union from pydantic.generics import GenericModel t = TypeVar('t') class Model(GenericModel, Generic[t]): data: t int_or_float_model = Model[List[Union[int, float]]] float_or_int_model = Model[List[Union[float, int]]] assert type(int_or_float_model(data=['1']).data[0]) is int assert type(float_or_int_model(data=['1']).data[0]) is float def test_get_caller_frame_info(create_module): @create_module def module(): from pydantic.generics import get_caller_frame_info def function(): assert get_caller_frame_info() == (__name__, True) another_function() def another_function(): assert get_caller_frame_info() == (__name__, False) third_function() def third_function(): assert get_caller_frame_info() == (__name__, False) function() def test_get_caller_frame_info_called_from_module(create_module): @create_module def module(): from unittest.mock import patch import pytest from pydantic.generics import get_caller_frame_info with pytest.raises(RuntimeError, match='This function must be used inside another function'): with patch('sys._getframe', side_effect=ValueError('getframe_exc')): get_caller_frame_info() def test_get_caller_frame_info_when_sys_getframe_undefined(): from pydantic.generics import get_caller_frame_info getframe = sys._getframe del sys._getframe try: assert get_caller_frame_info() == (None, False) finally: # just to make sure we always setting original attribute back sys._getframe = getframe def test_iter_contained_typevars(): T = TypeVar('T') T2 = TypeVar('T2') class Model(GenericModel, Generic[T]): a: T assert list(iter_contained_typevars(Model[T])) == [T] assert list(iter_contained_typevars(Optional[List[Union[str, Model[T]]]])) == [T] assert list(iter_contained_typevars(Optional[List[Union[str, Model[int]]]])) == [] assert list(iter_contained_typevars(Optional[List[Union[str, Model[T], Callable[[T2, T], str]]]])) == [T, T2, T] def test_nested_identity_parameterization(): T = TypeVar('T') T2 = TypeVar('T2') class Model(GenericModel, Generic[T]): a: T assert Model[T][T][T] is Model assert Model[T] is Model assert Model[T2] is not Model def test_replace_types(): T = TypeVar('T') class Model(GenericModel, Generic[T]): a: T assert replace_types(T, {T: int}) is int assert replace_types(List[Union[str, list, T]], {T: int}) == List[Union[str, list, int]] assert replace_types(Callable, {T: int}) == Callable assert replace_types(Callable[[int, str, T], T], {T: int}) == Callable[[int, str, int], int] assert replace_types(T, {}) is T assert replace_types(Model[List[T]], {T: int}) == Model[List[T]][int] assert replace_types(T, {}) is T assert replace_types(Type[T], {T: int}) == Type[int] assert replace_types(Model[T], {T: T}) == Model[T] if sys.version_info >= (3, 9): # Check generic aliases (subscripted builtin types) to make sure they # resolve correctly (don't get translated to typing versions for # example) assert replace_types(list[Union[str, list, T]], {T: int}) == list[Union[str, list, int]] @pytest.mark.skipif(sys.version_info < (3, 10), reason='pep-604 syntax (Ex.: list | int) was added in python3.10') def test_replace_types_with_pep_604_syntax() -> None: T = TypeVar('T') class Model(GenericModel, Generic[T]): a: T assert replace_types(T | None, {T: int}) == int | None assert replace_types(T | int | str, {T: float}) == float | int | str assert replace_types(list[T] 
| None, {T: int}) == list[int] | None assert replace_types(List[str | list | T], {T: int}) == List[str | list | int] assert replace_types(list[str | list | T], {T: int}) == list[str | list | int] assert replace_types(list[str | list | list[T]], {T: int}) == list[str | list | list[int]] assert replace_types(list[Model[T] | None] | None, {T: T}) == list[Model[T] | None] | None assert ( replace_types(T | list[T | list[T | list[T | None] | None] | None] | None, {T: int}) == int | list[int | list[int | list[int | None] | None] | None] | None ) assert replace_types(list[list[list[T | None]]], {T: int}) == list[list[list[int | None]]] def test_replace_types_with_user_defined_generic_type_field(): """Test that using user defined generic types as generic model fields are handled correctly.""" T = TypeVar('T') KT = TypeVar('KT') VT = TypeVar('VT') class GenericMapping(Mapping[KT, VT]): pass class GenericList(List[T]): pass class Model(GenericModel, Generic[T, KT, VT]): map_field: GenericMapping[KT, VT] list_field: GenericList[T] assert replace_types(Model, {T: bool, KT: str, VT: int}) == Model[bool, str, int] assert replace_types(Model[T, KT, VT], {T: bool, KT: str, VT: int}) == Model[bool, str, int] assert replace_types(Model[T, VT, KT], {T: bool, KT: str, VT: int}) == Model[T, VT, KT][bool, int, str] def test_replace_types_identity_on_unchanged(): T = TypeVar('T') U = TypeVar('U') type_ = List[Union[str, Callable[[list], Optional[str]], U]] assert replace_types(type_, {T: int}) is type_ def test_deep_generic(): T = TypeVar('T') S = TypeVar('S') R = TypeVar('R') class OuterModel(GenericModel, Generic[T, S, R]): a: Dict[R, Optional[List[T]]] b: Optional[Union[S, R]] c: R d: float class InnerModel(GenericModel, Generic[T, R]): c: T d: R class NormalModel(BaseModel): e: int f: str inner_model = InnerModel[int, str] generic_model = OuterModel[inner_model, NormalModel, int] inner_models = [inner_model(c=1, d='a')] generic_model(a={1: inner_models, 2: None}, b=None, c=1, d=1.5) generic_model(a={}, b=NormalModel(e=1, f='a'), c=1, d=1.5) generic_model(a={}, b=1, c=1, d=1.5) assert InnerModel.__concrete__ is False assert inner_model.__concrete__ is True @pytest.mark.skipif(sys.version_info < (3, 10), reason='pep-604 syntax (Ex.: list | int) was added in python3.10') def test_wrapping_resolved_generic_with_pep_604_syntax() -> None: T = TypeVar('T') class InnerModel(GenericModel, Generic[T]): generic: list[T] | None class OuterModel(BaseModel): wrapper: InnerModel[int] with pytest.raises(ValidationError): OuterModel(wrapper={'generic': ['string_instead_of_int']}) assert OuterModel(wrapper={'generic': [1]}).dict() == {'wrapper': {'generic': [1]}} @pytest.mark.skipif(sys.version_info < (3, 10), reason='pep-604 syntax (Ex.: list | int) was added in python3.10') def test_type_propagation_in_deep_generic_with_pep_604_syntax() -> None: T = TypeVar('T') class InnerModel(GenericModel, Generic[T]): generic: list[T] | None class OuterModel(GenericModel, Generic[T]): wrapper: InnerModel[T] | None with pytest.raises(ValidationError): OuterModel[int](wrapper={'generic': ['string_instead_of_int']}) assert OuterModel[int](wrapper={'generic': [1]}) == {'wrapper': {'generic': [1]}} @pytest.mark.skipif(sys.version_info < (3, 10), reason='pep-604 syntax (Ex.: list | int) was added in python3.10') def test_deep_generic_with_pep_604_syntax() -> None: T = TypeVar('T') S = TypeVar('S') R = TypeVar('R') class OuterModel(GenericModel, Generic[T, S, R]): a: Dict[R, list[T] | None] b: S | R | None c: R d: float class 
InnerModel(GenericModel, Generic[T, R]): c: list[T] | None d: list[R] | None class NormalModel(BaseModel): e: int f: str inner_model = InnerModel[int, str] generic_model = OuterModel[inner_model, NormalModel, int] inner_models = [inner_model(c=[1], d=['a'])] generic_model(a={1: inner_models, 2: None}, b=None, c=1, d=1.5) generic_model(a={}, b=NormalModel(e=1, f='a'), c=1, d=1.5) generic_model(a={}, b=1, c=1, d=1.5) assert InnerModel.__concrete__ is False assert inner_model.__concrete__ is True def test_deep_generic_with_inner_typevar(): T = TypeVar('T') class OuterModel(GenericModel, Generic[T]): a: List[T] class InnerModel(OuterModel[T], Generic[T]): pass assert InnerModel[int].__concrete__ is True assert InnerModel.__concrete__ is False with pytest.raises(ValidationError): InnerModel[int](a=['wrong']) assert InnerModel[int](a=['1']).a == [1] def test_deep_generic_with_referenced_generic(): T = TypeVar('T') R = TypeVar('R') class ReferencedModel(GenericModel, Generic[R]): a: R class OuterModel(GenericModel, Generic[T]): a: ReferencedModel[T] class InnerModel(OuterModel[T], Generic[T]): pass assert InnerModel[int].__concrete__ is True assert InnerModel.__concrete__ is False with pytest.raises(ValidationError): InnerModel[int](a={'a': 'wrong'}) assert InnerModel[int](a={'a': 1}).a.a == 1 def test_deep_generic_with_referenced_inner_generic(): T = TypeVar('T') class ReferencedModel(GenericModel, Generic[T]): a: T class OuterModel(GenericModel, Generic[T]): a: Optional[List[Union[ReferencedModel[T], str]]] class InnerModel(OuterModel[T], Generic[T]): pass assert InnerModel[int].__concrete__ is True assert InnerModel.__concrete__ is False with pytest.raises(ValidationError): InnerModel[int](a=['s', {'a': 'wrong'}]) assert InnerModel[int](a=['s', {'a': 1}]).a[1].a == 1 assert InnerModel[int].__fields__['a'].outer_type_ == List[Union[ReferencedModel[int], str]] assert (InnerModel[int].__fields__['a'].sub_fields[0].sub_fields[0].outer_type_.__fields__['a'].outer_type_) == int def test_deep_generic_with_multiple_typevars(): T = TypeVar('T') U = TypeVar('U') class OuterModel(GenericModel, Generic[T]): data: List[T] class InnerModel(OuterModel[T], Generic[U, T]): extra: U ConcreteInnerModel = InnerModel[int, float] assert ConcreteInnerModel.__fields__['data'].outer_type_ == List[float] assert ConcreteInnerModel.__fields__['extra'].outer_type_ == int assert ConcreteInnerModel(data=['1'], extra='2').dict() == {'data': [1.0], 'extra': 2} def test_deep_generic_with_multiple_inheritance(): K = TypeVar('K') V = TypeVar('V') T = TypeVar('T') class OuterModelA(GenericModel, Generic[K, V]): data: Dict[K, V] class OuterModelB(GenericModel, Generic[T]): stuff: List[T] class InnerModel(OuterModelA[K, V], OuterModelB[T], Generic[K, V, T]): extra: int ConcreteInnerModel = InnerModel[int, float, str] assert ConcreteInnerModel.__fields__['data'].outer_type_ == Dict[int, float] assert ConcreteInnerModel.__fields__['stuff'].outer_type_ == List[str] assert ConcreteInnerModel.__fields__['extra'].outer_type_ == int assert ConcreteInnerModel(data={1.1: '5'}, stuff=[123], extra=5).dict() == { 'data': {1: 5}, 'stuff': ['123'], 'extra': 5, } def test_generic_with_referenced_generic_type_1(): T = TypeVar('T') class ModelWithType(GenericModel, Generic[T]): # Type resolves to type origin of "type" which is non-subscriptable for # python < 3.9 so we want to make sure it works for other versions some_type: Type[T] class ReferenceModel(GenericModel, Generic[T]): abstract_base_with_type: ModelWithType[T] ReferenceModel[int] def 
test_generic_with_referenced_nested_typevar(): T = TypeVar('T') class ModelWithType(GenericModel, Generic[T]): # Type resolves to type origin of "collections.abc.Sequence" which is # non-subscriptable for # python < 3.9 so we want to make sure it works for other versions some_type: Sequence[T] class ReferenceModel(GenericModel, Generic[T]): abstract_base_with_type: ModelWithType[T] ReferenceModel[int] def test_generic_with_callable(): T = TypeVar('T') class Model(GenericModel, Generic[T]): # Callable is a test for any type that accepts a list as an argument some_callable: Callable[[Optional[int], T], None] assert Model[str].__concrete__ is True assert Model.__concrete__ is False def test_generic_with_partial_callable(): T = TypeVar('T') U = TypeVar('U') class Model(GenericModel, Generic[T, U]): t: T u: U # Callable is a test for any type that accepts a list as an argument some_callable: Callable[[Optional[int], str], None] assert Model[str, U].__concrete__ is False assert Model[str, U].__parameters__ == (U,) assert Model[str, int].__concrete__ is True def test_generic_recursive_models(create_module): @create_module def module(): from typing import Generic, TypeVar, Union from pydantic.generics import GenericModel T = TypeVar('T') class Model1(GenericModel, Generic[T]): ref: 'Model2[T]' class Model2(GenericModel, Generic[T]): ref: Union[T, Model1[T]] Model1.update_forward_refs() Model1 = module.Model1 Model2 = module.Model2 result = Model1[str].parse_obj(dict(ref=dict(ref=dict(ref=dict(ref=123))))) assert result == Model1(ref=Model2(ref=Model1(ref=Model2(ref='123')))) def test_generic_enum(): T = TypeVar('T') class SomeGenericModel(GenericModel, Generic[T]): some_field: T class SomeStringEnum(str, Enum): A = 'A' B = 'B' class MyModel(BaseModel): my_gen: SomeGenericModel[SomeStringEnum] m = MyModel.parse_obj({'my_gen': {'some_field': 'A'}}) assert m.my_gen.some_field is SomeStringEnum.A def test_generic_literal(): FieldType = TypeVar('FieldType') ValueType = TypeVar('ValueType') class GModel(GenericModel, Generic[FieldType, ValueType]): field: Dict[FieldType, ValueType] Fields = Literal['foo', 'bar'] m = GModel[Fields, str](field={'foo': 'x'}) assert m.dict() == {'field': {'foo': 'x'}} def test_generic_enums(): T = TypeVar('T') class GModel(GenericModel, Generic[T]): x: T class EnumA(str, Enum): a = 'a' class EnumB(str, Enum): b = 'b' class Model(BaseModel): g_a: GModel[EnumA] g_b: GModel[EnumB] assert set(Model.schema()['definitions']) == {'EnumA', 'EnumB', 'GModel_EnumA_', 'GModel_EnumB_'} def test_generic_with_user_defined_generic_field(): T = TypeVar('T') class GenericList(List[T]): pass class Model(GenericModel, Generic[T]): field: GenericList[T] model = Model[int](field=[5]) assert model.field[0] == 5 with pytest.raises(ValidationError): model = Model[int](field=['a']) def test_generic_annotated(): T = TypeVar('T') class SomeGenericModel(GenericModel, Generic[T]): some_field: Annotated[T, Field(alias='the_alias')] SomeGenericModel[str](the_alias='qwe') def test_generic_subclass(): T = TypeVar('T') class A(GenericModel, Generic[T]): ... class B(A[T], Generic[T]): ... assert B[int].__name__ == 'B[int]' assert issubclass(B[int], B) assert issubclass(B[int], A[int]) assert not issubclass(B[int], A[str]) def test_generic_subclass_with_partial_application(): T = TypeVar('T') S = TypeVar('S') class A(GenericModel, Generic[T]): ... class B(A[S], Generic[T, S]): ... 
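# Editor's note (hedged sketch, not part of the original suite): the partial
# application below relies on positional binding of B's parameters. Since B
# declares Generic[T, S], `B[str, T]` binds T -> str and re-binds S to the
# still-free typevar T; subscripting the result with `[int]` then resolves
# that remaining typevar, so S becomes int and the A[S] base becomes A[int]:
#
#     PartiallyAppliedB = B[str, T]   # one parameter left open
#     PartiallyAppliedB[int]          # resolves it; hence issubclass(..., A[int])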
PartiallyAppliedB = B[str, T] assert issubclass(PartiallyAppliedB[int], A[int]) assert not issubclass(PartiallyAppliedB[int], A[str]) assert not issubclass(PartiallyAppliedB[str], A[int]) def test_multilevel_generic_binding(): T = TypeVar('T') S = TypeVar('S') class A(GenericModel, Generic[T, S]): ... class B(A[str, T], Generic[T]): ... assert B[int].__name__ == 'B[int]' assert issubclass(B[int], A[str, int]) assert not issubclass(B[str], A[str, int]) def test_generic_subclass_with_extra_type(): T = TypeVar('T') S = TypeVar('S') class A(GenericModel, Generic[T]): ... class B(A[S], Generic[T, S]): ... assert B[int, str].__name__ == 'B[int, str]', B[int, str].__name__ assert issubclass(B[str, int], B) assert issubclass(B[str, int], A[int]) assert not issubclass(B[int, str], A[int]) def test_multi_inheritance_generic_binding(): T = TypeVar('T') class A(GenericModel, Generic[T]): ... class B(A[int], Generic[T]): ... class C(B[str], Generic[T]): ... assert C[float].__name__ == 'C[float]' assert issubclass(C[float], B[str]) assert not issubclass(C[float], B[int]) assert issubclass(C[float], A[int]) assert not issubclass(C[float], A[str]) def test_parse_generic_json(): T = TypeVar('T') class MessageWrapper(GenericModel, Generic[T]): message: Json[T] class Payload(BaseModel): payload_field: str raw = json.dumps({'payload_field': 'payload'}) record = MessageWrapper[Payload](message=raw) assert isinstance(record.message, Payload) schema = record.schema() assert schema['properties'] == {'message': {'$ref': '#/definitions/Payload'}} assert schema['definitions']['Payload'] == { 'title': 'Payload', 'type': 'object', 'properties': {'payload_field': {'title': 'Payload Field', 'type': 'string'}}, 'required': ['payload_field'], } def memray_limit_memory(limit): if '--memray' in sys.argv: return pytest.mark.limit_memory(limit) else: return pytest.mark.skip(reason='memray not enabled') @memray_limit_memory('100 MB') def test_generics_memory_use(): """See: - https://github.com/pydantic/pydantic/issues/3829 - https://github.com/pydantic/pydantic/pull/4083 - https://github.com/pydantic/pydantic/pull/5052 """ T = TypeVar('T') U = TypeVar('U') V = TypeVar('V') class MyModel(GenericModel, Generic[T, U, V]): message: Json[T] field: Dict[U, V] class Outer(GenericModel, Generic[T]): inner: T types = [ int, str, float, bool, bytes, ] containers = [ List, Tuple, Set, FrozenSet, ] all = [*types, *[container[tp] for container in containers for tp in types]] total = list(itertools.product(all, all, all)) for t1, t2, t3 in total: class Foo(MyModel[t1, t2, t3]): pass class _(Outer[Foo]): pass pydantic-1.10.14/tests/test_hypothesis_plugin.py import typing from datetime import date import pytest import pydantic from pydantic.networks import import_email_validator try: from hypothesis import HealthCheck, given, settings, strategies as st except ImportError: from unittest import mock given = settings = lambda *a, **kw: (lambda f: f) # pass-through decorator HealthCheck = st = mock.Mock() pytestmark = pytest.mark.skipif(True, reason='"hypothesis" not installed') def gen_models(): class MiscModel(pydantic.BaseModel): # Each of these models contains a few related fields; the idea is that # if there's a bug we have neither too many fields to dig through nor # too many models to read. 
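# (Editor's note, hedged: these generated models are consumed below by
# test_can_construct_models_with_all_fields, which asks Hypothesis for
# instances via the pydantic plugin, roughly `data.draw(st.from_type(MiscModel))`.
# For a quick manual check outside the test runner one could do:
#
#     st.from_type(MiscModel).example()  # `.example()` is for interactive use only
#
# assuming the plugin has registered strategies for types like PyObject and
# Color, which is exactly what this test exercises.)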
obj: pydantic.PyObject color: pydantic.color.Color json_any: pydantic.Json class StringsModel(pydantic.BaseModel): card: pydantic.PaymentCardNumber secbytes: pydantic.SecretBytes secstr: pydantic.SecretStr class UUIDsModel(pydantic.BaseModel): uuid1: pydantic.UUID1 uuid3: pydantic.UUID3 uuid4: pydantic.UUID4 uuid5: pydantic.UUID5 class IPvAnyAddress(pydantic.BaseModel): address: pydantic.IPvAnyAddress class IPvAnyInterface(pydantic.BaseModel): interface: pydantic.IPvAnyInterface class IPvAnyNetwork(pydantic.BaseModel): network: pydantic.IPvAnyNetwork class StrictNumbersModel(pydantic.BaseModel): strictbool: pydantic.StrictBool strictint: pydantic.StrictInt strictfloat: pydantic.StrictFloat strictstr: pydantic.StrictStr class NumbersModel(pydantic.BaseModel): posint: pydantic.PositiveInt negint: pydantic.NegativeInt posfloat: pydantic.PositiveFloat negfloat: pydantic.NegativeFloat nonposint: pydantic.NonPositiveInt nonnegint: pydantic.NonNegativeInt nonposfloat: pydantic.NonPositiveFloat nonnegfloat: pydantic.NonNegativeFloat class JsonModel(pydantic.BaseModel): json_int: pydantic.Json[int] json_float: pydantic.Json[float] json_str: pydantic.Json[str] json_int_or_str: pydantic.Json[typing.Union[int, str]] json_list_of_float: pydantic.Json[typing.List[float]] json_pydantic_model: pydantic.Json[pydantic.BaseModel] class ConstrainedNumbersModel(pydantic.BaseModel): conintt: pydantic.conint(gt=10, lt=100) coninte: pydantic.conint(ge=10, le=100) conintmul: pydantic.conint(ge=10, le=100, multiple_of=7) confloatt: pydantic.confloat(gt=10, lt=100) confloate: pydantic.confloat(ge=10, le=100) confloatemul: pydantic.confloat(ge=10, le=100, multiple_of=4.2) confloattmul: pydantic.confloat(gt=10, lt=100, multiple_of=10) condecimalt: pydantic.condecimal(gt=10, lt=100) condecimale: pydantic.condecimal(ge=10, le=100) condecimaltplc: pydantic.condecimal(gt=10, lt=100, decimal_places=5) condecimaleplc: pydantic.condecimal(ge=10, le=100, decimal_places=2) class ConstrainedDateModel(pydantic.BaseModel): condatet: pydantic.condate(gt=date(1980, 1, 1), lt=date(2180, 12, 31)) condatee: pydantic.condate(ge=date(1980, 1, 1), le=date(2180, 12, 31)) future: pydantic.FutureDate past: pydantic.PastDate yield from ( MiscModel, StringsModel, UUIDsModel, IPvAnyAddress, IPvAnyInterface, IPvAnyNetwork, StrictNumbersModel, NumbersModel, JsonModel, ConstrainedNumbersModel, ConstrainedDateModel, ) try: import_email_validator() except ImportError: pass else: class EmailsModel(pydantic.BaseModel): email: pydantic.EmailStr name_email: pydantic.NameEmail yield EmailsModel @pytest.mark.parametrize('model', gen_models()) @settings(suppress_health_check={HealthCheck.too_slow}, deadline=None) @given(data=st.data()) def test_can_construct_models_with_all_fields(data, model): # The value of this test is to confirm that Hypothesis knows how to provide # valid values for each field - otherwise, this would raise ValidationError. instance = data.draw(st.from_type(model)) # We additionally check that the instance really is of type `model`, because # an evil implementation could avoid ValidationError by means of e.g. # `st.register_type_strategy(model, st.none())`, skipping the constructor. 
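# (Editor's note -- a hedged sketch of the "evil implementation" described
# above, i.e. a strategy registration that would dodge validation entirely:
#
#     st.register_type_strategy(model, st.none())  # st.from_type(model) then yields None
#
# `register_type_strategy` is standard Hypothesis API; the isinstance check
# below is what would catch such an override.)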
assert isinstance(instance, model) pydantic-1.10.14/tests/test_json.py import datetime import json import re import sys from dataclasses import dataclass as vanilla_dataclass from decimal import Decimal from enum import Enum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import List, Optional from uuid import UUID import pytest from pydantic import BaseModel, NameEmail, create_model from pydantic.color import Color from pydantic.dataclasses import dataclass as pydantic_dataclass from pydantic.json import pydantic_encoder, timedelta_isoformat from pydantic.types import ConstrainedDecimal, DirectoryPath, FilePath, SecretBytes, SecretStr class MyEnum(Enum): foo = 'bar' snap = 'crackle' @pytest.mark.parametrize( 'input,output', [ (UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8'), '"ebcdab58-6eb8-46fb-a190-d07a33e9eac8"'), (IPv4Address('192.168.0.1'), '"192.168.0.1"'), (Color('#000'), '"black"'), (Color((1, 12, 123)), '"#010c7b"'), (SecretStr('abcd'), '"**********"'), (SecretStr(''), '""'), (SecretBytes(b'xyz'), '"**********"'), (SecretBytes(b''), '""'), (NameEmail('foo bar', 'foobaR@example.com'), '"foo bar <foobaR@example.com>"'), (IPv6Address('::1:0:1'), '"::1:0:1"'), (IPv4Interface('192.168.0.0/24'), '"192.168.0.0/24"'), (IPv6Interface('2001:db00::/120'), '"2001:db00::/120"'), (IPv4Network('192.168.0.0/24'), '"192.168.0.0/24"'), (IPv6Network('2001:db00::/120'), '"2001:db00::/120"'), (datetime.datetime(2032, 1, 1, 1, 1), '"2032-01-01T01:01:00"'), (datetime.datetime(2032, 1, 1, 1, 1, tzinfo=datetime.timezone.utc), '"2032-01-01T01:01:00+00:00"'), (datetime.datetime(2032, 1, 1), '"2032-01-01T00:00:00"'), (datetime.time(12, 34, 56), '"12:34:56"'), (datetime.timedelta(days=12, seconds=34, microseconds=56), '1036834.000056'), (datetime.timedelta(seconds=-1), '-1.0'), ({1, 2, 3}, '[1, 2, 3]'), (frozenset([1, 2, 3]), '[1, 2, 3]'), ((v for v in range(4)), '[0, 1, 2, 3]'), (b'this is bytes', '"this is bytes"'), (Decimal('12.34'), '12.34'), (create_model('BarModel', a='b', c='d')(), '{"a": "b", "c": "d"}'), (MyEnum.foo, '"bar"'), (re.compile('^regex$'), '"^regex$"'), ], ) def test_encoding(input, output): assert output == json.dumps(input, default=pydantic_encoder) @pytest.mark.skipif(sys.platform.startswith('win'), reason='paths look different on windows') def test_path_encoding(tmpdir): class PathModel(BaseModel): path: Path file_path: FilePath dir_path: DirectoryPath tmpdir = Path(tmpdir) file_path = tmpdir / 'bar' file_path.touch() dir_path = tmpdir / 'baz' dir_path.mkdir() model = PathModel(path=Path('/path/test/example/'), file_path=file_path, dir_path=dir_path) expected = f'{{"path": "/path/test/example", "file_path": "{file_path}", "dir_path": "{dir_path}"}}' assert json.dumps(model, default=pydantic_encoder) == expected def test_model_encoding(): class ModelA(BaseModel): x: int y: str class Model(BaseModel): a: float b: bytes c: Decimal d: ModelA m = Model(a=10.2, b='foobar', c=10.2, d={'x': 123, 'y': '123'}) assert m.dict() == {'a': 10.2, 'b': b'foobar', 'c': Decimal('10.2'), 'd': {'x': 123, 'y': '123'}} assert m.json() == '{"a": 10.2, "b": "foobar", "c": 10.2, "d": {"x": 123, "y": "123"}}' assert m.json(exclude={'b'}) == '{"a": 10.2, "c": 10.2, "d": {"x": 123, "y": "123"}}' def test_subclass_encoding(): class SubDate(datetime.datetime): pass class Model(BaseModel): a: datetime.datetime b: SubDate m = Model(a=datetime.datetime(2032, 1, 1, 1, 1), 
b=SubDate(2020, 2, 29, 12, 30)) assert m.dict() == {'a': datetime.datetime(2032, 1, 1, 1, 1), 'b': SubDate(2020, 2, 29, 12, 30)} assert m.json() == '{"a": "2032-01-01T01:01:00", "b": "2020-02-29T12:30:00"}' def test_subclass_custom_encoding(): class SubDate(datetime.datetime): pass class SubDelta(datetime.timedelta): pass class Model(BaseModel): a: SubDate b: SubDelta class Config: json_encoders = { datetime.datetime: lambda v: v.strftime('%a, %d %b %C %H:%M:%S'), datetime.timedelta: timedelta_isoformat, } m = Model(a=SubDate(2032, 1, 1, 1, 1), b=SubDelta(hours=100)) assert m.dict() == {'a': SubDate(2032, 1, 1, 1, 1), 'b': SubDelta(days=4, seconds=14400)} assert m.json() == '{"a": "Thu, 01 Jan 20 01:01:00", "b": "P4DT4H0M0.000000S"}' def test_invalid_model(): class Foo: pass with pytest.raises(TypeError): json.dumps(Foo, default=pydantic_encoder) @pytest.mark.parametrize( 'input,output', [ (datetime.timedelta(days=12, seconds=34, microseconds=56), 'P12DT0H0M34.000056S'), (datetime.timedelta(days=1001, hours=1, minutes=2, seconds=3, microseconds=654_321), 'P1001DT1H2M3.654321S'), (datetime.timedelta(seconds=-1), '-P1DT23H59M59.000000S'), (datetime.timedelta(), 'P0DT0H0M0.000000S'), ], ) def test_iso_timedelta(input, output): assert output == timedelta_isoformat(input) def test_custom_encoder(): class Model(BaseModel): x: datetime.timedelta y: Decimal z: datetime.date class Config: json_encoders = {datetime.timedelta: lambda v: f'{v.total_seconds():0.3f}s', Decimal: lambda v: 'a decimal'} assert Model(x=123, y=5, z='2032-06-01').json() == '{"x": "123.000s", "y": "a decimal", "z": "2032-06-01"}' def test_custom_iso_timedelta(): class Model(BaseModel): x: datetime.timedelta class Config: json_encoders = {datetime.timedelta: timedelta_isoformat} m = Model(x=123) assert m.json() == '{"x": "P0DT0H2M3.000000S"}' def test_con_decimal_encode() -> None: """ Makes sure a decimal with decimal_places = 0, as well as one with places, can handle an encode/decode roundtrip. 
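A hedged illustration of the round trip asserted below, using this test's
own Id/Obj classes: Obj(id=1).json() renders the constrained decimal as the
bare JSON number 1, and Obj.parse_raw('{"id": 1, "price": 0.01}') compares
equal to the original model.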
""" class Id(ConstrainedDecimal): max_digits = 22 decimal_places = 0 ge = 0 class Obj(BaseModel): id: Id price: Decimal = Decimal('0.01') assert Obj(id=1).json() == '{"id": 1, "price": 0.01}' assert Obj.parse_raw('{"id": 1, "price": 0.01}') == Obj(id=1) def test_json_encoder_simple_inheritance(): class Parent(BaseModel): dt: datetime.datetime = datetime.datetime.now() timedt: datetime.timedelta = datetime.timedelta(hours=100) class Config: json_encoders = {datetime.datetime: lambda _: 'parent_encoder'} class Child(Parent): class Config: json_encoders = {datetime.timedelta: lambda _: 'child_encoder'} assert Child().json() == '{"dt": "parent_encoder", "timedt": "child_encoder"}' def test_json_encoder_inheritance_override(): class Parent(BaseModel): dt: datetime.datetime = datetime.datetime.now() class Config: json_encoders = {datetime.datetime: lambda _: 'parent_encoder'} class Child(Parent): class Config: json_encoders = {datetime.datetime: lambda _: 'child_encoder'} assert Child().json() == '{"dt": "child_encoder"}' def test_custom_encoder_arg(): class Model(BaseModel): x: datetime.timedelta m = Model(x=123) assert m.json() == '{"x": 123.0}' assert m.json(encoder=lambda v: '__default__') == '{"x": "__default__"}' def test_encode_dataclass(): @vanilla_dataclass class Foo: bar: int spam: str f = Foo(bar=123, spam='apple pie') assert '{"bar": 123, "spam": "apple pie"}' == json.dumps(f, default=pydantic_encoder) def test_encode_pydantic_dataclass(): @pydantic_dataclass class Foo: bar: int spam: str f = Foo(bar=123, spam='apple pie') assert '{"bar": 123, "spam": "apple pie"}' == json.dumps(f, default=pydantic_encoder) def test_encode_custom_root(): class Model(BaseModel): __root__: List[str] assert Model(__root__=['a', 'b']).json() == '["a", "b"]' def test_custom_decode_encode(): load_calls, dump_calls = 0, 0 def custom_loads(s): nonlocal load_calls load_calls += 1 return json.loads(s.strip('$')) def custom_dumps(s, default=None, **kwargs): nonlocal dump_calls dump_calls += 1 return json.dumps(s, default=default, indent=2) class Model(BaseModel): a: int b: str class Config: json_loads = custom_loads json_dumps = custom_dumps m = Model.parse_raw('${"a": 1, "b": "foo"}$$') assert m.dict() == {'a': 1, 'b': 'foo'} assert m.json() == '{\n "a": 1,\n "b": "foo"\n}' def test_json_nested_encode_models(): class Phone(BaseModel): manufacturer: str number: int class User(BaseModel): name: str SSN: int birthday: datetime.datetime phone: Phone friend: Optional['User'] = None # noqa: F821 # https://github.com/PyCQA/pyflakes/issues/567 class Config: json_encoders = { datetime.datetime: lambda v: v.timestamp(), Phone: lambda v: v.number if v else None, 'User': lambda v: v.SSN, } User.update_forward_refs() iphone = Phone(manufacturer='Apple', number=18002752273) galaxy = Phone(manufacturer='Samsung', number=18007267864) timon = User( name='Timon', SSN=123, birthday=datetime.datetime(1993, 6, 1, tzinfo=datetime.timezone.utc), phone=iphone ) pumbaa = User( name='Pumbaa', SSN=234, birthday=datetime.datetime(1993, 5, 15, tzinfo=datetime.timezone.utc), phone=galaxy ) timon.friend = pumbaa assert iphone.json(models_as_dict=False) == '{"manufacturer": "Apple", "number": 18002752273}' assert ( pumbaa.json(models_as_dict=False) == '{"name": "Pumbaa", "SSN": 234, "birthday": 737424000.0, "phone": 18007267864, "friend": null}' ) assert ( timon.json(models_as_dict=False) == '{"name": "Timon", "SSN": 123, "birthday": 738892800.0, "phone": 18002752273, "friend": 234}' ) def test_custom_encode_fallback_basemodel(): class 
MyExoticType: pass def custom_encoder(o): if isinstance(o, MyExoticType): return 'exo' raise TypeError('not serialisable') class Foo(BaseModel): x: MyExoticType class Config: arbitrary_types_allowed = True class Bar(BaseModel): foo: Foo assert Bar(foo=Foo(x=MyExoticType())).json(encoder=custom_encoder) == '{"foo": {"x": "exo"}}' def test_custom_encode_error(): class MyExoticType: pass def custom_encoder(o): raise TypeError('not serialisable') class Foo(BaseModel): x: MyExoticType class Config: arbitrary_types_allowed = True with pytest.raises(TypeError, match='not serialisable'): Foo(x=MyExoticType()).json(encoder=custom_encoder) def test_recursive(): class Model(BaseModel): value: Optional[str] nested: Optional[BaseModel] assert Model(value=None, nested=Model(value=None)).json(exclude_none=True) == '{"nested": {}}' pydantic-1.10.14/tests/test_main.py import sys from collections import defaultdict from copy import deepcopy from enum import Enum from typing import ( Any, Callable, ClassVar, Counter, DefaultDict, Dict, List, Mapping, Optional, Type, TypeVar, get_type_hints, ) from uuid import UUID, uuid4 import pytest from typing_extensions import Annotated from pydantic import ( BaseConfig, BaseModel, ConfigError, Extra, Field, NoneBytes, NoneStr, PrivateAttr, Required, SecretStr, ValidationError, constr, root_validator, validator, ) from pydantic.typing import Final, Literal def test_success(): # same as below but defined here so class definition occurs inside the test class Model(BaseModel): a: float b: int = 10 m = Model(a=10.2) assert m.a == 10.2 assert m.b == 10 class UltraSimpleModel(BaseModel): a: float b: int = 10 def test_ultra_simple_missing(): with pytest.raises(ValidationError) as exc_info: UltraSimpleModel() assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_ultra_simple_failed(): with pytest.raises(ValidationError) as exc_info: UltraSimpleModel(a='x', b='x') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] def test_ultra_simple_repr(): m = UltraSimpleModel(a=10.2) assert str(m) == 'a=10.2 b=10' assert repr(m) == 'UltraSimpleModel(a=10.2, b=10)' assert repr(m.__fields__['a']) == "ModelField(name='a', type=float, required=True)" assert repr(m.__fields__['b']) == "ModelField(name='b', type=int, required=False, default=10)" assert dict(m) == {'a': 10.2, 'b': 10} assert m.dict() == {'a': 10.2, 'b': 10} assert m.json() == '{"a": 10.2, "b": 10}' assert str(m) == 'a=10.2 b=10' def test_default_factory_field(): def myfunc(): return 1 class Model(BaseModel): a: int = Field(default_factory=myfunc) m = Model() assert str(m) == 'a=1' assert ( repr(m.__fields__['a']) == "ModelField(name='a', type=int, required=False, default_factory='<function myfunc>')" ) assert dict(m) == {'a': 1} assert m.json() == '{"a": 1}' def test_default_factory_no_type_field(): def myfunc(): return 1 with pytest.raises(ConfigError) as e: class Model(BaseModel): a = Field(default_factory=myfunc) assert str(e.value) == "you need to set the type of field 'a' when using `default_factory`" def test_comparing(): m = UltraSimpleModel(a=10.2, b='100') assert m == {'a': 10.2, 'b': 100} assert m == UltraSimpleModel(a=10.2, b=100) def test_nullable_strings_success(): class NoneCheckModel(BaseModel): existing_str_value = 'foo' required_str_value: 
str = ... required_str_none_value: NoneStr = ... existing_bytes_value = b'foo' required_bytes_value: bytes = ... required_bytes_none_value: NoneBytes = ... m = NoneCheckModel( required_str_value='v1', required_str_none_value=None, required_bytes_value='v2', required_bytes_none_value=None ) assert m.required_str_value == 'v1' assert m.required_str_none_value is None assert m.required_bytes_value == b'v2' assert m.required_bytes_none_value is None def test_nullable_strings_fails(): class NoneCheckModel(BaseModel): existing_str_value = 'foo' required_str_value: str = ... required_str_none_value: NoneStr = ... existing_bytes_value = b'foo' required_bytes_value: bytes = ... required_bytes_none_value: NoneBytes = ... with pytest.raises(ValidationError) as exc_info: NoneCheckModel( required_str_value=None, required_str_none_value=None, required_bytes_value=None, required_bytes_none_value=None, ) assert exc_info.value.errors() == [ {'loc': ('required_str_value',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}, { 'loc': ('required_bytes_value',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed', }, ] class RecursiveModel(BaseModel): grape: bool = ... banana: UltraSimpleModel = ... def test_recursion(): m = RecursiveModel(grape=1, banana={'a': 1}) assert m.grape is True assert m.banana.a == 1.0 assert m.banana.b == 10 assert repr(m) == 'RecursiveModel(grape=True, banana=UltraSimpleModel(a=1.0, b=10))' def test_recursion_fails(): with pytest.raises(ValidationError): RecursiveModel(grape=1, banana=123) def test_not_required(): class Model(BaseModel): a: float = None assert Model(a=12.2).a == 12.2 assert Model().a is None assert Model(a=None).a is None def test_infer_type(): class Model(BaseModel): a = False b = '' c = 0 assert Model().a is False assert Model().b == '' assert Model().c == 0 def test_allow_extra(): class Model(BaseModel): a: float = ... class Config: extra = Extra.allow assert Model(a='10.2', b=12).dict() == {'a': 10.2, 'b': 12} def test_allow_extra_repr(): class Model(BaseModel): a: float = ... 
class Config: extra = Extra.allow assert str(Model(a='10.2', b=12)) == 'a=10.2 b=12' def test_forbidden_extra_success(): class ForbiddenExtra(BaseModel): foo = 'whatever' class Config: extra = Extra.forbid m = ForbiddenExtra() assert m.foo == 'whatever' m = ForbiddenExtra(foo=1) assert m.foo == '1' def test_forbidden_extra_fails(): class ForbiddenExtra(BaseModel): foo = 'whatever' class Config: extra = Extra.forbid with pytest.raises(ValidationError) as exc_info: ForbiddenExtra(foo='ok', bar='wrong', spam='xx') assert exc_info.value.errors() == [ {'loc': ('bar',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'}, {'loc': ('spam',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'}, ] def test_disallow_mutation(): class Model(BaseModel): a: float model = Model(a=0.2) with pytest.raises(ValueError, match='"Model" object has no field "b"'): model.b = 2 def test_extra_allowed(): class Model(BaseModel): a: float class Config: extra = Extra.allow model = Model(a=0.2, b=0.1) assert model.b == 0.1 assert not hasattr(model, 'c') model.c = 1 assert hasattr(model, 'c') assert model.c == 1 def test_extra_ignored(): class Model(BaseModel): a: float class Config: extra = Extra.ignore model = Model(a=0.2, b=0.1) assert not hasattr(model, 'b') with pytest.raises(ValueError, match='"Model" object has no field "c"'): model.c = 1 def test_set_attr(): m = UltraSimpleModel(a=10.2) assert m.dict() == {'a': 10.2, 'b': 10} m.b = 20 assert m.dict() == {'a': 10.2, 'b': 20} def test_set_attr_invalid(): class UltraSimpleModel(BaseModel): a: float = ... b: int = 10 m = UltraSimpleModel(a=10.2) assert m.dict() == {'a': 10.2, 'b': 10} with pytest.raises(ValueError) as exc_info: m.c = 20 assert '"UltraSimpleModel" object has no field "c"' in exc_info.value.args[0] def test_any(): class AnyModel(BaseModel): a: Any = 10 b: object = 20 m = AnyModel() assert m.a == 10 assert m.b == 20 m = AnyModel(a='foobar', b='barfoo') assert m.a == 'foobar' assert m.b == 'barfoo' def test_alias(): class SubModel(BaseModel): c = 'barfoo' class Config: fields = {'c': {'alias': '_c'}} class Model(BaseModel): a = 'foobar' b: SubModel = SubModel() class Config: fields = {'a': {'alias': '_a'}} assert Model().a == 'foobar' assert Model().b.c == 'barfoo' assert Model().dict() == {'a': 'foobar', 'b': {'c': 'barfoo'}} assert Model(_a='different').a == 'different' assert Model(b={'_c': 'different'}).b.c == 'different' assert Model(_a='different', b={'_c': 'different'}).dict() == {'a': 'different', 'b': {'c': 'different'}} assert Model(_a='different', b={'_c': 'different'}).dict(by_alias=True) == { '_a': 'different', 'b': {'_c': 'different'}, } def test_population_by_field_name(): class Model(BaseModel): a: str class Config: allow_population_by_field_name = True fields = {'a': {'alias': '_a'}} assert Model(a='different').a == 'different' assert Model(a='different').dict() == {'a': 'different'} assert Model(a='different').dict(by_alias=True) == {'_a': 'different'} def test_field_order(): class Model(BaseModel): c: float b: int = 10 a: str d: dict = {} assert list(Model.__fields__.keys()) == ['c', 'b', 'a', 'd'] def test_required(): # same as below but defined here so class definition occurs inside the test class Model(BaseModel): a: float = Required b: int = 10 m = Model(a=10.2) assert m.dict() == dict(a=10.2, b=10) with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_mutability(): class 
TestModel(BaseModel): a: int = 10 class Config: allow_mutation = True extra = Extra.forbid frozen = False m = TestModel() assert m.a == 10 m.a = 11 assert m.a == 11 @pytest.mark.parametrize('allow_mutation_, frozen_', [(False, False), (False, True), (True, True)]) def test_immutability(allow_mutation_, frozen_): class TestModel(BaseModel): a: int = 10 class Config: allow_mutation = allow_mutation_ extra = Extra.forbid frozen = frozen_ m = TestModel() assert m.a == 10 with pytest.raises(TypeError) as exc_info: m.a = 11 assert '"TestModel" is immutable and does not support item assignment' in exc_info.value.args[0] def test_not_frozen_are_not_hashable(): class TestModel(BaseModel): a: int = 10 m = TestModel() with pytest.raises(TypeError) as exc_info: hash(m) assert "unhashable type: 'TestModel'" in exc_info.value.args[0] def test_with_declared_hash(): class Foo(BaseModel): x: int def __hash__(self): return self.x**2 class Bar(Foo): y: int def __hash__(self): return self.y**3 class Buz(Bar): z: int assert hash(Foo(x=2)) == 4 assert hash(Bar(x=2, y=3)) == 27 assert hash(Buz(x=2, y=3, z=4)) == 27 def test_frozen_with_hashable_fields_are_hashable(): class TestModel(BaseModel): a: int = 10 class Config: frozen = True m = TestModel() assert m.__hash__ is not None assert isinstance(hash(m), int) def test_frozen_with_unhashable_fields_are_not_hashable(): class TestModel(BaseModel): a: int = 10 y: List[int] = [1, 2, 3] class Config: frozen = True m = TestModel() with pytest.raises(TypeError) as exc_info: hash(m) assert "unhashable type: 'list'" in exc_info.value.args[0] def test_hash_function_give_different_result_for_different_object(): class TestModel(BaseModel): a: int = 10 class Config: frozen = True m = TestModel() m2 = TestModel() m3 = TestModel(a=11) assert hash(m) == hash(m2) assert hash(m) != hash(m3) # Redefined `TestModel` class TestModel(BaseModel): a: int = 10 class Config: frozen = True m4 = TestModel() assert hash(m) != hash(m4) def test_const_validates(): class Model(BaseModel): a: int = Field(3, const=True) m = Model(a=3) assert m.a == 3 def test_const_uses_default(): class Model(BaseModel): a: int = Field(3, const=True) m = Model() assert m.a == 3 def test_const_validates_after_type_validators(): # issue #1410 class Model(BaseModel): a: int = Field(3, const=True) m = Model(a='3') assert m.a == 3 def test_const_with_wrong_value(): class Model(BaseModel): a: int = Field(3, const=True) with pytest.raises(ValidationError) as exc_info: Model(a=4) assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'unexpected value; permitted: 3', 'type': 'value_error.const', 'ctx': {'given': 4, 'permitted': [3]}, } ] def test_const_with_validator(): class Model(BaseModel): a: int = Field(3, const=True) @validator('a') def validate(v): return v with pytest.raises(ValidationError) as exc_info: Model(a=4) assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'unexpected value; permitted: 3', 'type': 'value_error.const', 'ctx': {'given': 4, 'permitted': [3]}, } ] def test_const_list(): class SubModel(BaseModel): b: int class Model(BaseModel): a: List[SubModel] = Field([SubModel(b=1), SubModel(b=2), SubModel(b=3)], const=True) b: List[SubModel] = Field([{'b': 4}, {'b': 5}, {'b': 6}], const=True) m = Model() assert m.a == [SubModel(b=1), SubModel(b=2), SubModel(b=3)] assert m.b == [SubModel(b=4), SubModel(b=5), SubModel(b=6)] assert m.schema() == { 'definitions': { 'SubModel': { 'properties': {'b': {'title': 'B', 'type': 'integer'}}, 'required': ['b'], 'title': 'SubModel', 'type': 'object', } 
}, 'properties': { 'a': { 'const': [SubModel(b=1), SubModel(b=2), SubModel(b=3)], 'default': [{'b': 1}, {'b': 2}, {'b': 3}], 'items': {'$ref': '#/definitions/SubModel'}, 'title': 'A', 'type': 'array', }, 'b': { 'const': [{'b': 4}, {'b': 5}, {'b': 6}], 'default': [{'b': 4}, {'b': 5}, {'b': 6}], 'items': {'$ref': '#/definitions/SubModel'}, 'title': 'B', 'type': 'array', }, }, 'title': 'Model', 'type': 'object', } def test_const_list_with_wrong_value(): class SubModel(BaseModel): b: int class Model(BaseModel): a: List[SubModel] = Field([SubModel(b=1), SubModel(b=2), SubModel(b=3)], const=True) b: List[SubModel] = Field([{'b': 4}, {'b': 5}, {'b': 6}], const=True) with pytest.raises(ValidationError) as exc_info: Model(a=[{'b': 3}, {'b': 1}, {'b': 2}], b=[{'b': 6}, {'b': 5}]) assert exc_info.value.errors() == [ { 'ctx': { 'given': [{'b': 3}, {'b': 1}, {'b': 2}], 'permitted': [[SubModel(b=1), SubModel(b=2), SubModel(b=3)]], }, 'loc': ('a',), 'msg': 'unexpected value; permitted: [SubModel(b=1), SubModel(b=2), SubModel(b=3)]', 'type': 'value_error.const', }, { 'ctx': {'given': [{'b': 6}, {'b': 5}], 'permitted': [[{'b': 4}, {'b': 5}, {'b': 6}]]}, 'loc': ('b',), 'msg': "unexpected value; permitted: [{'b': 4}, {'b': 5}, {'b': 6}]", 'type': 'value_error.const', }, ] assert exc_info.value.json().startswith('[') with pytest.raises(ValidationError) as exc_info: Model(a=[SubModel(b=3), SubModel(b=1), SubModel(b=2)], b=[SubModel(b=3), SubModel(b=1)]) assert exc_info.value.errors() == [ { 'ctx': { 'given': [SubModel(b=3), SubModel(b=1), SubModel(b=2)], 'permitted': [[SubModel(b=1), SubModel(b=2), SubModel(b=3)]], }, 'loc': ('a',), 'msg': 'unexpected value; permitted: [SubModel(b=1), SubModel(b=2), SubModel(b=3)]', 'type': 'value_error.const', }, { 'ctx': {'given': [SubModel(b=3), SubModel(b=1)], 'permitted': [[{'b': 4}, {'b': 5}, {'b': 6}]]}, 'loc': ('b',), 'msg': "unexpected value; permitted: [{'b': 4}, {'b': 5}, {'b': 6}]", 'type': 'value_error.const', }, ] assert exc_info.value.json().startswith('[') def test_const_validation_json_serializable(): class SubForm(BaseModel): field: int class Form(BaseModel): field1: SubForm = Field({'field': 2}, const=True) field2: List[SubForm] = Field([{'field': 2}], const=True) with pytest.raises(ValidationError) as exc_info: # Fails Form(field1={'field': 1}, field2=[{'field': 1}]) # This should not raise an Json error exc_info.value.json() class ValidateAssignmentModel(BaseModel): a: int = 2 b: constr(min_length=1) class Config: validate_assignment = True def test_validating_assignment_pass(): p = ValidateAssignmentModel(a=5, b='hello') p.a = 2 assert p.a == 2 assert p.dict() == {'a': 2, 'b': 'hello'} p.b = 'hi' assert p.b == 'hi' assert p.dict() == {'a': 2, 'b': 'hi'} def test_validating_assignment_fail(): p = ValidateAssignmentModel(a=5, b='hello') with pytest.raises(ValidationError) as exc_info: p.a = 'b' assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] with pytest.raises(ValidationError) as exc_info: p.b = '' assert exc_info.value.errors() == [ { 'loc': ('b',), 'msg': 'ensure this value has at least 1 characters', 'type': 'value_error.any_str.min_length', 'ctx': {'limit_value': 1}, } ] def test_validating_assignment_pre_root_validator_fail(): class Model(BaseModel): current_value: float = Field(..., alias='current') max_value: float class Config: validate_assignment = True @root_validator(pre=True) def values_are_not_string(cls, values): if any(isinstance(x, str) for x in values.values()): 
raise ValueError('values cannot be a string') return values m = Model(current=100, max_value=200) with pytest.raises(ValidationError) as exc_info: m.current_value = '100' assert exc_info.value.errors() == [ { 'loc': ('__root__',), 'msg': 'values cannot be a string', 'type': 'value_error', } ] def test_validating_assignment_post_root_validator_fail(): class Model(BaseModel): current_value: float = Field(..., alias='current') max_value: float class Config: validate_assignment = True @root_validator def current_lessequal_max(cls, values): current_value = values.get('current_value') max_value = values.get('max_value') if current_value > max_value: raise ValueError('current_value cannot be greater than max_value') return values @root_validator(skip_on_failure=True) def current_lessequal_300(cls, values): current_value = values.get('current_value') if current_value > 300: raise ValueError('current_value cannot be greater than 300') return values @root_validator def current_lessequal_500(cls, values): current_value = values.get('current_value') if current_value > 500: raise ValueError('current_value cannot be greater than 500') return values m = Model(current=100, max_value=200) m.current_value = '100' with pytest.raises(ValidationError) as exc_info: m.current_value = 1000 assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'current_value cannot be greater than max_value', 'type': 'value_error'}, { 'loc': ('__root__',), 'msg': 'current_value cannot be greater than 500', 'type': 'value_error', }, ] def test_root_validator_many_values_change(): """It should run root_validator on assignment and update ALL concerned fields""" class Rectangle(BaseModel): width: float height: float area: float = None class Config: validate_assignment = True @root_validator def set_area(cls, values): values['area'] = values['width'] * values['height'] return values r = Rectangle(width=1, height=1) assert r.area == 1 r.height = 5 assert r.area == 5 def test_enum_values(): FooEnum = Enum('FooEnum', {'foo': 'foo', 'bar': 'bar'}) class Model(BaseModel): foo: FooEnum = None class Config: use_enum_values = True m = Model(foo='foo') # this is the actual value, so it has no "value" attribute assert not isinstance(m.foo, FooEnum) assert m.foo == 'foo' def test_literal_enum_values(): FooEnum = Enum('FooEnum', {'foo': 'foo_value', 'bar': 'bar_value'}) class Model(BaseModel): baz: Literal[FooEnum.foo] boo: str = 'hoo' class Config: use_enum_values = True m = Model(baz=FooEnum.foo) assert m.dict() == {'baz': 'foo_value', 'boo': 'hoo'} assert m.baz.value == 'foo_value' with pytest.raises(ValidationError) as exc_info: Model(baz=FooEnum.bar) assert exc_info.value.errors() == [ { 'loc': ('baz',), 'msg': "unexpected value; permitted: <FooEnum.foo: 'foo_value'>", 'type': 'value_error.const', 'ctx': {'given': FooEnum.bar, 'permitted': (FooEnum.foo,)}, }, ] def test_enum_raw(): FooEnum = Enum('FooEnum', {'foo': 'foo', 'bar': 'bar'}) class Model(BaseModel): foo: FooEnum = None m = Model(foo='foo') assert isinstance(m.foo, FooEnum) assert m.foo != 'foo' assert m.foo.value == 'foo' def test_set_tuple_values(): class Model(BaseModel): foo: set bar: tuple m = Model(foo=['a', 'b'], bar=['c', 'd']) assert m.foo == {'a', 'b'} assert m.bar == ('c', 'd') assert m.dict() == {'foo': {'a', 'b'}, 'bar': ('c', 'd')} def test_default_copy(): class User(BaseModel): friends: List[int] = [] u1 = User() u2 = User() assert u1.friends is not u2.friends class ArbitraryType: pass def test_arbitrary_type_allowed_validation_success(): class ArbitraryTypeAllowedModel(BaseModel): t: 
ArbitraryType class Config: arbitrary_types_allowed = True arbitrary_type_instance = ArbitraryType() m = ArbitraryTypeAllowedModel(t=arbitrary_type_instance) assert m.t == arbitrary_type_instance def test_arbitrary_type_allowed_validation_fails(): class ArbitraryTypeAllowedModel(BaseModel): t: ArbitraryType class Config: arbitrary_types_allowed = True class C: pass with pytest.raises(ValidationError) as exc_info: ArbitraryTypeAllowedModel(t=C()) assert exc_info.value.errors() == [ { 'loc': ('t',), 'msg': 'instance of ArbitraryType expected', 'type': 'type_error.arbitrary_type', 'ctx': {'expected_arbitrary_type': 'ArbitraryType'}, } ] def test_arbitrary_types_not_allowed(): with pytest.raises(RuntimeError) as exc_info: class ArbitraryTypeNotAllowedModel(BaseModel): t: ArbitraryType assert exc_info.value.args[0].startswith('no validator found for') def test_type_type_validation_success(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] arbitrary_type_class = ArbitraryType m = ArbitraryClassAllowedModel(t=arbitrary_type_class) assert m.t == arbitrary_type_class def test_type_type_subclass_validation_success(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] class ArbitrarySubType(ArbitraryType): pass arbitrary_type_class = ArbitrarySubType m = ArbitraryClassAllowedModel(t=arbitrary_type_class) assert m.t == arbitrary_type_class def test_type_type_validation_fails_for_instance(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] class C: pass with pytest.raises(ValidationError) as exc_info: ArbitraryClassAllowedModel(t=C) assert exc_info.value.errors() == [ { 'loc': ('t',), 'msg': 'subclass of ArbitraryType expected', 'type': 'type_error.subclass', 'ctx': {'expected_class': 'ArbitraryType'}, } ] def test_type_type_validation_fails_for_basic_type(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] with pytest.raises(ValidationError) as exc_info: ArbitraryClassAllowedModel(t=1) assert exc_info.value.errors() == [ { 'loc': ('t',), 'msg': 'subclass of ArbitraryType expected', 'type': 'type_error.subclass', 'ctx': {'expected_class': 'ArbitraryType'}, } ] @pytest.mark.parametrize('bare_type', [type, Type]) def test_bare_type_type_validation_success(bare_type): class ArbitraryClassAllowedModel(BaseModel): t: bare_type arbitrary_type_class = ArbitraryType m = ArbitraryClassAllowedModel(t=arbitrary_type_class) assert m.t == arbitrary_type_class @pytest.mark.parametrize('bare_type', [type, Type]) def test_bare_type_type_validation_fails(bare_type): class ArbitraryClassAllowedModel(BaseModel): t: bare_type arbitrary_type = ArbitraryType() with pytest.raises(ValidationError) as exc_info: ArbitraryClassAllowedModel(t=arbitrary_type) assert exc_info.value.errors() == [{'loc': ('t',), 'msg': 'a class is expected', 'type': 'type_error.class'}] def test_annotation_field_name_shadows_attribute(): with pytest.raises(NameError): # When defining a model that has an attribute with the name of a built-in attribute, an exception is raised class BadModel(BaseModel): schema: str # This conflicts with the BaseModel's schema() class method def test_value_field_name_shadows_attribute(): # When defining a model that has an attribute with the name of a built-in attribute, an exception is raised with pytest.raises(NameError): class BadModel(BaseModel): schema = 'abc' # This conflicts with the BaseModel's schema() class method def test_class_var(): class MyModel(BaseModel): a: ClassVar b: ClassVar[int] = 1 c: int = 2 assert 
list(MyModel.__fields__.keys()) == ['c'] class MyOtherModel(MyModel): a = '' b = 2 assert list(MyOtherModel.__fields__.keys()) == ['c'] def test_fields_set(): class MyModel(BaseModel): a: int b: int = 2 m = MyModel(a=5) assert m.__fields_set__ == {'a'} m.b = 2 assert m.__fields_set__ == {'a', 'b'} m = MyModel(a=5, b=2) assert m.__fields_set__ == {'a', 'b'} def test_exclude_unset_dict(): class MyModel(BaseModel): a: int b: int = 2 m = MyModel(a=5) assert m.dict(exclude_unset=True) == {'a': 5} m = MyModel(a=5, b=3) assert m.dict(exclude_unset=True) == {'a': 5, 'b': 3} def test_exclude_unset_recursive(): class ModelA(BaseModel): a: int b: int = 1 class ModelB(BaseModel): c: int d: int = 2 e: ModelA m = ModelB(c=5, e={'a': 0}) assert m.dict() == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}} assert m.dict(exclude_unset=True) == {'c': 5, 'e': {'a': 0}} assert dict(m) == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}} def test_dict_exclude_unset_populated_by_alias(): class MyModel(BaseModel): a: str = Field('default', alias='alias_a') b: str = Field('default', alias='alias_b') class Config: allow_population_by_field_name = True m = MyModel(alias_a='a') assert m.dict(exclude_unset=True) == {'a': 'a'} assert m.dict(exclude_unset=True, by_alias=True) == {'alias_a': 'a'} def test_dict_exclude_unset_populated_by_alias_with_extra(): class MyModel(BaseModel): a: str = Field('default', alias='alias_a') b: str = Field('default', alias='alias_b') class Config: extra = 'allow' m = MyModel(alias_a='a', c='c') assert m.dict(exclude_unset=True) == {'a': 'a', 'c': 'c'} assert m.dict(exclude_unset=True, by_alias=True) == {'alias_a': 'a', 'c': 'c'} def test_exclude_defaults(): class Model(BaseModel): mandatory: str nullable_mandatory: Optional[str] = ... facultative: str = 'x' nullable_facultative: Optional[str] = None m = Model(mandatory='a', nullable_mandatory=None) assert m.dict(exclude_defaults=True) == { 'mandatory': 'a', 'nullable_mandatory': None, } m = Model(mandatory='a', nullable_mandatory=None, facultative='y', nullable_facultative=None) assert m.dict(exclude_defaults=True) == { 'mandatory': 'a', 'nullable_mandatory': None, 'facultative': 'y', } m = Model(mandatory='a', nullable_mandatory=None, facultative='y', nullable_facultative='z') assert m.dict(exclude_defaults=True) == { 'mandatory': 'a', 'nullable_mandatory': None, 'facultative': 'y', 'nullable_facultative': 'z', } def test_dir_fields(): class MyModel(BaseModel): attribute_a: int attribute_b: int = 2 m = MyModel(attribute_a=5) assert 'dict' in dir(m) assert 'json' in dir(m) assert 'attribute_a' in dir(m) assert 'attribute_b' in dir(m) def test_dict_with_extra_keys(): class MyModel(BaseModel): a: str = Field(None, alias='alias_a') class Config: extra = Extra.allow m = MyModel(extra_key='extra') assert m.dict() == {'a': None, 'extra_key': 'extra'} assert m.dict(by_alias=True) == {'alias_a': None, 'extra_key': 'extra'} def test_root(): class MyModel(BaseModel): __root__: str m = MyModel(__root__='a') assert m.dict() == {'__root__': 'a'} assert m.__root__ == 'a' def test_root_list(): class MyModel(BaseModel): __root__: List[str] m = MyModel(__root__=['a']) assert m.dict() == {'__root__': ['a']} assert m.__root__ == ['a'] def test_root_nested(): class MyList(BaseModel): __root__: List[str] class MyModel(BaseModel): my_list: MyList my_list = MyList(__root__=['pika']) assert MyModel(my_list=my_list).dict() == {'my_list': ['pika']} def test_encode_nested_root(): house_dict = {'pets': ['dog', 'cats']} class Pets(BaseModel): __root__: List[str] class House(BaseModel): 
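# (note, not in the original test) a field annotated with a custom-root model; the .dict() asserts below check that Pets collapses back to its plain root list on export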
pets: Pets assert House(**house_dict).dict() == house_dict class PetsDeep(BaseModel): __root__: Pets class HouseDeep(BaseModel): pets: PetsDeep assert HouseDeep(**house_dict).dict() == house_dict def test_root_failed(): with pytest.raises(ValueError, match='__root__ cannot be mixed with other fields'): class MyModel(BaseModel): __root__: str a: str def test_root_undefined_failed(): class MyModel(BaseModel): a: List[str] with pytest.raises(ValidationError) as exc_info: MyModel(__root__=['a']) assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_parse_root_as_mapping(): class MyModel(BaseModel): __root__: Mapping[str, str] assert MyModel.parse_obj({1: 2}).__root__ == {'1': '2'} with pytest.raises(ValidationError) as exc_info: MyModel.parse_obj({'__root__': {'1': '2'}}) assert exc_info.value.errors() == [ {'loc': ('__root__', '__root__'), 'msg': 'str type expected', 'type': 'type_error.str'} ] def test_parse_obj_non_mapping_root(): class MyModel(BaseModel): __root__: List[str] assert MyModel.parse_obj(['a']).__root__ == ['a'] assert MyModel.parse_obj({'__root__': ['a']}).__root__ == ['a'] with pytest.raises(ValidationError) as exc_info: MyModel.parse_obj({'__root__': ['a'], 'other': 1}) assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'value is not a valid list', 'type': 'type_error.list'} ] def test_parse_obj_nested_root(): class Pokemon(BaseModel): name: str level: int class Pokemons(BaseModel): __root__: List[Pokemon] class Player(BaseModel): rank: int pokemons: Pokemons class Players(BaseModel): __root__: Dict[str, Player] class Tournament(BaseModel): players: Players city: str payload = { 'players': { 'Jane': { 'rank': 1, 'pokemons': [ { 'name': 'Pikachu', 'level': 100, }, { 'name': 'Bulbasaur', 'level': 13, }, ], }, 'Tarzan': { 'rank': 2, 'pokemons': [ { 'name': 'Jigglypuff', 'level': 7, }, ], }, }, 'city': 'Qwerty', } tournament = Tournament.parse_obj(payload) assert tournament.city == 'Qwerty' assert len(tournament.players.__root__) == 2 assert len(tournament.players.__root__['Jane'].pokemons.__root__) == 2 assert tournament.players.__root__['Jane'].pokemons.__root__[0].name == 'Pikachu' def test_untouched_types(): from pydantic import BaseModel class _ClassPropertyDescriptor: def __init__(self, getter): self.getter = getter def __get__(self, instance, owner): return self.getter(owner) classproperty = _ClassPropertyDescriptor class Model(BaseModel): class Config: keep_untouched = (classproperty,) @classproperty def class_name(cls) -> str: return cls.__name__ assert Model.class_name == 'Model' assert Model().class_name == 'Model' def test_custom_types_fail_without_keep_untouched(): from pydantic import BaseModel class _ClassPropertyDescriptor: def __init__(self, getter): self.getter = getter def __get__(self, instance, owner): return self.getter(owner) classproperty = _ClassPropertyDescriptor with pytest.raises(RuntimeError) as e: class Model(BaseModel): @classproperty def class_name(cls) -> str: return cls.__name__ Model.class_name assert str(e.value) == ( "no validator found for <class 'tests.test_main.test_custom_types_fail_without_keep_untouched.<locals>."
"_ClassPropertyDescriptor'>, see `arbitrary_types_allowed` in Config" ) class Model(BaseModel): class Config: arbitrary_types_allowed = True @classproperty def class_name(cls) -> str: return cls.__name__ with pytest.raises(AttributeError) as e: Model.class_name assert str(e.value) == "type object 'Model' has no attribute 'class_name'" def test_model_iteration(): class Foo(BaseModel): a: int = 1 b: int = 2 class Bar(BaseModel): c: int d: Foo m = Bar(c=3, d={}) assert m.dict() == {'c': 3, 'd': {'a': 1, 'b': 2}} assert list(m) == [('c', 3), ('d', Foo())] assert dict(m) == {'c': 3, 'd': Foo()} @pytest.mark.parametrize( 'exclude,expected,raises_match', [ pytest.param( {'foos': {0: {'a'}, 1: {'a'}}}, {'c': 3, 'foos': [{'b': 2}, {'b': 4}]}, None, id='excluding fields of indexed list items', ), pytest.param( {'foos': {'a'}}, TypeError, 'expected integer keys', id='should fail trying to exclude string keys on list field (1).', ), pytest.param( {'foos': {0: ..., 'a': ...}}, TypeError, 'expected integer keys', id='should fail trying to exclude string keys on list field (2).', ), pytest.param( {'foos': {0: 1}}, TypeError, 'Unexpected type', id='should fail using integer key to specify list item field name (1)', ), pytest.param( {'foos': {'__all__': 1}}, TypeError, 'Unexpected type', id='should fail using integer key to specify list item field name (2)', ), pytest.param( {'foos': {'__all__': {'a'}}}, {'c': 3, 'foos': [{'b': 2}, {'b': 4}]}, None, id='using "__all__" to exclude specific nested field', ), pytest.param( {'foos': {0: {'b'}, '__all__': {'a'}}}, {'c': 3, 'foos': [{}, {'b': 4}]}, None, id='using "__all__" to exclude specific nested field in combination with more specific exclude', ), pytest.param( {'foos': {'__all__'}}, {'c': 3, 'foos': []}, None, id='using "__all__" to exclude all list items', ), pytest.param( {'foos': {1, '__all__'}}, {'c': 3, 'foos': []}, None, id='using "__all__" and other items should get merged together, still excluding all list items', ), pytest.param( {'foos': {1: {'a'}, -1: {'b'}}}, {'c': 3, 'foos': [{'a': 1, 'b': 2}, {}]}, None, id='using negative and positive indexes, referencing the same items should merge excludes', ), ], ) def test_model_export_nested_list(exclude, expected, raises_match): class Foo(BaseModel): a: int = 1 b: int = 2 class Bar(BaseModel): c: int foos: List[Foo] m = Bar(c=3, foos=[Foo(a=1, b=2), Foo(a=3, b=4)]) if isinstance(expected, type) and issubclass(expected, Exception): with pytest.raises(expected, match=raises_match): m.dict(exclude=exclude) else: original_exclude = deepcopy(exclude) assert m.dict(exclude=exclude) == expected assert exclude == original_exclude @pytest.mark.parametrize( 'excludes,expected', [ pytest.param( {'bars': {0}}, {'a': 1, 'bars': [{'y': 2}, {'w': -1, 'z': 3}]}, id='excluding first item from list field using index', ), pytest.param({'bars': {'__all__'}}, {'a': 1, 'bars': []}, id='using "__all__" to exclude all list items'), pytest.param( {'bars': {'__all__': {'w'}}}, {'a': 1, 'bars': [{'x': 1}, {'y': 2}, {'z': 3}]}, id='exclude single dict key from all list items', ), ], ) def test_model_export_dict_exclusion(excludes, expected): class Foo(BaseModel): a: int = 1 bars: List[Dict[str, int]] m = Foo(a=1, bars=[{'w': 0, 'x': 1}, {'y': 2}, {'w': -1, 'z': 3}]) original_excludes = deepcopy(excludes) assert m.dict(exclude=excludes) == expected assert excludes == original_excludes def test_model_exclude_config_field_merging(): """Test merging field exclude values from config.""" class Model(BaseModel): b: int = Field(2, 
exclude=...) class Config: fields = { 'b': {'exclude': ...}, } assert Model.__fields__['b'].field_info.exclude is ... class Model(BaseModel): b: int = Field(2, exclude={'a': {'test'}}) class Config: fields = { 'b': {'exclude': ...}, } assert Model.__fields__['b'].field_info.exclude == {'a': {'test'}} class Model(BaseModel): b: int = Field(2, exclude={'foo'}) class Config: fields = { 'b': {'exclude': {'bar'}}, } assert Model.__fields__['b'].field_info.exclude == {'foo': ..., 'bar': ...} def test_model_exclude_copy_on_model_validation(): """When `Config.copy_on_model_validation` is set, it should keep private attributes and excluded fields""" class User(BaseModel): _priv: int = PrivateAttr() id: int username: str password: SecretStr = Field(exclude=True) hobbies: List[str] my_user = User(id=42, username='JohnDoe', password='hashedpassword', hobbies=['scuba diving']) my_user._priv = 13 assert my_user.id == 42 assert my_user.password.get_secret_value() == 'hashedpassword' assert my_user.dict() == {'id': 42, 'username': 'JohnDoe', 'hobbies': ['scuba diving']} class Transaction(BaseModel): id: str user: User = Field(..., exclude={'username'}) value: int class Config: fields = {'value': {'exclude': True}} t = Transaction( id='1234567890', user=my_user, value=9876543210, ) assert t.user is not my_user assert t.user.hobbies == ['scuba diving'] assert t.user.hobbies is my_user.hobbies # `Config.copy_on_model_validation` does a shallow copy assert t.user._priv == 13 assert t.user.password.get_secret_value() == 'hashedpassword' assert t.dict() == {'id': '1234567890', 'user': {'id': 42, 'hobbies': ['scuba diving']}} def test_model_exclude_copy_on_model_validation_shallow(): """When `Config.copy_on_model_validation` is set to 'shallow', do the same as the previous test but perform a shallow copy""" class User(BaseModel): class Config: copy_on_model_validation = 'shallow' hobbies: List[str] my_user = User(hobbies=['scuba diving']) class Transaction(BaseModel): user: User = Field(...)
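# (note, not in the original test) with 'shallow', validation copies the nested model itself, but its mutable attributes stay shared with the original, as the identity asserts below check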
t = Transaction(user=my_user) assert t.user is not my_user assert t.user.hobbies is my_user.hobbies # unlike above, this should be a shallow copy @pytest.mark.parametrize('comv_value', [True, False]) def test_copy_on_model_validation_warning(comv_value): class User(BaseModel): class Config: # True interpreted as 'shallow', False interpreted as 'none' copy_on_model_validation = comv_value hobbies: List[str] my_user = User(hobbies=['scuba diving']) class Transaction(BaseModel): user: User with pytest.warns(DeprecationWarning, match="`copy_on_model_validation` should be a string: 'deep', 'shallow' or"): t = Transaction(user=my_user) if comv_value: assert t.user is not my_user else: assert t.user is my_user assert t.user.hobbies is my_user.hobbies def test_validation_deep_copy(): """With `Config.copy_on_model_validation = 'deep'`, validation should deep-copy nested models""" class A(BaseModel): name: str class Config: copy_on_model_validation = 'deep' class B(BaseModel): list_a: List[A] a = A(name='a') b = B(list_a=[a]) assert b.list_a == [A(name='a')] a.name = 'b' assert b.list_a == [A(name='a')] @pytest.mark.parametrize( 'kinds', [ {'sub_fields', 'model_fields', 'model_config', 'sub_config', 'combined_config'}, {'sub_fields', 'model_fields', 'combined_config'}, {'sub_fields', 'model_fields'}, {'combined_config'}, {'model_config', 'sub_config'}, {'model_config', 'sub_fields'}, {'model_fields', 'sub_config'}, ], ) @pytest.mark.parametrize( 'exclude,expected', [ (None, {'a': 0, 'c': {'a': [3, 5], 'c': 'foobar'}, 'd': {'c': 'foobar'}}), ({'c', 'd'}, {'a': 0}), ({'a': ..., 'c': ..., 'd': {'a': ..., 'c': ...}}, {'d': {}}), ], ) def test_model_export_exclusion_with_fields_and_config(kinds, exclude, expected): """Test that exporting models with fields using the export parameter works.""" class ChildConfig: pass if 'sub_config' in kinds: ChildConfig.fields = {'b': {'exclude': ...}, 'a': {'exclude': {1}}} class ParentConfig: pass if 'combined_config' in kinds: ParentConfig.fields = { 'b': {'exclude': ...}, 'c': {'exclude': {'b': ..., 'a': {1}}}, 'd': {'exclude': {'a': ..., 'b': ...}}, } elif 'model_config' in kinds: ParentConfig.fields = {'b': {'exclude': ...}, 'd': {'exclude': {'a'}}} class Sub(BaseModel): a: List[int] = Field([3, 4, 5], exclude={1} if 'sub_fields' in kinds else None) b: int = Field(4, exclude=... if 'sub_fields' in kinds else None) c: str = 'foobar' Config = ChildConfig class Model(BaseModel): a: int = 0 b: int = Field(2, exclude=... if 'model_fields' in kinds else None) c: Sub = Sub() d: Sub = Field(Sub(), exclude={'a'} if 'model_fields' in kinds else None) Config = ParentConfig m = Model() assert m.dict(exclude=exclude) == expected, 'Unexpected model export result' def test_model_export_exclusion_inheritance(): class Sub(BaseModel): s1: str = 'v1' s2: str = 'v2' s3: str = 'v3' s4: str = Field('v4', exclude=...) class Parent(BaseModel): a: int b: int = Field(..., exclude=...)
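# (note, not in the original test) 'b' is excluded at the field level; 'a' and 's.s1' are excluded via the Config below, and Child's Config merges in further excludes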
c: int d: int s: Sub = Sub() class Config: fields = {'a': {'exclude': ...}, 's': {'exclude': {'s1'}}} class Child(Parent): class Config: fields = {'c': {'exclude': ...}, 's': {'exclude': {'s2'}}} actual = Child(a=0, b=1, c=2, d=3).dict() expected = {'d': 3, 's': {'s3': 'v3'}} assert actual == expected, 'Unexpected model export result' def test_model_export_with_true_instead_of_ellipsis(): class Sub(BaseModel): s1: int = 1 class Model(BaseModel): a: int = 2 b: int = Field(3, exclude=True) c: int = Field(4) s: Sub = Sub() class Config: fields = {'c': {'exclude': True}} m = Model() assert m.dict(exclude={'s': True}) == {'a': 2} def test_model_export_inclusion(): class Sub(BaseModel): s1: str = 'v1' s2: str = 'v2' s3: str = 'v3' s4: str = 'v4' class Model(BaseModel): a: Sub = Sub() b: Sub = Field(Sub(), include={'s1'}) c: Sub = Field(Sub(), include={'s1', 's2'}) class Config: fields = {'a': {'include': {'s2', 's1', 's3'}}, 'b': {'include': {'s1', 's2', 's3', 's4'}}} assert Model.__fields__['a'].field_info.include == {'s1': ..., 's2': ..., 's3': ...} assert Model.__fields__['b'].field_info.include == {'s1': ...} assert Model.__fields__['c'].field_info.include == {'s1': ..., 's2': ...} actual = Model().dict(include={'a': {'s3', 's4'}, 'b': ..., 'c': ...}) # s1 included via field, s2 via config and s3 via .dict call: expected = {'a': {'s3': 'v3'}, 'b': {'s1': 'v1'}, 'c': {'s1': 'v1', 's2': 'v2'}} assert actual == expected, 'Unexpected model export result' def test_model_export_inclusion_inheritance(): class Sub(BaseModel): s1: str = Field('v1', include=...) s2: str = Field('v2', include=...) s3: str = Field('v3', include=...) s4: str = 'v4' class Parent(BaseModel): a: int b: int c: int s: Sub = Field(Sub(), include={'s1', 's2'}) # overrides includes set in Sub model class Config: # b will be included since fields are set independently fields = {'b': {'include': ...}} class Child(Parent): class Config: # b is still included even if it doesn't occur here since fields # are still considered separately. # s however, is merged, resulting in only s1 being included. fields = {'a': {'include': ...}, 's': {'include': {'s1'}}} actual = Child(a=0, b=1, c=2).dict() expected = {'a': 0, 'b': 1, 's': {'s1': 'v1'}} assert actual == expected, 'Unexpected model export result' def test_custom_init_subclass_params(): class DerivedModel(BaseModel): def __init_subclass__(cls, something): cls.something = something # if this raises a TypeError, then there is a regression of issue 867: # pydantic.main.MetaModel.__new__ should include **kwargs at the end of the # method definition and pass them on to the super call at the end in order # to allow the special method __init_subclass__ to be defined with custom # parameters on extended BaseModel classes.
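# (illustrative note, not from the original test) without that **kwargs forwarding in the metaclass, the class statement below would raise a TypeError at class-creation time instead of setting cls.something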
class NewModel(DerivedModel, something=2): something = 1 assert NewModel.something == 2 def test_update_forward_refs_does_not_modify_module_dict(): class MyModel(BaseModel): field: Optional['MyModel'] # noqa: F821 MyModel.update_forward_refs() assert 'MyModel' not in sys.modules[MyModel.__module__].__dict__ def test_two_defaults(): with pytest.raises(ValueError, match='^cannot specify both default and default_factory$'): class Model(BaseModel): a: int = Field(default=3, default_factory=lambda: 3) def test_default_factory(): class ValueModel(BaseModel): uid: UUID = uuid4() m1 = ValueModel() m2 = ValueModel() assert m1.uid == m2.uid class DynamicValueModel(BaseModel): uid: UUID = Field(default_factory=uuid4) m1 = DynamicValueModel() m2 = DynamicValueModel() assert isinstance(m1.uid, UUID) assert m1.uid != m2.uid # With a callable: we should still be able to set callables as defaults class FunctionModel(BaseModel): a: int = 1 uid: Callable[[], UUID] = Field(uuid4) m = FunctionModel() assert m.uid is uuid4 # Returning a singleton from a default_factory is supported class MySingleton: pass MY_SINGLETON = MySingleton() class SingletonFieldModel(BaseModel): singleton: MySingleton = Field(default_factory=lambda: MY_SINGLETON) class Config: arbitrary_types_allowed = True assert SingletonFieldModel().singleton is SingletonFieldModel().singleton def test_default_factory_called_once(): """The default factory should be called only once per model instantiation""" class Seq: def __init__(self): self.v = 0 def __call__(self): self.v += 1 return self.v class MyModel(BaseModel): id: int = Field(default_factory=Seq()) m1 = MyModel() assert m1.id == 1 m2 = MyModel() assert m2.id == 2 assert m1.id == 1 def test_default_factory_called_once_2(): """The default factory should be called only once per model instantiation""" v = 0 def factory(): nonlocal v v += 1 return v class MyModel(BaseModel): id: int = Field(default_factory=factory) m1 = MyModel() assert m1.id == 1 m2 = MyModel() assert m2.id == 2 def test_default_factory_validate_children(): class Child(BaseModel): x: int class Parent(BaseModel): children: List[Child] = Field(default_factory=list) Parent(children=[{'x': 1}, {'x': 2}]) with pytest.raises(ValidationError) as exc_info: Parent(children=[{'x': 1}, {'y': 2}]) assert exc_info.value.errors() == [ {'loc': ('children', 1, 'x'), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_default_factory_parse(): class Inner(BaseModel): val: int = Field(0) class Outer(BaseModel): inner_1: Inner = Field(default_factory=Inner) inner_2: Inner = Field(Inner()) default = Outer().dict() parsed = Outer.parse_obj(default) assert parsed.dict() == {'inner_1': {'val': 0}, 'inner_2': {'val': 0}} assert repr(parsed) == 'Outer(inner_1=Inner(val=0), inner_2=Inner(val=0))' def test_none_min_max_items(): # None default class Foo(BaseModel): foo: List = Field(None) bar: List = Field(None, min_items=0) baz: List = Field(None, max_items=10) f1 = Foo() f2 = Foo(bar=None) f3 = Foo(baz=None) f4 = Foo(bar=None, baz=None) for f in (f1, f2, f3, f4): assert f.foo is None assert f.bar is None assert f.baz is None def test_reuse_same_field(): required_field = Field(...)
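# (note, not in the original test) the same FieldInfo instance is deliberately shared by both models below; each model should still treat the field as required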
class Model1(BaseModel): required: str = required_field class Model2(BaseModel): required: str = required_field with pytest.raises(ValidationError): Model1.parse_obj({}) with pytest.raises(ValidationError): Model2.parse_obj({}) def test_base_config_type_hinting(): class M(BaseModel): a: int get_type_hints(M.__config__) def test_allow_mutation_field(): """assigning to an allow_mutation=False field should raise a TypeError""" class Entry(BaseModel): id: float = Field(allow_mutation=False) val: float class Config: validate_assignment = True r = Entry(id=1, val=100) assert r.val == 100 r.val = 101 assert r.val == 101 assert r.id == 1 with pytest.raises(TypeError, match='"id" has allow_mutation set to False and cannot be assigned'): r.id = 2 def test_repr_field(): class Model(BaseModel): a: int = Field() b: int = Field(repr=True) c: int = Field(repr=False) m = Model(a=1, b=2, c=3) assert repr(m) == 'Model(a=1, b=2)' assert repr(m.__fields__['a'].field_info) == 'FieldInfo(default=PydanticUndefined, extra={})' assert repr(m.__fields__['b'].field_info) == 'FieldInfo(default=PydanticUndefined, extra={})' assert repr(m.__fields__['c'].field_info) == 'FieldInfo(default=PydanticUndefined, repr=False, extra={})' def test_inherited_model_field_copy(): """It should copy models used as fields by default""" class Image(BaseModel): path: str def __hash__(self): return id(self) class Item(BaseModel): images: List[Image] image_1 = Image(path='my_image1.png') image_2 = Image(path='my_image2.png') item = Item(images={image_1, image_2}) assert image_1 in item.images assert id(image_1) != id(item.images[0]) assert id(image_2) != id(item.images[1]) def test_inherited_model_field_untouched(): """It should not copy models used as fields if explicitly asked""" class Image(BaseModel): path: str def __hash__(self): return id(self) class Config: copy_on_model_validation = 'none' class Item(BaseModel): images: List[Image] image_1 = Image(path='my_image1.png') image_2 = Image(path='my_image2.png') item = Item(images=(image_1, image_2)) assert image_1 in item.images assert id(image_1) == id(item.images[0]) assert id(image_2) == id(item.images[1]) def test_mapping_retains_type_subclass(): class CustomMap(dict): pass class Model(BaseModel): x: Mapping[str, Mapping[str, int]] m = Model(x=CustomMap(outer=CustomMap(inner=42))) assert isinstance(m.x, CustomMap) assert isinstance(m.x['outer'], CustomMap) assert m.x['outer']['inner'] == 42 def test_mapping_retains_type_defaultdict(): class Model(BaseModel): x: Mapping[str, int] d = defaultdict(int) d[1] = '2' d['3'] m = Model(x=d) assert isinstance(m.x, defaultdict) assert m.x['1'] == 2 assert m.x['3'] == 0 def test_mapping_retains_type_fallback_error(): class CustomMap(dict): def __init__(self, *args, **kwargs): if args or kwargs: raise TypeError('test') super().__init__(*args, **kwargs) class Model(BaseModel): x: Mapping[str, int] d = CustomMap() d['one'] = 1 d['two'] = 2 with pytest.raises(RuntimeError, match="Could not convert dictionary to 'CustomMap'"): Model(x=d) def test_typing_coercion_dict(): class Model(BaseModel): x: Dict[str, int] m = Model(x={'one': 1, 'two': 2}) assert repr(m) == "Model(x={'one': 1, 'two': 2})" def test_typing_non_coercion_of_dict_subclasses(): KT = TypeVar('KT') VT = TypeVar('VT') class MyDict(Dict[KT, VT]): def __repr__(self): return f'MyDict({super().__repr__()})' class Model(BaseModel): a: MyDict b: MyDict[str, int] c: Dict[str, int] d: Mapping[str, int] assert ( repr(Model(a=MyDict({'a': 1}), b=MyDict({'a': '1'}), c=MyDict({'a': '1'}),
d=MyDict({'a': '1'}))) == "Model(a=MyDict({'a': 1}), b=MyDict({'a': 1}), c={'a': 1}, d=MyDict({'a': 1}))" ) def test_typing_coercion_defaultdict(): class Model(BaseModel): x: DefaultDict[int, str] d = defaultdict(str) d['1'] m = Model(x=d) m.x['a'] assert repr(m) == "Model(x=defaultdict(<class 'str'>, {1: '', 'a': ''}))" def test_typing_coercion_counter(): class Model(BaseModel): x: Counter[str] assert Model.__fields__['x'].type_ is int assert repr(Model(x={'a': 10})) == "Model(x=Counter({'a': 10}))" def test_typing_counter_value_validation(): class Model(BaseModel): x: Counter[str] with pytest.raises(ValidationError) as exc_info: Model(x={'a': 'a'}) assert exc_info.value.errors() == [ { 'loc': ('x', 'a'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer', } ] def test_class_kwargs_config(): class Base(BaseModel, extra='forbid', alias_generator=str.upper): a: int assert Base.__config__.extra is Extra.forbid assert Base.__config__.alias_generator is str.upper assert Base.__fields__['a'].alias == 'A' class Model(Base, extra='allow'): b: int assert Model.__config__.extra is Extra.allow # overwritten as intended assert Model.__config__.alias_generator is str.upper # inherited as intended assert Model.__fields__['b'].alias == 'B' # alias_generator still works def test_class_kwargs_config_json_encoders(): class Model(BaseModel, json_encoders={int: str}): pass assert Model.__config__.json_encoders == {int: str} def test_class_kwargs_config_and_attr_conflict(): with pytest.raises( TypeError, match='Specifying config in two places is ambiguous, use either Config attribute or class kwargs' ): class Model(BaseModel, extra='allow'): b: int class Config: extra = 'forbid' def test_class_kwargs_custom_config(): class Base(BaseModel): class Config(BaseConfig): some_config = 'value' class Model(Base, some_config='new_value'): a: int assert Model.__config__.some_config == 'new_value' @pytest.mark.skipif(sys.version_info < (3, 10), reason='need 3.10 version') def test_new_union_origin(): """On 3.10+, origin of `int | str` is `types.UnionType`, not `typing.Union`""" class Model(BaseModel): x: int | str assert Model(x=3).x == 3 assert Model(x='3').x == 3 assert Model(x='pika').x == 'pika' assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'x': {'title': 'X', 'anyOf': [{'type': 'integer'}, {'type': 'string'}]}}, 'required': ['x'], } def test_annotated_class(): class PydanticModel(BaseModel): foo: str = '123' PydanticAlias = Annotated[PydanticModel, 'bar baz'] pa = PydanticAlias() assert isinstance(pa, PydanticModel) pa.__doc__ = 'qwe' assert repr(pa) == "PydanticModel(foo='123')" assert pa.__doc__ == 'qwe' @pytest.mark.parametrize( 'ann', [Final, Final[int]], ids=['no-arg', 'with-arg'], ) @pytest.mark.parametrize( 'value', [None, Field(...)], ids=['none', 'field'], ) def test_final_field_decl_without_default_val(ann, value): class Model(BaseModel): a: ann if value is not None: a = value Model.update_forward_refs(ann=ann) assert 'a' not in Model.__class_vars__ assert 'a' in Model.__fields__ assert Model.__fields__['a'].final @pytest.mark.parametrize( 'ann', [Final, Final[int]], ids=['no-arg', 'with-arg'], ) def test_final_field_decl_with_default_val(ann): class Model(BaseModel): a: ann = 10 Model.update_forward_refs(ann=ann) assert 'a' in Model.__class_vars__ assert 'a' not in Model.__fields__ def test_final_field_reassignment(): class Model(BaseModel): a: Final[int] obj = Model(a=10) with pytest.raises( TypeError, match=r'^"Model" object "a" field is final and does not support
reassignment$', ): obj.a = 20 def test_field_by_default_is_not_final(): class Model(BaseModel): a: int assert not Model.__fields__['a'].final pydantic-1.10.14/tests/test_model_signature.py000066400000000000000000000123301455251250200214250ustar00rootroot00000000000000from inspect import Parameter, Signature, signature from typing import Any, Iterable, Optional, Union from typing_extensions import Annotated from pydantic import BaseModel, Extra, Field, create_model def _equals(a: Union[str, Iterable[str]], b: Union[str, Iterable[str]]) -> bool: """ compare strings with spaces removed """ if isinstance(a, str) and isinstance(b, str): return a.replace(' ', '') == b.replace(' ', '') elif isinstance(a, Iterable) and isinstance(b, Iterable): return all(_equals(a_, b_) for a_, b_ in zip(a, b)) else: raise TypeError(f'arguments must be both strings or both lists, not {type(a)}, {type(b)}') def test_model_signature(): class Model(BaseModel): a: float = Field(..., title='A') b = Field(10) sig = signature(Model) assert sig != signature(BaseModel) assert _equals(map(str, sig.parameters.values()), ('a: float', 'b: int = 10')) assert _equals(str(sig), '(*, a: float, b: int = 10) -> None') def test_custom_init_signature(): class MyModel(BaseModel): id: int name: str = 'John Doe' f__: str = Field(..., alias='foo') class Config: extra = Extra.allow def __init__(self, id: int = 1, bar=2, *, baz: Any, **data): super().__init__(id=id, **data) self.bar = bar self.baz = baz sig = signature(MyModel) assert _equals( map(str, sig.parameters.values()), ('id: int = 1', 'bar=2', 'baz: Any', "name: str = 'John Doe'", 'foo: str', '**data'), ) assert _equals(str(sig), "(id: int = 1, bar=2, *, baz: Any, name: str = 'John Doe', foo: str, **data) -> None") def test_custom_init_signature_with_no_var_kw(): class Model(BaseModel): a: float b: int = 2 c: int def __init__(self, a: float, b: int): super().__init__(a=a, b=b, c=1) class Config: extra = Extra.allow assert _equals(str(signature(Model)), '(a: float, b: int) -> None') def test_invalid_identifiers_signature(): model = create_model( 'Model', **{'123 invalid identifier!': Field(123, alias='valid_identifier'), '!': Field(0, alias='yeah')} ) assert _equals(str(signature(model)), '(*, valid_identifier: int = 123, yeah: int = 0) -> None') model = create_model('Model', **{'123 invalid identifier!': 123, '!': Field(0, alias='yeah')}) assert _equals(str(signature(model)), '(*, yeah: int = 0, **extra_data: Any) -> None') def test_use_field_name(): class Foo(BaseModel): foo: str = Field(..., alias='this is invalid') class Config: allow_population_by_field_name = True assert _equals(str(signature(Foo)), '(*, foo: str) -> None') def test_does_not_use_reserved_word(): class Foo(BaseModel): from_: str = Field(..., alias='from') class Config: allow_population_by_field_name = True assert _equals(str(signature(Foo)), '(*, from_: str) -> None') def test_extra_allow_no_conflict(): class Model(BaseModel): spam: str class Config: extra = Extra.allow assert _equals(str(signature(Model)), '(*, spam: str, **extra_data: Any) -> None') def test_extra_allow_conflict(): class Model(BaseModel): extra_data: str class Config: extra = Extra.allow assert _equals(str(signature(Model)), '(*, extra_data: str, **extra_data_: Any) -> None') def test_extra_allow_conflict_twice(): class Model(BaseModel): extra_data: str extra_data_: str class Config: extra = Extra.allow assert _equals(str(signature(Model)), '(*, extra_data: str, extra_data_: str, **extra_data__: Any) -> None') def 
test_extra_allow_conflict_custom_signature(): class Model(BaseModel): extra_data: int def __init__(self, extra_data: int = 1, **foobar: Any): super().__init__(extra_data=extra_data, **foobar) class Config: extra = Extra.allow assert _equals(str(signature(Model)), '(extra_data: int = 1, **foobar: Any) -> None') def test_signature_is_class_only(): class Model(BaseModel): foo: int = 123 def __call__(self, a: int) -> bool: pass assert _equals(str(signature(Model)), '(*, foo: int = 123) -> None') assert _equals(str(signature(Model())), '(a: int) -> bool') assert not hasattr(Model(), '__signature__') def test_optional_field(): class Model(BaseModel): foo: Optional[int] = None assert signature(Model) == Signature( [Parameter('foo', Parameter.KEYWORD_ONLY, default=None, annotation=Optional[int])], return_annotation=None ) def test_annotated_field(): class Model(BaseModel): foo: Annotated[int, 'foo'] = 1 assert signature(Model) == Signature( [Parameter('foo', Parameter.KEYWORD_ONLY, default=1, annotation=Annotated[int, 'foo'])], return_annotation=None ) def test_annotated_optional_field(): class Model(BaseModel): foo: Annotated[Optional[int], 'foo'] = None assert signature(Model) == Signature( [Parameter('foo', Parameter.KEYWORD_ONLY, default=None, annotation=Annotated[Optional[int], 'foo'])], return_annotation=None, ) pydantic-1.10.14/tests/test_networks.py000066400000000000000000000722071455251250200201310ustar00rootroot00000000000000import pytest from pydantic import ( AmqpDsn, AnyHttpUrl, AnyUrl, BaseModel, CockroachDsn, EmailStr, FileUrl, HttpUrl, KafkaDsn, MongoDsn, NameEmail, PostgresDsn, RedisDsn, ValidationError, stricturl, ) from pydantic.networks import validate_email try: import email_validator except ImportError: email_validator = None @pytest.mark.parametrize( 'value', [ 'http://example.org', 'http://test', 'http://localhost', 'https://example.org/whatever/next/', 'postgres://user:pass@localhost:5432/app', 'postgres://just-user@localhost:5432/app', 'postgresql+asyncpg://user:pass@localhost:5432/app', 'postgresql+pg8000://user:pass@localhost:5432/app', 'postgresql+psycopg://postgres:postgres@localhost:5432/hatch', 'postgresql+psycopg2://postgres:postgres@localhost:5432/hatch', 'postgresql+psycopg2cffi://user:pass@localhost:5432/app', 'postgresql+py-postgresql://user:pass@localhost:5432/app', 'postgresql+pygresql://user:pass@localhost:5432/app', 'foo-bar://example.org', 'foo.bar://example.org', 'foo0bar://example.org', 'https://example.org', 'http://localhost', 'http://localhost/', 'http://localhost:8000', 'http://localhost:8000/', 'https://foo_bar.example.com/', 'ftp://example.org', 'ftps://example.org', 'http://example.co.jp', 'http://www.example.com/a%C2%B1b', 'http://www.example.com/~username/', 'http://info.example.com?fred', 'http://info.example.com/?fred', 'http://xn--mgbh0fb.xn--kgbechtv/', 'http://example.com/blue/red%3Fand+green', 'http://www.example.com/?array%5Bkey%5D=value', 'http://xn--rsum-bpad.example.org/', 'http://123.45.67.8/', 'http://123.45.67.8:8329/', 'http://[2001:db8::ff00:42]:8329', 'http://[2001::1]:8329', 'http://[2001:db8::1]/', 'http://www.example.com:8000/foo', 'http://www.cwi.nl:80/%7Eguido/Python.html', 'https://www.python.org/путь', 'http://андрей@example.com', AnyUrl('https://example.com', scheme='https', host='example.com'), 'https://exam_ple.com/', 'http://twitter.com/@handle/', 'http://11.11.11.11.example.com/action', 'http://abc.11.11.11.11.example.com/action', 'http://example#', 'http://example/#', 'http://example/#fragment', 'http://example/?#', 
'http://example.org/path#', 'http://example.org/path#fragment', 'http://example.org/path?query#', 'http://example.org/path?query#fragment', 'file://localhost/foo/bar', ], ) def test_any_url_success(value): class Model(BaseModel): v: AnyUrl assert Model(v=value).v, value @pytest.mark.parametrize( 'value,err_type,err_msg,err_ctx', [ ('http:///example.com/', 'value_error.url.host', 'URL host invalid', None), ('https:///example.com/', 'value_error.url.host', 'URL host invalid', None), ('http://.example.com:8000/foo', 'value_error.url.host', 'URL host invalid', None), ('https://example.org\\', 'value_error.url.host', 'URL host invalid', None), ('https://exampl$e.org', 'value_error.url.host', 'URL host invalid', None), ('http://??', 'value_error.url.host', 'URL host invalid', None), ('http://.', 'value_error.url.host', 'URL host invalid', None), ('http://..', 'value_error.url.host', 'URL host invalid', None), ( 'https://example.org more', 'value_error.url.extra', "URL invalid, extra characters found after valid URL: ' more'", {'extra': ' more'}, ), ('$https://example.org', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('../icons/logo.gif', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('abc', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('..', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('/', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('+http://example.com/', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('ht*tp://example.com/', 'value_error.url.scheme', 'invalid or missing URL scheme', None), (' ', 'value_error.any_str.min_length', 'ensure this value has at least 1 characters', {'limit_value': 1}), ('', 'value_error.any_str.min_length', 'ensure this value has at least 1 characters', {'limit_value': 1}), (None, 'type_error.none.not_allowed', 'none is not an allowed value', None), ( 'http://2001:db8::ff00:42:8329', 'value_error.url.extra', "URL invalid, extra characters found after valid URL: ':db8::ff00:42:8329'", {'extra': ':db8::ff00:42:8329'}, ), ('http://[192.168.1.1]:8329', 'value_error.url.host', 'URL host invalid', None), ('http://example.com:99999', 'value_error.url.port', 'URL port invalid, port cannot exceed 65535', None), ( 'http://example##', 'value_error.url.extra', "URL invalid, extra characters found after valid URL: '#'", {'extra': '#'}, ), ( 'http://example/##', 'value_error.url.extra', "URL invalid, extra characters found after valid URL: '#'", {'extra': '#'}, ), ('file:///foo/bar', 'value_error.url.host', 'URL host invalid', None), ], ) def test_any_url_invalid(value, err_type, err_msg, err_ctx): class Model(BaseModel): v: AnyUrl with pytest.raises(ValidationError) as exc_info: Model(v=value) assert len(exc_info.value.errors()) == 1, exc_info.value.errors() error = exc_info.value.errors()[0] # debug(error) assert error['type'] == err_type, value assert error['msg'] == err_msg, value assert error.get('ctx') == err_ctx, value def validate_url(s): class Model(BaseModel): v: AnyUrl return Model(v=s).v def test_any_url_parts(): url = validate_url('http://example.org') assert str(url) == 'http://example.org' assert repr(url) == "AnyUrl('http://example.org', scheme='http', host='example.org', tld='org', host_type='domain')" assert url.scheme == 'http' assert url.host == 'example.org' assert url.tld == 'org' assert url.host_type == 'domain' assert url.port is None assert url == AnyUrl('http://example.org', scheme='https', host='example.org') def 
test_url_repr(): url = validate_url('http://user:password@example.org:1234/the/path/?query=here#fragment=is;this=bit') assert str(url) == 'http://user:password@example.org:1234/the/path/?query=here#fragment=is;this=bit' assert repr(url) == ( "AnyUrl('http://user:password@example.org:1234/the/path/?query=here#fragment=is;this=bit', " "scheme='http', user='user', password='password', host='example.org', tld='org', host_type='domain', " "port='1234', path='/the/path/', query='query=here', fragment='fragment=is;this=bit')" ) assert url.scheme == 'http' assert url.user == 'user' assert url.password == 'password' assert url.host == 'example.org' assert url.host_type == 'domain' assert url.port == '1234' assert url.path == '/the/path/' assert url.query == 'query=here' assert url.fragment == 'fragment=is;this=bit' def test_ipv4_port(): url = validate_url('ftp://123.45.67.8:8329/') assert url.scheme == 'ftp' assert url.host == '123.45.67.8' assert url.host_type == 'ipv4' assert url.port == '8329' assert url.user is None assert url.password is None def test_ipv4_no_port(): url = validate_url('ftp://123.45.67.8') assert url.scheme == 'ftp' assert url.host == '123.45.67.8' assert url.host_type == 'ipv4' assert url.port is None assert url.user is None assert url.password is None def test_ipv6_port(): url = validate_url('wss://[2001:db8::ff00:42]:8329') assert url.scheme == 'wss' assert url.host == '[2001:db8::ff00:42]' assert url.host_type == 'ipv6' assert url.port == '8329' def test_int_domain(): url = validate_url('https://£££.org') assert url.host == 'xn--9aaa.org' assert url.host_type == 'int_domain' assert str(url) == 'https://xn--9aaa.org' def test_co_uk(): url = validate_url('http://example.co.uk') assert str(url) == 'http://example.co.uk' assert url.scheme == 'http' assert url.host == 'example.co.uk' assert url.tld == 'uk' # wrong but no better solution assert url.host_type == 'domain' def test_user_no_password(): url = validate_url('http://user:@example.org') assert url.user == 'user' assert url.password == '' assert url.host == 'example.org' def test_user_info_no_user(): url = validate_url('http://:password@example.org') assert url.user == '' assert url.password == 'password' assert url.host == 'example.org' def test_at_in_path(): url = validate_url('https://twitter.com/@handle') assert url.scheme == 'https' assert url.host == 'twitter.com' assert url.user is None assert url.password is None assert url.path == '/@handle' def test_fragment_without_query(): url = validate_url('https://docs.pydantic.dev/usage/types/#constrained-types') assert url.scheme == 'https' assert url.host == 'docs.pydantic.dev' assert url.path == '/usage/types/' assert url.query is None assert url.fragment == 'constrained-types' @pytest.mark.parametrize( 'value', [ 'http://example.org', 'http://example.org/foobar', 'http://example.org.', 'http://example.org./foobar', 'HTTP://EXAMPLE.ORG', 'https://example.org', 'https://example.org?a=1&b=2', 'https://example.org#a=3;b=3', 'https://foo_bar.example.com/', 'https://exam_ple.com/', # should perhaps fail? 
I think it's contrary to the RFC but Chrome allows it 'https://example.xn--p1ai', 'https://example.xn--vermgensberatung-pwb', 'https://example.xn--zfr164b', ], ) def test_http_url_success(value): class Model(BaseModel): v: HttpUrl assert Model(v=value).v == value @pytest.mark.parametrize( 'value,err_type,err_msg,err_ctx', [ ( 'ftp://example.com/', 'value_error.url.scheme', 'URL scheme not permitted', {'allowed_schemes': {'https', 'http'}}, ), ('http://foobar/', 'value_error.url.host', 'URL host invalid, top level domain required', None), ('http://localhost/', 'value_error.url.host', 'URL host invalid, top level domain required', None), ('https://example.123', 'value_error.url.host', 'URL host invalid, top level domain required', None), ('https://example.ab123', 'value_error.url.host', 'URL host invalid, top level domain required', None), ( 'x' * 2084, 'value_error.any_str.max_length', 'ensure this value has at most 2083 characters', {'limit_value': 2083}, ), ], ) def test_http_url_invalid(value, err_type, err_msg, err_ctx): class Model(BaseModel): v: HttpUrl with pytest.raises(ValidationError) as exc_info: Model(v=value) assert len(exc_info.value.errors()) == 1, exc_info.value.errors() error = exc_info.value.errors()[0] assert error['type'] == err_type, value assert error['msg'] == err_msg, value assert error.get('ctx') == err_ctx, value @pytest.mark.parametrize( 'input,output', [ (' https://www.example.com \n', 'https://www.example.com'), (b'https://www.example.com', 'https://www.example.com'), # https://www.xudongz.com/blog/2017/idn-phishing/ accepted but converted ('https://www.аррӏе.com/', 'https://www.xn--80ak6aa92e.com/'), ('https://exampl£e.org', 'https://xn--example-gia.org'), ('https://example.珠宝', 'https://example.xn--pbt977c'), ('https://example.vermögensberatung', 'https://example.xn--vermgensberatung-pwb'), ('https://example.рф', 'https://example.xn--p1ai'), ('https://exampl£e.珠宝', 'https://xn--example-gia.xn--pbt977c'), ], ) def test_coerce_url(input, output): class Model(BaseModel): v: HttpUrl assert Model(v=input).v == output @pytest.mark.parametrize( 'input,output', [ (' https://www.example.com \n', 'com'), (b'https://www.example.com', 'com'), ('https://www.example.com?param=value', 'com'), ('https://example.珠宝', 'xn--pbt977c'), ('https://exampl£e.珠宝', 'xn--pbt977c'), ('https://example.vermögensberatung', 'xn--vermgensberatung-pwb'), ('https://example.рф', 'xn--p1ai'), ('https://example.рф?param=value', 'xn--p1ai'), ], ) def test_parses_tld(input, output): class Model(BaseModel): v: HttpUrl assert Model(v=input).v.tld == output @pytest.mark.parametrize( 'value', ['file:///foo/bar', 'file://localhost/foo/bar', 'file:////localhost/foo/bar'], ) def test_file_url_success(value): class Model(BaseModel): v: FileUrl assert Model(v=value).v == value def test_get_default_parts(): class MyConnectionString(AnyUrl): @staticmethod def get_default_parts(parts): # get_default_parts allows generating custom connection strings for services return { 'user': 'admin', 'password': '123', } class C(BaseModel): connection: MyConnectionString c = C(connection='protocol://service:8080') assert c.connection == 'protocol://admin:123@service:8080' assert c.connection.user == 'admin' assert c.connection.password == '123' @pytest.mark.parametrize( 'url,port', [ ('https://www.example.com', '443'), ('https://www.example.com:443', '443'), ('https://www.example.com:8089', '8089'), ('http://www.example.com', '80'), ('http://www.example.com:80', '80'), ('http://www.example.com:8080', '8080'), ], ) def
test_http_urls_default_port(url, port): class Model(BaseModel): v: HttpUrl m = Model(v=url) assert m.v.port == port assert m.v == url @pytest.mark.parametrize( 'dsn', [ 'postgres://user:pass@localhost:5432/app', 'postgresql://user:pass@localhost:5432/app', 'postgresql+asyncpg://user:pass@localhost:5432/app', 'postgres://user:pass@host1.db.net,host2.db.net:6432/app', ], ) def test_postgres_dsns(dsn): class Model(BaseModel): a: PostgresDsn assert Model(a=dsn).a == dsn @pytest.mark.parametrize( 'dsn,error_message', ( ( 'postgres://user:pass@host1.db.net:4321,/foo/bar:5432/app', {'loc': ('a',), 'msg': 'URL host invalid', 'type': 'value_error.url.host'}, ), ( 'postgres://user:pass@host1.db.net,/app', {'loc': ('a',), 'msg': 'URL host invalid', 'type': 'value_error.url.host'}, ), ( 'postgres://user:pass@/foo/bar:5432,host1.db.net:4321/app', {'loc': ('a',), 'msg': 'URL host invalid', 'type': 'value_error.url.host'}, ), ( 'postgres://localhost:5432/app', {'loc': ('a',), 'msg': 'userinfo required in URL but missing', 'type': 'value_error.url.userinfo'}, ), ( 'postgres://user@/foo/bar:5432/app', {'loc': ('a',), 'msg': 'URL host invalid', 'type': 'value_error.url.host'}, ), ( 'http://example.org', { 'loc': ('a',), 'msg': 'URL scheme not permitted', 'type': 'value_error.url.scheme', 'ctx': {'allowed_schemes': PostgresDsn.allowed_schemes}, }, ), ), ) def test_postgres_dsns_validation_error(dsn, error_message): class Model(BaseModel): a: PostgresDsn with pytest.raises(ValidationError) as exc_info: Model(a=dsn) error = exc_info.value.errors()[0] assert error == error_message def test_multihost_postgres_dsns(): class Model(BaseModel): a: PostgresDsn any_multihost_url = Model(a='postgres://user:pass@host1.db.net:4321,host2.db.net:6432/app').a assert any_multihost_url == 'postgres://user:pass@host1.db.net:4321,host2.db.net:6432/app' assert any_multihost_url.scheme == 'postgres' assert any_multihost_url.host is None assert any_multihost_url.host_type is None assert any_multihost_url.tld is None assert any_multihost_url.port is None assert any_multihost_url.path == '/app' assert any_multihost_url.hosts == [ {'host': 'host1.db.net', 'port': '4321', 'tld': 'net', 'host_type': 'domain', 'rebuild': False}, {'host': 'host2.db.net', 'port': '6432', 'tld': 'net', 'host_type': 'domain', 'rebuild': False}, ] any_multihost_url = Model(a='postgres://user:pass@host.db.net:4321/app').a assert any_multihost_url.scheme == 'postgres' assert any_multihost_url == 'postgres://user:pass@host.db.net:4321/app' assert any_multihost_url.host == 'host.db.net' assert any_multihost_url.host_type == 'domain' assert any_multihost_url.tld == 'net' assert any_multihost_url.port == '4321' assert any_multihost_url.path == '/app' assert any_multihost_url.hosts is None def test_cockroach_dsns(): class Model(BaseModel): a: CockroachDsn assert Model(a='cockroachdb://user:pass@localhost:5432/app').a == 'cockroachdb://user:pass@localhost:5432/app' assert ( Model(a='cockroachdb+psycopg2://user:pass@localhost:5432/app').a == 'cockroachdb+psycopg2://user:pass@localhost:5432/app' ) assert ( Model(a='cockroachdb+asyncpg://user:pass@localhost:5432/app').a == 'cockroachdb+asyncpg://user:pass@localhost:5432/app' ) with pytest.raises(ValidationError) as exc_info: Model(a='http://example.org') assert exc_info.value.errors()[0]['type'] == 'value_error.url.scheme' assert exc_info.value.json().startswith('[') with pytest.raises(ValidationError) as exc_info: Model(a='cockroachdb://localhost:5432/app') error = exc_info.value.errors()[0] assert error == {'loc': 
('a',), 'msg': 'userinfo required in URL but missing', 'type': 'value_error.url.userinfo'} with pytest.raises(ValidationError) as exc_info: Model(a='cockroachdb://user@/foo/bar:5432/app') error = exc_info.value.errors()[0] assert error == {'loc': ('a',), 'msg': 'URL host invalid', 'type': 'value_error.url.host'} def test_amqp_dsns(): class Model(BaseModel): a: AmqpDsn m = Model(a='amqp://user:pass@localhost:1234/app') assert m.a == 'amqp://user:pass@localhost:1234/app' assert m.a.user == 'user' assert m.a.password == 'pass' m = Model(a='amqps://user:pass@localhost:5432//') assert m.a == 'amqps://user:pass@localhost:5432//' with pytest.raises(ValidationError) as exc_info: Model(a='http://example.org') assert exc_info.value.errors()[0]['type'] == 'value_error.url.scheme' # Password is not required for AMQP protocol m = Model(a='amqp://localhost:1234/app') assert m.a == 'amqp://localhost:1234/app' assert m.a.user is None assert m.a.password is None # Only the scheme is required for the AMQP protocol. # https://www.rabbitmq.com/uri-spec.html m = Model(a='amqps://') assert m.a.scheme == 'amqps' assert m.a.host is None assert m.a.port is None assert m.a.path is None def test_redis_dsns(): class Model(BaseModel): a: RedisDsn m = Model(a='redis://user:pass@localhost:1234/app') assert m.a == 'redis://user:pass@localhost:1234/app' assert m.a.user == 'user' assert m.a.password == 'pass' m = Model(a='rediss://user:pass@localhost:1234/app') assert m.a == 'rediss://user:pass@localhost:1234/app' m = Model(a='rediss://:pass@localhost:1234') assert m.a == 'rediss://:pass@localhost:1234/0' with pytest.raises(ValidationError) as exc_info: Model(a='http://example.org') assert exc_info.value.errors()[0]['type'] == 'value_error.url.scheme' # Password is not required for Redis protocol m = Model(a='redis://localhost:1234/app') assert m.a == 'redis://localhost:1234/app' assert m.a.user is None assert m.a.password is None # Only the scheme is required for the Redis protocol.
Otherwise defaults are applied # https://www.iana.org/assignments/uri-schemes/prov/redis m = Model(a='rediss://') assert m.a.scheme == 'rediss' assert m.a.host == 'localhost' assert m.a.port == '6379' assert m.a.path == '/0' def test_mongodb_dsns(): class Model(BaseModel): a: MongoDsn # TODO: Need unit tests for "Replica Set", "Sharded cluster" and other deployment modes of MongoDB m = Model(a='mongodb://user:pass@localhost:1234/app') assert m.a == 'mongodb://user:pass@localhost:1234/app' assert m.a.user == 'user' assert m.a.password == 'pass' with pytest.raises(ValidationError) as exc_info: Model(a='http://example.org') assert exc_info.value.errors()[0]['type'] == 'value_error.url.scheme' # Password is not required for MongoDB protocol m = Model(a='mongodb://localhost:1234/app') assert m.a == 'mongodb://localhost:1234/app' assert m.a.user is None assert m.a.password is None # Only the scheme and host are required for the MongoDB protocol m = Model(a='mongodb://localhost') assert m.a.scheme == 'mongodb' assert m.a.host == 'localhost' assert m.a.port == '27017' def test_kafka_dsns(): class Model(BaseModel): a: KafkaDsn m = Model(a='kafka://') assert m.a.scheme == 'kafka' assert m.a.host == 'localhost' assert m.a.port == '9092' assert m.a == 'kafka://localhost:9092' m = Model(a='kafka://kafka1') assert m.a == 'kafka://kafka1:9092' with pytest.raises(ValidationError) as exc_info: Model(a='http://example.org') assert exc_info.value.errors()[0]['type'] == 'value_error.url.scheme' m = Model(a='kafka://kafka3:9093') assert m.a.user is None assert m.a.password is None def test_custom_schemes(): class Model(BaseModel): v: stricturl(strip_whitespace=False, allowed_schemes={'ws', 'wss'}) # noqa: F821 class Model2(BaseModel): v: stricturl(host_required=False, allowed_schemes={'foo'}) # noqa: F821 assert Model(v='ws://example.org').v == 'ws://example.org' assert Model2(v='foo:///foo/bar').v == 'foo:///foo/bar' with pytest.raises(ValidationError): Model(v='http://example.org') with pytest.raises(ValidationError): Model(v='ws://example.org ') with pytest.raises(ValidationError): Model(v='ws:///foo/bar') @pytest.mark.parametrize( 'kwargs,expected', [ (dict(scheme='ws', user='foo', host='example.net'), 'ws://foo@example.net'), (dict(scheme='ws', user='foo', password='x', host='example.net'), 'ws://foo:x@example.net'), (dict(scheme='ws', host='example.net', query='a=b', fragment='c=d'), 'ws://example.net?a=b#c=d'), (dict(scheme='http', host='example.net', port='1234'), 'http://example.net:1234'), ], ) def test_build_url(kwargs, expected): assert AnyUrl(None, **kwargs) == expected @pytest.mark.parametrize( 'kwargs,expected', [ (dict(scheme='http', host='example.net'), 'http://example.net'), (dict(scheme='https', host='example.net'), 'https://example.net'), (dict(scheme='http', user='foo', host='example.net'), 'http://foo@example.net'), (dict(scheme='https', user='foo', host='example.net'), 'https://foo@example.net'), (dict(scheme='http', user='foo', host='example.net', port='123'), 'http://foo@example.net:123'), (dict(scheme='https', user='foo', host='example.net', port='123'), 'https://foo@example.net:123'), (dict(scheme='http', user='foo', password='x', host='example.net'), 'http://foo:x@example.net'), (dict(scheme='http2', user='foo', password='x', host='example.net'), 'http2://foo:x@example.net'), (dict(scheme='http', host='example.net', query='a=b', fragment='c=d'), 'http://example.net?a=b#c=d'), (dict(scheme='http2', host='example.net', query='a=b', fragment='c=d'), 'http2://example.net?a=b#c=d'),
(dict(scheme='http', host='example.net', port='1234'), 'http://example.net:1234'), (dict(scheme='https', host='example.net', port='1234'), 'https://example.net:1234'), ], ) @pytest.mark.parametrize('klass', [AnyHttpUrl, HttpUrl]) def test_build_any_http_url(klass, kwargs, expected): assert klass(None, **kwargs) == expected @pytest.mark.parametrize( 'klass, kwargs,expected', [ (AnyHttpUrl, dict(scheme='http', user='foo', host='example.net', port='80'), 'http://foo@example.net:80'), (AnyHttpUrl, dict(scheme='https', user='foo', host='example.net', port='443'), 'https://foo@example.net:443'), (HttpUrl, dict(scheme='http', user='foo', host='example.net', port='80'), 'http://foo@example.net'), (HttpUrl, dict(scheme='https', user='foo', host='example.net', port='443'), 'https://foo@example.net'), ], ) def test_build_http_url_port(klass, kwargs, expected): assert klass(None, **kwargs) == expected def test_json(): class Model(BaseModel): v: HttpUrl m = Model(v='http://foo@example.net') assert m.json() == '{"v": "http://foo@example.net"}' assert m.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'v': {'title': 'V', 'minLength': 1, 'maxLength': 2083, 'type': 'string', 'format': 'uri'}}, 'required': ['v'], } @pytest.mark.skipif(not email_validator, reason='email_validator not installed') @pytest.mark.parametrize( 'value,name,email', [ ('foobar@example.com', 'foobar', 'foobar@example.com'), ('s@muelcolvin.com', 's', 's@muelcolvin.com'), ('Samuel Colvin <s@muelcolvin.com>', 'Samuel Colvin', 's@muelcolvin.com'), ('foobar <foobar@example.com>', 'foobar', 'foobar@example.com'), (' foo.bar@example.com', 'foo.bar', 'foo.bar@example.com'), ('foo.bar@example.com ', 'foo.bar', 'foo.bar@example.com'), ('foo BAR <foobar@example.com>', 'foo BAR', 'foobar@example.com'), ('FOO bar <foobar@example.com>', 'FOO bar', 'foobar@example.com'), ('<FOOBAR@example.com> ', 'FOOBAR', 'FOOBAR@example.com'), ('ñoñó@example.com', 'ñoñó', 'ñoñó@example.com'), ('我買@example.com', '我買', '我買@example.com'), ('甲斐黒川日本@example.com', '甲斐黒川日本', '甲斐黒川日本@example.com'), ( 'чебурашкаящик-с-апельсинами.рф@example.com', 'чебурашкаящик-с-апельсинами.рф', 'чебурашкаящик-с-апельсинами.рф@example.com', ), ('उदाहरण.परीक्ष@domain.with.idn.tld', 'उदाहरण.परीक्ष', 'उदाहरण.परीक्ष@domain.with.idn.tld'), ('foo.bar@example.com', 'foo.bar', 'foo.bar@example.com'), ('foo.bar@exam-ple.com ', 'foo.bar', 'foo.bar@exam-ple.com'), ('ιωάννης@εεττ.gr', 'ιωάννης', 'ιωάννης@εεττ.gr'), ], ) def test_address_valid(value, name, email): assert validate_email(value) == (name, email) @pytest.mark.skipif(not email_validator, reason='email_validator not installed') @pytest.mark.parametrize( 'value', [ 'f oo.bar@example.com ', 'foo.bar@exam\nple.com ', 'foobar', 'foobar ', 'foobar <' + 'a' * 4096 + '@example.com>', ], ) def test_address_invalid(value): with pytest.raises(ValueError): validate_email(value) @pytest.mark.skipif(email_validator, reason='email_validator is installed') def test_email_validator_not_installed(): with pytest.raises(ImportError): validate_email('s@muelcolvin.com') @pytest.mark.skipif(not email_validator, reason='email_validator not installed') def test_email_str(): class Model(BaseModel): v: EmailStr assert Model(v=EmailStr('foo@example.org')).v == 'foo@example.org' assert Model(v='foo@example.org').v == 'foo@example.org' @pytest.mark.skipif(not email_validator, reason='email_validator not installed') def test_name_email(): class Model(BaseModel): v: NameEmail assert str(Model(v=NameEmail('foo bar', 'foobaR@example.com')).v) == 'foo bar <foobaR@example.com>' assert str(Model(v='foo bar <foobaR@example.com>').v) == 'foo bar <foobaR@example.com>' assert NameEmail('foo bar', 'foobaR@example.com')
== NameEmail('foo bar', 'foobaR@example.com') assert NameEmail('foo bar', 'foobaR@example.com') != NameEmail('foo bar', 'different@example.com') pydantic-1.10.14/tests/test_networks_ipaddress.py000066400000000000000000000416351455251250200221700ustar00rootroot00000000000000from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network import pytest from pydantic import BaseModel, IPvAnyAddress, IPvAnyInterface, IPvAnyNetwork, ValidationError # # ipaddress.IPv4Address # ipaddress.IPv6Address # pydantic.IPvAnyAddress # @pytest.mark.parametrize( 'value,cls', [ ('0.0.0.0', IPv4Address), ('1.1.1.1', IPv4Address), ('10.10.10.10', IPv4Address), ('192.168.0.1', IPv4Address), ('255.255.255.255', IPv4Address), ('::1:0:1', IPv6Address), ('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', IPv6Address), (b'\x00\x00\x00\x00', IPv4Address), (b'\x01\x01\x01\x01', IPv4Address), (b'\n\n\n\n', IPv4Address), (b'\xc0\xa8\x00\x01', IPv4Address), (b'\xff\xff\xff\xff', IPv4Address), (b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01', IPv6Address), (b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Address), (0, IPv4Address), (16_843_009, IPv4Address), (168_430_090, IPv4Address), (3_232_235_521, IPv4Address), (4_294_967_295, IPv4Address), (4_294_967_297, IPv6Address), (340_282_366_920_938_463_463_374_607_431_768_211_455, IPv6Address), (IPv4Address('192.168.0.1'), IPv4Address), (IPv6Address('::1:0:1'), IPv6Address), ], ) def test_ipaddress_success(value, cls): class Model(BaseModel): ip: IPvAnyAddress assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value', [ '0.0.0.0', '1.1.1.1', '10.10.10.10', '192.168.0.1', '255.255.255.255', b'\x00\x00\x00\x00', b'\x01\x01\x01\x01', b'\n\n\n\n', b'\xc0\xa8\x00\x01', b'\xff\xff\xff\xff', 0, 16_843_009, 168_430_090, 3_232_235_521, 4_294_967_295, IPv4Address('0.0.0.0'), IPv4Address('1.1.1.1'), IPv4Address('10.10.10.10'), IPv4Address('192.168.0.1'), IPv4Address('255.255.255.255'), ], ) def test_ipv4address_success(value): class Model(BaseModel): ipv4: IPv4Address assert Model(ipv4=value).ipv4 == IPv4Address(value) @pytest.mark.parametrize( 'value', [ '::1:0:1', 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01', b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', 4_294_967_297, 340_282_366_920_938_463_463_374_607_431_768_211_455, IPv6Address('::1:0:1'), IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'), ], ) def test_ipv6address_success(value): class Model(BaseModel): ipv6: IPv6Address assert Model(ipv6=value).ipv6 == IPv6Address(value) @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ( '192.168.0.1.1.1', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ( -1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ( 2**128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ], ) def test_ipaddress_fails(value, errors): class Model(BaseModel): ip: IPvAnyAddress with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 
'value_error.ipv4address'}], ), ( '192.168.0.1.1.1', [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}], ), (-1, [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}]), ( 2**32 + 1, [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}], ), ( IPv6Address('::0:1:0'), [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}], ), ], ) def test_ipv4address_fails(value, errors): class Model(BaseModel): ipv4: IPv4Address with pytest.raises(ValidationError) as exc_info: Model(ipv4=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), ( '192.168.0.1.1.1', [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), (-1, [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}]), ( 2**128 + 1, [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), ( IPv4Address('192.168.0.1'), [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), ], ) def test_ipv6address_fails(value, errors): class Model(BaseModel): ipv6: IPv6Address with pytest.raises(ValidationError) as exc_info: Model(ipv6=value) assert exc_info.value.errors() == errors # # ipaddress.IPv4Network # ipaddress.IPv6Network # pydantic.IPvAnyNetwork # @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Network), ('192.168.128.0/30', IPv4Network), ('2001:db00::0/120', IPv6Network), (2**32 - 1, IPv4Network), # no mask equals to mask /32 (20_282_409_603_651_670_423_947_251_286_015, IPv6Network), # /128 (b'\xff\xff\xff\xff', IPv4Network), # /32 (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Network), (('192.168.0.0', 24), IPv4Network), (('2001:db00::0', 120), IPv6Network), (IPv4Network('192.168.0.0/24'), IPv4Network), ], ) def test_ipnetwork_success(value, cls): class Model(BaseModel): ip: IPvAnyNetwork = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Network), ('192.168.128.0/30', IPv4Network), (2**32 - 1, IPv4Network), # no mask equals to mask /32 (b'\xff\xff\xff\xff', IPv4Network), # /32 (('192.168.0.0', 24), IPv4Network), (IPv4Network('192.168.0.0/24'), IPv4Network), ], ) def test_ip_v4_network_success(value, cls): class Model(BaseModel): ip: IPv4Network = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('2001:db00::0/120', IPv6Network), (20_282_409_603_651_670_423_947_251_286_015, IPv6Network), # /128 (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Network), (('2001:db00::0', 120), IPv6Network), (IPv6Network('2001:db00::0/120'), IPv6Network), ], ) def test_ip_v6_network_success(value, cls): class Model(BaseModel): ip: IPv6Network = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 'value_error.ipvanynetwork'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 'value_error.ipvanynetwork'}], ), ( -1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 
'value_error.ipvanynetwork'}], ), ( 2**128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 'value_error.ipvanynetwork'}], ), ], ) def test_ipnetwork_fails(value, errors): class Model(BaseModel): ip: IPvAnyNetwork = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), (-1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}]), ( 2**128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), ( '2001:db00::1/120', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), ], ) def test_ip_v4_network_fails(value, errors): class Model(BaseModel): ip: IPv4Network = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), (-1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}]), ( 2**128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), ( '192.168.0.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), ], ) def test_ip_v6_network_fails(value, errors): class Model(BaseModel): ip: IPv6Network = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors # # ipaddress.IPv4Interface # ipaddress.IPv6Interface # pydantic.IPvAnyInterface # @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Interface), ('192.168.0.1/24', IPv4Interface), ('192.168.128.0/30', IPv4Interface), ('192.168.128.1/30', IPv4Interface), ('2001:db00::0/120', IPv6Interface), ('2001:db00::1/120', IPv6Interface), (2**32 - 1, IPv4Interface), # no mask equals to mask /32 (2**32 - 1, IPv4Interface), # so ``strict`` has no effect (20_282_409_603_651_670_423_947_251_286_015, IPv6Interface), # /128 (20_282_409_603_651_670_423_947_251_286_014, IPv6Interface), (b'\xff\xff\xff\xff', IPv4Interface), # /32 (b'\xff\xff\xff\xff', IPv4Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (('192.168.0.0', 24), IPv4Interface), (('192.168.0.1', 24), IPv4Interface), (('2001:db00::0', 120), IPv6Interface), (('2001:db00::1', 120), IPv6Interface), (IPv4Interface('192.168.0.0/24'), IPv4Interface), (IPv4Interface('192.168.0.1/24'), IPv4Interface), (IPv6Interface('2001:db00::0/120'), IPv6Interface), (IPv6Interface('2001:db00::1/120'), IPv6Interface), ], ) def test_ipinterface_success(value, cls): class Model(BaseModel): ip: IPvAnyInterface = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Interface), ('192.168.0.1/24', IPv4Interface), ('192.168.128.0/30', IPv4Interface), ('192.168.128.1/30', 
IPv4Interface), (2**32 - 1, IPv4Interface), # no mask equals to mask /32 (2**32 - 1, IPv4Interface), # so ``strict`` has no effect (b'\xff\xff\xff\xff', IPv4Interface), # /32 (b'\xff\xff\xff\xff', IPv4Interface), (('192.168.0.0', 24), IPv4Interface), (('192.168.0.1', 24), IPv4Interface), (IPv4Interface('192.168.0.0/24'), IPv4Interface), (IPv4Interface('192.168.0.1/24'), IPv4Interface), ], ) def test_ip_v4_interface_success(value, cls): class Model(BaseModel): ip: IPv4Interface assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('2001:db00::0/120', IPv6Interface), ('2001:db00::1/120', IPv6Interface), (20_282_409_603_651_670_423_947_251_286_015, IPv6Interface), # /128 (20_282_409_603_651_670_423_947_251_286_014, IPv6Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (('2001:db00::0', 120), IPv6Interface), (('2001:db00::1', 120), IPv6Interface), (IPv6Interface('2001:db00::0/120'), IPv6Interface), (IPv6Interface('2001:db00::1/120'), IPv6Interface), ], ) def test_ip_v6_interface_success(value, cls): class Model(BaseModel): ip: IPv6Interface = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ( '192.168.0.1.1.1/24', [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ( -1, [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ( 2**128 + 1, [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ], ) def test_ipinterface_fails(value, errors): class Model(BaseModel): ip: IPvAnyInterface = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}], ), (-1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}]), ( 2**128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}], ), ], ) def test_ip_v4_interface_fails(value, errors): class Model(BaseModel): ip: IPv4Interface = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}], ), (-1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}]), ( 2**128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}], ), ], ) def test_ip_v6_interface_fails(value, errors): class Model(BaseModel): ip: IPv6Interface = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors 
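# Editor's note: illustrative sketch, not part of the upstream suite. It shows
# the behaviour the parametrized tests above rely on: IPvAnyAddress attempts
# IPv4Address first and falls back to IPv6Address, so the concrete runtime
# type of the parsed value depends on the input.
def test_ipvanyaddress_concrete_type_sketch():
    class Model(BaseModel):
        ip: IPvAnyAddress

    assert isinstance(Model(ip='127.0.0.1').ip, IPv4Address)
    assert isinstance(Model(ip='::1').ip, IPv6Address)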
pydantic-1.10.14/tests/test_orm_mode.py000066400000000000000000000206221455251250200200500ustar00rootroot00000000000000from types import SimpleNamespace from typing import Any, Dict, List import pytest from pydantic import BaseModel, ConfigError, ValidationError, root_validator from pydantic.utils import GetterDict def test_getdict(): class TestCls: a = 1 b: int def __init__(self): self.c = 3 @property def d(self): return 4 def __getattr__(self, key): if key == 'e': return 5 else: raise AttributeError() t = TestCls() gd = GetterDict(t) assert gd.keys() == ['a', 'c', 'd'] assert gd.get('a') == 1 assert gd['a'] == 1 with pytest.raises(KeyError): assert gd['foobar'] assert gd.get('b', None) is None assert gd.get('b', 1234) == 1234 assert gd.get('c', None) == 3 assert gd.get('d', None) == 4 assert gd.get('e', None) == 5 assert gd.get('f', 'missing') == 'missing' assert list(gd.values()) == [1, 3, 4] assert list(gd.items()) == [('a', 1), ('c', 3), ('d', 4)] assert list(gd) == ['a', 'c', 'd'] assert gd == {'a': 1, 'c': 3, 'd': 4} assert 'a' in gd assert len(gd) == 3 assert str(gd) == "{'a': 1, 'c': 3, 'd': 4}" assert repr(gd) == "GetterDict[TestCls]({'a': 1, 'c': 3, 'd': 4})" def test_orm_mode_root(): class PokemonCls: def __init__(self, *, en_name: str, jp_name: str): self.en_name = en_name self.jp_name = jp_name class Pokemon(BaseModel): en_name: str jp_name: str class Config: orm_mode = True class PokemonList(BaseModel): __root__: List[Pokemon] class Config: orm_mode = True pika = PokemonCls(en_name='Pikachu', jp_name='ピカチュウ') bulbi = PokemonCls(en_name='Bulbasaur', jp_name='フシギダネ') pokemons = PokemonList.from_orm([pika, bulbi]) assert pokemons.__root__ == [ Pokemon(en_name='Pikachu', jp_name='ピカチュウ'), Pokemon(en_name='Bulbasaur', jp_name='フシギダネ'), ] class PokemonDict(BaseModel): __root__: Dict[str, Pokemon] class Config: orm_mode = True pokemons = PokemonDict.from_orm({'pika': pika, 'bulbi': bulbi}) assert pokemons.__root__ == { 'pika': Pokemon(en_name='Pikachu', jp_name='ピカチュウ'), 'bulbi': Pokemon(en_name='Bulbasaur', jp_name='フシギダネ'), } def test_orm_mode(): class PetCls: def __init__(self, *, name: str, species: str): self.name = name self.species = species class PersonCls: def __init__(self, *, name: str, age: float = None, pets: List[PetCls]): self.name = name self.age = age self.pets = pets class Pet(BaseModel): name: str species: str class Config: orm_mode = True class Person(BaseModel): name: str age: float = None pets: List[Pet] class Config: orm_mode = True bones = PetCls(name='Bones', species='dog') orion = PetCls(name='Orion', species='cat') anna = PersonCls(name='Anna', age=20, pets=[bones, orion]) anna_model = Person.from_orm(anna) assert anna_model.dict() == { 'name': 'Anna', 'pets': [{'name': 'Bones', 'species': 'dog'}, {'name': 'Orion', 'species': 'cat'}], 'age': 20.0, } def test_not_orm_mode(): class Pet(BaseModel): name: str species: str with pytest.raises(ConfigError): Pet.from_orm(None) def test_object_with_getattr(): class FooGetAttr: def __getattr__(self, key: str): if key == 'foo': return 'Foo' else: raise AttributeError class Model(BaseModel): foo: str bar: int = 1 class Config: orm_mode = True class ModelInvalid(BaseModel): foo: str bar: int class Config: orm_mode = True foo = FooGetAttr() model = Model.from_orm(foo) assert model.foo == 'Foo' assert model.bar == 1 assert model.dict(exclude_unset=True) == {'foo': 'Foo'} with pytest.raises(ValidationError): ModelInvalid.from_orm(foo) def test_properties(): class XyProperty: x = 4 @property def y(self): return '5' class 
Model(BaseModel): x: int y: int class Config: orm_mode = True model = Model.from_orm(XyProperty()) assert model.x == 4 assert model.y == 5 def test_extra_allow(): class TestCls: x = 1 y = 2 class Model(BaseModel): x: int class Config: orm_mode = True extra = 'allow' model = Model.from_orm(TestCls()) assert model.dict() == {'x': 1} def test_extra_forbid(): class TestCls: x = 1 y = 2 class Model(BaseModel): x: int class Config: orm_mode = True extra = 'forbid' model = Model.from_orm(TestCls()) assert model.dict() == {'x': 1} def test_root_validator(): validator_value = None class TestCls: x = 1 y = 2 class Model(BaseModel): x: int y: int z: int @root_validator(pre=True) def change_input_data(cls, value): nonlocal validator_value validator_value = value return {**value, 'z': value['x'] + value['y']} class Config: orm_mode = True model = Model.from_orm(TestCls()) assert model.dict() == {'x': 1, 'y': 2, 'z': 3} assert isinstance(validator_value, GetterDict) assert validator_value == {'x': 1, 'y': 2} def test_custom_getter_dict(): class TestCls: x = 1 y = 2 def custom_getter_dict(obj): assert isinstance(obj, TestCls) return {'x': 42, 'y': 24} class Model(BaseModel): x: int y: int class Config: orm_mode = True getter_dict = custom_getter_dict model = Model.from_orm(TestCls()) assert model.dict() == {'x': 42, 'y': 24} def test_custom_getter_dict_derived_model_class(): class CustomCollection: __custom__ = True def __iter__(self): yield from range(5) class Example: def __init__(self, *args, **kwargs): self.col = CustomCollection() self.id = 1 self.name = 'name' class MyGetterDict(GetterDict): def get(self, key: Any, default: Any = None) -> Any: res = getattr(self._obj, key, default) if hasattr(res, '__custom__'): return list(res) return res class ExampleBase(BaseModel): name: str col: List[int] class ExampleOrm(ExampleBase): id: int class Config: orm_mode = True getter_dict = MyGetterDict model = ExampleOrm.from_orm(Example()) assert model.dict() == {'name': 'name', 'col': [0, 1, 2, 3, 4], 'id': 1} def test_recursive_parsing(): class Getter(GetterDict): # try to read the modified property name # either as an attribute or as a key def get(self, key, default): key = key + key try: v = self._obj[key] return Getter(v) if isinstance(v, dict) else v except TypeError: return getattr(self._obj, key, default) except KeyError: return default class Model(BaseModel): class Config: orm_mode = True getter_dict = Getter class ModelA(Model): a: int class ModelB(Model): b: ModelA # test recursive parsing with object attributes dct = dict(bb=SimpleNamespace(aa=1)) assert ModelB.from_orm(dct) == ModelB(b=ModelA(a=1)) # test recursive parsing with dict keys obj = dict(bb=dict(aa=1)) assert ModelB.from_orm(obj) == ModelB(b=ModelA(a=1)) def test_nested_orm(): class User(BaseModel): first_name: str last_name: str class Config: orm_mode = True class State(BaseModel): user: User class Config: orm_mode = True # Pass an "orm instance" State.from_orm(SimpleNamespace(user=SimpleNamespace(first_name='John', last_name='Appleseed'))) # Pass dictionary data directly State(**{'user': {'first_name': 'John', 'last_name': 'Appleseed'}}) pydantic-1.10.14/tests/test_parse.py000066400000000000000000000124711455251250200173640ustar00rootroot00000000000000import json import pickle from typing import List, Tuple, Union import pytest from pydantic import BaseModel, Field, Protocol, ValidationError, parse_obj_as class Model(BaseModel): a: float b: int = 10 def test_obj(): m = Model.parse_obj(dict(a=10.2)) assert str(m) == 'a=10.2 b=10' def 
test_parse_obj_fails(): with pytest.raises(ValidationError) as exc_info: Model.parse_obj([1, 2, 3]) assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'Model expected dict not list', 'type': 'type_error'} ] def test_parse_obj_submodel(): m = Model.parse_obj(Model(a=10.2)) assert m.dict() == {'a': 10.2, 'b': 10} def test_parse_obj_wrong_model(): class Foo(BaseModel): c = 123 with pytest.raises(ValidationError) as exc_info: Model.parse_obj(Foo()) assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_parse_obj_root(): class MyModel(BaseModel): __root__: str m = MyModel.parse_obj('a') assert m.dict() == {'__root__': 'a'} assert m.__root__ == 'a' assert MyModel.parse_obj(m) == m def test_parse_root_list(): class MyModel(BaseModel): __root__: List[str] m = MyModel.parse_obj(['a']) assert m.dict() == {'__root__': ['a']} assert m.__root__ == ['a'] def test_parse_nested_root_list(): class NestedData(BaseModel): id: str class NestedModel(BaseModel): __root__: List[NestedData] class MyModel(BaseModel): nested: NestedModel m = MyModel.parse_obj({'nested': [{'id': 'foo'}]}) assert isinstance(m.nested, NestedModel) assert isinstance(m.nested.__root__[0], NestedData) def test_parse_nested_root_tuple(): class NestedData(BaseModel): id: str class NestedModel(BaseModel): __root__: Tuple[int, NestedData] class MyModel(BaseModel): nested: List[NestedModel] data = [0, {'id': 'foo'}] m = MyModel.parse_obj({'nested': [data]}) assert isinstance(m.nested[0], NestedModel) assert isinstance(m.nested[0].__root__[1], NestedData) nested = parse_obj_as(NestedModel, data) assert isinstance(nested, NestedModel) def test_parse_nested_custom_root(): class NestedModel(BaseModel): __root__: List[str] class MyModel(BaseModel): __root__: NestedModel nested = ['foo', 'bar'] m = MyModel.parse_obj(nested) assert isinstance(m, MyModel) assert isinstance(m.__root__, NestedModel) assert isinstance(m.__root__.__root__, List) assert isinstance(m.__root__.__root__[0], str) def test_json(): assert Model.parse_raw('{"a": 12, "b": 8}') == Model(a=12, b=8) def test_json_ct(): assert Model.parse_raw('{"a": 12, "b": 8}', content_type='application/json') == Model(a=12, b=8) def test_pickle_ct(): data = pickle.dumps(dict(a=12, b=8)) assert Model.parse_raw(data, content_type='application/pickle', allow_pickle=True) == Model(a=12, b=8) def test_pickle_proto(): data = pickle.dumps(dict(a=12, b=8)) assert Model.parse_raw(data, proto=Protocol.pickle, allow_pickle=True) == Model(a=12, b=8) def test_pickle_not_allowed(): data = pickle.dumps(dict(a=12, b=8)) with pytest.raises(RuntimeError): Model.parse_raw(data, proto=Protocol.pickle) def test_bad_ct(): with pytest.raises(ValidationError) as exc_info: Model.parse_raw('{"a": 12, "b": 8}', content_type='application/missing') assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'Unknown content-type: application/missing', 'type': 'type_error'} ] def test_bad_proto(): with pytest.raises(ValidationError) as exc_info: Model.parse_raw('{"a": 12, "b": 8}', proto='foobar') assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'Unknown protocol: foobar', 'type': 'type_error'}] def test_file_json(tmpdir): p = tmpdir.join('test.json') p.write('{"a": 12, "b": 8}') assert Model.parse_file(str(p)) == Model(a=12, b=8) def test_file_json_no_ext(tmpdir): p = tmpdir.join('test') p.write('{"a": 12, "b": 8}') assert Model.parse_file(str(p)) == Model(a=12, b=8) def test_file_json_loads(tmp_path): def 
custom_json_loads(*args, **kwargs):
        data = json.loads(*args, **kwargs)
        data['a'] = 99
        return data

    class Example(BaseModel):
        a: int

        class Config:
            json_loads = custom_json_loads

    p = tmp_path / 'test_json_loads.json'
    p.write_text('{"a": 12}')
    assert Example.parse_file(p) == Example(a=99)


def test_file_pickle(tmpdir):
    p = tmpdir.join('test.pkl')
    p.write_binary(pickle.dumps(dict(a=12, b=8)))
    assert Model.parse_file(str(p), allow_pickle=True) == Model(a=12, b=8)


def test_file_pickle_no_ext(tmpdir):
    p = tmpdir.join('test')
    p.write_binary(pickle.dumps(dict(a=12, b=8)))
    assert Model.parse_file(str(p), content_type='application/pickle', allow_pickle=True) == Model(a=12, b=8)


def test_const_differentiates_union():
    class SubModelA(BaseModel):
        key: str = Field('A', const=True)
        foo: int

    class SubModelB(BaseModel):
        key: str = Field('B', const=True)
        foo: int

    class Model(BaseModel):
        a: Union[SubModelA, SubModelB]

    m = Model.parse_obj({'a': {'key': 'B', 'foo': 3}})
    assert isinstance(m.a, SubModelB)
pydantic-1.10.14/tests/test_private_attributes.py000066400000000000000000000152441455251250200221730ustar00rootroot00000000000000from typing import ClassVar, Generic, TypeVar

import pytest

from pydantic import BaseModel, Extra, PrivateAttr
from pydantic.fields import Undefined
from pydantic.generics import GenericModel


def test_private_attribute():
    default = {'a': {}}

    class Model(BaseModel):
        __foo__ = PrivateAttr(default)

    assert Model.__slots__ == {'__foo__'}
    assert repr(Model.__foo__) == "<member '__foo__' of 'Model' objects>"
    assert Model.__private_attributes__ == {'__foo__': PrivateAttr(default)}

    m = Model()
    assert m.__foo__ == default
    assert m.__foo__ is not default
    assert m.__foo__['a'] is not default['a']

    m.__foo__ = None
    assert m.__foo__ is None

    assert m.dict() == {}
    assert m.__dict__ == {}


def test_private_attribute_factory():
    default = {'a': {}}

    def factory():
        return default

    class Model(BaseModel):
        __foo__ = PrivateAttr(default_factory=factory)

    assert Model.__slots__ == {'__foo__'}
    assert repr(Model.__foo__) == "<member '__foo__' of 'Model' objects>"
    assert Model.__private_attributes__ == {'__foo__': PrivateAttr(default_factory=factory)}

    m = Model()
    assert m.__foo__ == default
    assert m.__foo__ is default
    assert m.__foo__['a'] is default['a']

    m.__foo__ = None
    assert m.__foo__ is None

    assert m.dict() == {}
    assert m.__dict__ == {}


def test_private_attribute_annotation():
    class Model(BaseModel):
        """The best model"""

        __foo__: str

        class Config:
            underscore_attrs_are_private = True

    assert Model.__slots__ == {'__foo__'}
    assert repr(Model.__foo__) == "<member '__foo__' of 'Model' objects>"
    assert Model.__private_attributes__ == {'__foo__': PrivateAttr(Undefined)}
    assert repr(Model.__doc__) == "'The best model'"

    m = Model()
    with pytest.raises(AttributeError):
        m.__foo__

    m.__foo__ = '123'
    assert m.__foo__ == '123'

    m.__foo__ = None
    assert m.__foo__ is None

    del m.__foo__

    with pytest.raises(AttributeError):
        m.__foo__

    m.__foo__ = '123'
    assert m.__foo__ == '123'

    assert m.dict() == {}
    assert m.__dict__ == {}


def test_underscore_attrs_are_private():
    class Model(BaseModel):
        __foo__: str = 'abc'
        __bar__: ClassVar[str] = 'cba'

        class Config:
            underscore_attrs_are_private = True

    assert Model.__slots__ == {'__foo__'}
    assert repr(Model.__foo__) == "<member '__foo__' of 'Model' objects>"
    assert Model.__bar__ == 'cba'
    assert Model.__private_attributes__ == {'__foo__': PrivateAttr('abc')}

    m = Model()
    assert m.__foo__ == 'abc'
    m.__foo__ = None
    assert m.__foo__ is None

    with pytest.raises(ValueError, match='"Model" object has no field "__bar__"'):
        m.__bar__ = 1
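# Editor's note: illustrative sketch, not part of the upstream suite. However a
# private attribute is declared, it is stored in __slots__ rather than in
# __dict__, so it never appears in .dict() or .json() output.
def test_private_attr_excluded_from_export_sketch():
    class Model(BaseModel):
        x: int = 1
        _counter: int = PrivateAttr(default=0)

    m = Model()
    m._counter = 5
    assert m.dict() == {'x': 1}
    assert m.json() == '{"x": 1}'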
def test_private_attribute_intersection_with_extra_field():
    class Model(BaseModel):
        __foo__ = PrivateAttr('private_attribute')

        class Config:
            extra = Extra.allow

    assert Model.__slots__ == {'__foo__'}

    m = Model(__foo__='field')
    assert m.__foo__ == 'private_attribute'
    assert m.__dict__ == m.dict() == {'__foo__': 'field'}

    m.__foo__ = 'still_private'
    assert m.__foo__ == 'still_private'
    assert m.__dict__ == m.dict() == {'__foo__': 'field'}


def test_private_attribute_invalid_name():
    with pytest.raises(
        NameError,
        match='Private attributes "foo" must not be a valid field name; '
        'Use sunder or dunder names, e. g. "_foo" or "__foo__"',
    ):

        class Model(BaseModel):
            foo = PrivateAttr()


def test_slots_are_ignored():
    class Model(BaseModel):
        __slots__ = (
            'foo',
            '_bar',
        )

        def __init__(self):
            super().__init__()
            for attr_ in self.__slots__:
                object.__setattr__(self, attr_, 'spam')

    assert Model.__private_attributes__ == {}
    assert set(Model.__slots__) == {'foo', '_bar'}

    m = Model()

    for attr in Model.__slots__:
        assert object.__getattribute__(m, attr) == 'spam'
        with pytest.raises(ValueError, match=f'"Model" object has no field "{attr}"'):
            setattr(m, attr, 'not spam')


def test_default_and_default_factory_used_error():
    with pytest.raises(ValueError, match='cannot specify both default and default_factory'):
        PrivateAttr(default=123, default_factory=lambda: 321)


def test_config_override_init():
    class MyModel(BaseModel):
        x: str
        _private_attr: int

        def __init__(self, **data) -> None:
            super().__init__(**data)
            self._private_attr = 123

        class Config:
            underscore_attrs_are_private = True

    m = MyModel(x='hello')
    assert m.dict() == {'x': 'hello'}
    assert m._private_attr == 123


def test_generic_private_attribute():
    T = TypeVar('T')

    class Model(GenericModel, Generic[T]):
        value: T
        _private_value: T

        class Config:
            underscore_attrs_are_private = True

    m = Model[int](value=1, _private_attr=3)
    m._private_value = 3
    assert m.dict() == {'value': 1}


def test_private_attribute_multiple_inheritance():
    # We need to test this since PrivateAttr uses __slots__ and that has some restrictions with regard to
    # multiple inheritance
    default = {'a': {}}

    class GrandParentModel(BaseModel):
        __foo__ = PrivateAttr(default)

    class ParentAModel(GrandParentModel):
        pass

    class ParentBModel(GrandParentModel):
        __bar__ = PrivateAttr(default)

    class Model(ParentAModel, ParentBModel):
        __baz__ = PrivateAttr(default)

    assert GrandParentModel.__slots__ == {'__foo__'}
    assert ParentBModel.__slots__ == {'__bar__'}
    assert Model.__slots__ == {'__baz__'}
    assert repr(Model.__foo__) == "<member '__foo__' of 'GrandParentModel' objects>"
    assert repr(Model.__bar__) == "<member '__bar__' of 'ParentBModel' objects>"
    assert repr(Model.__baz__) == "<member '__baz__' of 'Model' objects>"
    assert Model.__private_attributes__ == {
        '__foo__': PrivateAttr(default),
        '__bar__': PrivateAttr(default),
        '__baz__': PrivateAttr(default),
    }

    m = Model()
    assert m.__foo__ == default
    assert m.__foo__ is not default
    assert m.__foo__['a'] is not default['a']
    assert m.__bar__ == default
    assert m.__bar__ is not default
    assert m.__bar__['a'] is not default['a']
    assert m.__baz__ == default
    assert m.__baz__ is not default
    assert m.__baz__['a'] is not default['a']

    m.__foo__ = None
    assert m.__foo__ is None
    m.__bar__ = None
    assert m.__bar__ is None
    m.__baz__ = None
    assert m.__baz__ is None

    assert m.dict() == {}
    assert m.__dict__ == {}
pydantic-1.10.14/tests/test_rich_repr.py000066400000000000000000000012531455251250200202230ustar00rootroot00000000000000from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel
from pydantic.color import Color


class User(BaseModel):
    id: int
    name: str = 'John Doe'
    signup_ts: Optional[datetime] = None
    friends: List[int] = []


def test_rich_repr() -> None:
    user = 
User(id=22) rich_repr = list(user.__rich_repr__()) assert rich_repr == [ ('id', 22), ('name', 'John Doe'), ('signup_ts', None), ('friends', []), ] def test_rich_repr_color() -> None: color = Color((10, 20, 30, 0.1)) rich_repr = list(color.__rich_repr__()) assert rich_repr == ['#0a141e1a', ('rgb', (10, 20, 30, 0.1))] pydantic-1.10.14/tests/test_schema.py000066400000000000000000002730311455251250200175130ustar00rootroot00000000000000import json import math import os import re import sys import tempfile from datetime import date, datetime, time, timedelta from decimal import Decimal from enum import Enum, IntEnum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import ( Any, Callable, Deque, Dict, FrozenSet, Generic, Iterable, List, NamedTuple, NewType, Optional, Pattern, Set, Tuple, Type, TypeVar, Union, ) from uuid import UUID import pytest from typing_extensions import Annotated, Literal from pydantic import BaseModel, Extra, Field, ValidationError, confrozenset, conlist, conset, validator from pydantic.color import Color from pydantic.dataclasses import dataclass from pydantic.fields import ModelField from pydantic.generics import GenericModel from pydantic.networks import AnyUrl, EmailStr, IPvAnyAddress, IPvAnyInterface, IPvAnyNetwork, NameEmail, stricturl from pydantic.schema import ( get_flat_models_from_model, get_flat_models_from_models, get_model_name_map, model_process_schema, model_schema, schema, ) from pydantic.types import ( UUID1, UUID3, UUID4, UUID5, ConstrainedBytes, ConstrainedDate, ConstrainedDecimal, ConstrainedFloat, ConstrainedInt, ConstrainedStr, DirectoryPath, FilePath, Json, NegativeFloat, NegativeInt, NoneBytes, NoneStr, NoneStrBytes, NonNegativeFloat, NonNegativeInt, NonPositiveFloat, NonPositiveInt, PositiveFloat, PositiveInt, PyObject, SecretBytes, SecretStr, StrBytes, StrictBool, StrictStr, conbytes, condate, condecimal, confloat, conint, constr, ) try: import email_validator except ImportError: email_validator = None T = TypeVar('T') def test_key(): class ApplePie(BaseModel): """ This is a test. 
""" a: float b: int = 10 s = { 'type': 'object', 'properties': {'a': {'type': 'number', 'title': 'A'}, 'b': {'type': 'integer', 'title': 'B', 'default': 10}}, 'required': ['a'], 'title': 'ApplePie', 'description': 'This is a test.', } assert ApplePie.__schema_cache__.keys() == set() assert ApplePie.schema() == s assert ApplePie.__schema_cache__.keys() == {(True, '#/definitions/{model}')} assert ApplePie.schema() == s def test_by_alias(): class ApplePie(BaseModel): a: float b: int = 10 class Config: title = 'Apple Pie' fields = {'a': 'Snap', 'b': 'Crackle'} assert ApplePie.schema() == { 'type': 'object', 'title': 'Apple Pie', 'properties': { 'Snap': {'type': 'number', 'title': 'Snap'}, 'Crackle': {'type': 'integer', 'title': 'Crackle', 'default': 10}, }, 'required': ['Snap'], } assert list(ApplePie.schema(by_alias=True)['properties'].keys()) == ['Snap', 'Crackle'] assert list(ApplePie.schema(by_alias=False)['properties'].keys()) == ['a', 'b'] def test_ref_template(): class KeyLimePie(BaseModel): x: str = None class ApplePie(BaseModel): a: float = None key_lime: KeyLimePie = None class Config: title = 'Apple Pie' assert ApplePie.schema(ref_template='foobar/{model}.json') == { 'title': 'Apple Pie', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'number'}, 'key_lime': {'$ref': 'foobar/KeyLimePie.json'}}, 'definitions': { 'KeyLimePie': { 'title': 'KeyLimePie', 'type': 'object', 'properties': {'x': {'title': 'X', 'type': 'string'}}, }, }, } assert ApplePie.schema()['properties']['key_lime'] == {'$ref': '#/definitions/KeyLimePie'} json_schema = ApplePie.schema_json(ref_template='foobar/{model}.json') assert 'foobar/KeyLimePie.json' in json_schema assert '#/definitions/KeyLimePie' not in json_schema def test_by_alias_generator(): class ApplePie(BaseModel): a: float b: int = 10 class Config: @staticmethod def alias_generator(x): return x.upper() assert ApplePie.schema() == { 'title': 'ApplePie', 'type': 'object', 'properties': {'A': {'title': 'A', 'type': 'number'}, 'B': {'title': 'B', 'default': 10, 'type': 'integer'}}, 'required': ['A'], } assert ApplePie.schema(by_alias=False)['properties'].keys() == {'a', 'b'} def test_sub_model(): class Foo(BaseModel): """hello""" b: float class Bar(BaseModel): a: int b: Foo = None assert Bar.schema() == { 'type': 'object', 'title': 'Bar', 'definitions': { 'Foo': { 'type': 'object', 'title': 'Foo', 'description': 'hello', 'properties': {'b': {'type': 'number', 'title': 'B'}}, 'required': ['b'], } }, 'properties': {'a': {'type': 'integer', 'title': 'A'}, 'b': {'$ref': '#/definitions/Foo'}}, 'required': ['a'], } def test_schema_class(): class Model(BaseModel): foo: int = Field(4, title='Foo is Great') bar: str = Field(..., description='this description of bar') with pytest.raises(ValidationError): Model() m = Model(bar=123) assert m.dict() == {'foo': 4, 'bar': '123'} assert Model.schema() == { 'type': 'object', 'title': 'Model', 'properties': { 'foo': {'type': 'integer', 'title': 'Foo is Great', 'default': 4}, 'bar': {'type': 'string', 'title': 'Bar', 'description': 'this description of bar'}, }, 'required': ['bar'], } def test_schema_repr(): s = Field(4, title='Foo is Great') assert str(s) == "default=4 title='Foo is Great' extra={}" assert repr(s) == "FieldInfo(default=4, title='Foo is Great', extra={})" def test_schema_class_by_alias(): class Model(BaseModel): foo: int = Field(4, alias='foofoo') assert list(Model.schema()['properties'].keys()) == ['foofoo'] assert list(Model.schema(by_alias=False)['properties'].keys()) == ['foo'] def 
test_choices(): FooEnum = Enum('FooEnum', {'foo': 'f', 'bar': 'b'}) BarEnum = IntEnum('BarEnum', {'foo': 1, 'bar': 2}) class SpamEnum(str, Enum): foo = 'f' bar = 'b' class Model(BaseModel): foo: FooEnum bar: BarEnum spam: SpamEnum = Field(None) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'foo': {'$ref': '#/definitions/FooEnum'}, 'bar': {'$ref': '#/definitions/BarEnum'}, 'spam': {'$ref': '#/definitions/SpamEnum'}, }, 'required': ['foo', 'bar'], 'definitions': { 'FooEnum': {'title': 'FooEnum', 'description': 'An enumeration.', 'enum': ['f', 'b']}, 'BarEnum': {'title': 'BarEnum', 'description': 'An enumeration.', 'type': 'integer', 'enum': [1, 2]}, 'SpamEnum': {'title': 'SpamEnum', 'description': 'An enumeration.', 'type': 'string', 'enum': ['f', 'b']}, }, } def test_enum_modify_schema(): class SpamEnum(str, Enum): foo = 'f' bar = 'b' @classmethod def __modify_schema__(cls, field_schema): field_schema['tsEnumNames'] = [e.name for e in cls] class Model(BaseModel): spam: SpamEnum = Field(None) assert Model.schema() == { 'definitions': { 'SpamEnum': { 'description': 'An enumeration.', 'enum': ['f', 'b'], 'title': 'SpamEnum', 'tsEnumNames': ['foo', 'bar'], 'type': 'string', } }, 'properties': {'spam': {'$ref': '#/definitions/SpamEnum'}}, 'title': 'Model', 'type': 'object', } def test_enum_schema_custom_field(): class FooBarEnum(str, Enum): foo = 'foo' bar = 'bar' class Model(BaseModel): pika: FooBarEnum = Field(alias='pikalias', title='Pikapika!', description='Pika is definitely the best!') bulbi: FooBarEnum = Field('foo', alias='bulbialias', title='Bulbibulbi!', description='Bulbi is not...') cara: FooBarEnum assert Model.schema() == { 'definitions': { 'FooBarEnum': { 'description': 'An enumeration.', 'enum': ['foo', 'bar'], 'title': 'FooBarEnum', 'type': 'string', } }, 'properties': { 'pikalias': { 'allOf': [{'$ref': '#/definitions/FooBarEnum'}], 'description': 'Pika is definitely the best!', 'title': 'Pikapika!', }, 'bulbialias': { 'allOf': [{'$ref': '#/definitions/FooBarEnum'}], 'description': 'Bulbi is not...', 'title': 'Bulbibulbi!', 'default': 'foo', }, 'cara': {'$ref': '#/definitions/FooBarEnum'}, }, 'required': ['pikalias', 'cara'], 'title': 'Model', 'type': 'object', } def test_enum_and_model_have_same_behaviour(): class Names(str, Enum): rick = 'Rick' morty = 'Morty' summer = 'Summer' class Pika(BaseModel): a: str class Foo(BaseModel): enum: Names titled_enum: Names = Field( ..., title='Title of enum', description='Description of enum', ) model: Pika titled_model: Pika = Field( ..., title='Title of model', description='Description of model', ) assert Foo.schema() == { 'definitions': { 'Pika': { 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], 'title': 'Pika', 'type': 'object', }, 'Names': { 'description': 'An enumeration.', 'enum': ['Rick', 'Morty', 'Summer'], 'title': 'Names', 'type': 'string', }, }, 'properties': { 'enum': {'$ref': '#/definitions/Names'}, 'model': {'$ref': '#/definitions/Pika'}, 'titled_enum': { 'allOf': [{'$ref': '#/definitions/Names'}], 'description': 'Description of enum', 'title': 'Title of enum', }, 'titled_model': { 'allOf': [{'$ref': '#/definitions/Pika'}], 'description': 'Description of model', 'title': 'Title of model', }, }, 'required': ['enum', 'titled_enum', 'model', 'titled_model'], 'title': 'Foo', 'type': 'object', } def test_enum_includes_extra_without_other_params(): class Names(str, Enum): rick = 'Rick' morty = 'Morty' summer = 'Summer' class Foo(BaseModel): enum: Names extra_enum: Names = 
Field(..., extra='Extra field') assert Foo.schema() == { 'definitions': { 'Names': { 'description': 'An enumeration.', 'enum': ['Rick', 'Morty', 'Summer'], 'title': 'Names', 'type': 'string', }, }, 'properties': { 'enum': {'$ref': '#/definitions/Names'}, 'extra_enum': {'allOf': [{'$ref': '#/definitions/Names'}], 'extra': 'Extra field'}, }, 'required': ['enum', 'extra_enum'], 'title': 'Foo', 'type': 'object', } def test_list_enum_schema_extras(): class FoodChoice(str, Enum): spam = 'spam' egg = 'egg' chips = 'chips' class Model(BaseModel): foods: List[FoodChoice] = Field(examples=[['spam', 'egg']]) assert Model.schema() == { 'definitions': { 'FoodChoice': { 'description': 'An enumeration.', 'enum': ['spam', 'egg', 'chips'], 'title': 'FoodChoice', 'type': 'string', } }, 'properties': { 'foods': {'type': 'array', 'items': {'$ref': '#/definitions/FoodChoice'}, 'examples': [['spam', 'egg']]}, }, 'required': ['foods'], 'title': 'Model', 'type': 'object', } def test_enum_schema_cleandoc(): class FooBar(str, Enum): """ This is docstring which needs to be cleaned up """ foo = 'foo' bar = 'bar' class Model(BaseModel): enum: FooBar assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'enum': {'$ref': '#/definitions/FooBar'}}, 'required': ['enum'], 'definitions': { 'FooBar': { 'title': 'FooBar', 'description': 'This is docstring which needs to be cleaned up', 'enum': ['foo', 'bar'], 'type': 'string', } }, } def test_json_schema(): class Model(BaseModel): a = b'foobar' b = Decimal('12.34') assert json.loads(Model.schema_json(indent=2)) == { 'title': 'Model', 'type': 'object', 'properties': { 'a': {'title': 'A', 'default': 'foobar', 'type': 'string', 'format': 'binary'}, 'b': {'title': 'B', 'default': 12.34, 'type': 'number'}, }, } def test_list_sub_model(): class Foo(BaseModel): a: float class Bar(BaseModel): b: List[Foo] assert Bar.schema() == { 'title': 'Bar', 'type': 'object', 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'type': 'number', 'title': 'A'}}, 'required': ['a'], } }, 'properties': {'b': {'type': 'array', 'items': {'$ref': '#/definitions/Foo'}, 'title': 'B'}}, 'required': ['b'], } def test_optional(): class Model(BaseModel): a: Optional[str] assert Model.schema() == {'title': 'Model', 'type': 'object', 'properties': {'a': {'type': 'string', 'title': 'A'}}} def test_any(): class Model(BaseModel): a: Any b: object assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'a': {'title': 'A'}, 'b': {'title': 'B'}, }, } def test_set(): class Model(BaseModel): a: Set[int] b: set c: set = {1} assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'a': {'title': 'A', 'type': 'array', 'uniqueItems': True, 'items': {'type': 'integer'}}, 'b': {'title': 'B', 'type': 'array', 'items': {}, 'uniqueItems': True}, 'c': {'title': 'C', 'type': 'array', 'items': {}, 'default': [1], 'uniqueItems': True}, }, 'required': ['a', 'b'], } def test_const_str(): class Model(BaseModel): a: str = Field('some string', const=True) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'const': 'some string', 'default': 'some string'}}, } def test_const_false(): class Model(BaseModel): a: str = Field('some string', const=False) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'default': 'some string'}}, } @pytest.mark.parametrize( 'field_type,extra_props', [ (tuple, {'items': {}}), ( 
Tuple[str, int, Union[str, int, float], float], { 'items': [ {'type': 'string'}, {'type': 'integer'}, {'anyOf': [{'type': 'string'}, {'type': 'integer'}, {'type': 'number'}]}, {'type': 'number'}, ], 'minItems': 4, 'maxItems': 4, }, ), (Tuple[str], {'items': [{'type': 'string'}], 'minItems': 1, 'maxItems': 1}), (Tuple[()], {'maxItems': 0, 'minItems': 0}), ], ) def test_tuple(field_type, extra_props): class Model(BaseModel): a: field_type assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'array', **extra_props}}, 'required': ['a'], } def test_deque(): class Model(BaseModel): a: Deque[str] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'array', 'items': {'type': 'string'}}}, 'required': ['a'], } def test_bool(): class Model(BaseModel): a: bool assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'boolean'}}, 'required': ['a'], } def test_strict_bool(): class Model(BaseModel): a: StrictBool assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'boolean'}}, 'required': ['a'], } def test_dict(): class Model(BaseModel): a: dict assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'object'}}, 'required': ['a'], } def test_list(): class Model(BaseModel): a: list assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'array', 'items': {}}}, 'required': ['a'], } class Foo(BaseModel): a: float @pytest.mark.parametrize( 'field_type,expected_schema', [ ( Union[int, str], { 'properties': {'a': {'title': 'A', 'anyOf': [{'type': 'integer'}, {'type': 'string'}]}}, 'required': ['a'], }, ), ( List[int], {'properties': {'a': {'title': 'A', 'type': 'array', 'items': {'type': 'integer'}}}, 'required': ['a']}, ), ( Dict[str, Foo], { 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'number'}}, 'required': ['a'], } }, 'properties': { 'a': {'title': 'A', 'type': 'object', 'additionalProperties': {'$ref': '#/definitions/Foo'}} }, 'required': ['a'], }, ), ( Union[None, Foo], { 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'number'}}, 'required': ['a'], } }, 'properties': {'a': {'$ref': '#/definitions/Foo'}}, }, ), (Dict[str, Any], {'properties': {'a': {'title': 'A', 'type': 'object'}}, 'required': ['a']}), ], ) def test_list_union_dict(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object'} base_schema.update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (datetime, {'type': 'string', 'format': 'date-time'}), (date, {'type': 'string', 'format': 'date'}), (time, {'type': 'string', 'format': 'time'}), (timedelta, {'type': 'number', 'format': 'time-delta'}), ], ) def test_date_types(field_type, expected_schema): class Model(BaseModel): a: field_type attribute_schema = {'title': 'A'} attribute_schema.update(expected_schema) base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': attribute_schema}, 'required': ['a']} assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (ConstrainedDate, {}), (condate(), {}), ( condate(gt=date(2010, 1, 1), lt=date(2021, 2, 2)), {'exclusiveMinimum': '2010-01-01', 'exclusiveMaximum': 
'2021-02-02'}, ), (condate(ge=date(2010, 1, 1), le=date(2021, 2, 2)), {'minimum': '2010-01-01', 'maximum': '2021-02-02'}), ], ) def test_date_constrained_types(field_type, expected_schema): class Model(BaseModel): a: field_type assert json.loads(Model.schema_json()) == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'format': 'date', **expected_schema}}, 'required': ['a'], } @pytest.mark.parametrize( 'field_type,expected_schema', [ (NoneStr, {'properties': {'a': {'title': 'A', 'type': 'string'}}}), (NoneBytes, {'properties': {'a': {'title': 'A', 'type': 'string', 'format': 'binary'}}}), ( StrBytes, { 'properties': { 'a': {'title': 'A', 'anyOf': [{'type': 'string'}, {'type': 'string', 'format': 'binary'}]} }, 'required': ['a'], }, ), ( NoneStrBytes, { 'properties': { 'a': {'title': 'A', 'anyOf': [{'type': 'string'}, {'type': 'string', 'format': 'binary'}]} } }, ), ], ) def test_str_basic_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object'} base_schema.update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (StrictStr, {'title': 'A', 'type': 'string'}), (ConstrainedStr, {'title': 'A', 'type': 'string'}), ( constr(min_length=3, max_length=5, regex='^text$'), {'title': 'A', 'type': 'string', 'minLength': 3, 'maxLength': 5, 'pattern': '^text$'}, ), ], ) def test_str_constrained_types(field_type, expected_schema): class Model(BaseModel): a: field_type model_schema = Model.schema() assert model_schema['properties']['a'] == expected_schema base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': expected_schema}, 'required': ['a']} assert model_schema == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (AnyUrl, {'title': 'A', 'type': 'string', 'format': 'uri', 'minLength': 1, 'maxLength': 2**16}), ( stricturl(min_length=5, max_length=10), {'title': 'A', 'type': 'string', 'format': 'uri', 'minLength': 5, 'maxLength': 10}, ), ], ) def test_special_str_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': {}}, 'required': ['a']} base_schema['properties']['a'] = expected_schema assert Model.schema() == base_schema @pytest.mark.skipif(not email_validator, reason='email_validator not installed') @pytest.mark.parametrize('field_type,expected_schema', [(EmailStr, 'email'), (NameEmail, 'name-email')]) def test_email_str_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], } base_schema['properties']['a']['format'] = expected_schema assert Model.schema() == base_schema @pytest.mark.parametrize('field_type,inner_type', [(SecretBytes, 'string'), (SecretStr, 'string')]) def test_secret_types(field_type, inner_type): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': inner_type, 'writeOnly': True, 'format': 'password'}}, 'required': ['a'], } assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (ConstrainedInt, {}), (conint(gt=5, lt=10), {'exclusiveMinimum': 5, 'exclusiveMaximum': 10}), (conint(ge=5, le=10), {'minimum': 5, 'maximum': 10}), (conint(multiple_of=5), {'multipleOf': 5}), (PositiveInt, {'exclusiveMinimum': 0}), (NegativeInt, 
{'exclusiveMaximum': 0}), (NonNegativeInt, {'minimum': 0}), (NonPositiveInt, {'maximum': 0}), ], ) def test_special_int_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'integer'}}, 'required': ['a'], } base_schema['properties']['a'].update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (ConstrainedFloat, {}), (confloat(gt=5, lt=10), {'exclusiveMinimum': 5, 'exclusiveMaximum': 10}), (confloat(ge=5, le=10), {'minimum': 5, 'maximum': 10}), (confloat(multiple_of=5), {'multipleOf': 5}), (PositiveFloat, {'exclusiveMinimum': 0}), (NegativeFloat, {'exclusiveMaximum': 0}), (NonNegativeFloat, {'minimum': 0}), (NonPositiveFloat, {'maximum': 0}), (ConstrainedDecimal, {}), (condecimal(gt=5, lt=10), {'exclusiveMinimum': 5, 'exclusiveMaximum': 10}), (condecimal(ge=5, le=10), {'minimum': 5, 'maximum': 10}), (condecimal(multiple_of=5), {'multipleOf': 5}), ], ) def test_special_float_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'number'}}, 'required': ['a'], } base_schema['properties']['a'].update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [(UUID, 'uuid'), (UUID1, 'uuid1'), (UUID3, 'uuid3'), (UUID4, 'uuid4'), (UUID5, 'uuid5')], ) def test_uuid_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'format': ''}}, 'required': ['a'], } base_schema['properties']['a']['format'] = expected_schema assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [(FilePath, 'file-path'), (DirectoryPath, 'directory-path'), (Path, 'path')] ) def test_path_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'format': ''}}, 'required': ['a'], } base_schema['properties']['a']['format'] = expected_schema assert Model.schema() == base_schema def test_json_type(): class Model(BaseModel): a: Json b: Json[int] c: Json[Any] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'a': {'title': 'A', 'type': 'string', 'format': 'json-string'}, 'b': {'title': 'B', 'type': 'integer'}, 'c': {'title': 'C', 'type': 'string', 'format': 'json-string'}, }, 'required': ['b'], } def test_ipv4address_type(): class Model(BaseModel): ip_address: IPv4Address model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_address': {'title': 'Ip Address', 'type': 'string', 'format': 'ipv4'}}, 'required': ['ip_address'], } def test_ipv6address_type(): class Model(BaseModel): ip_address: IPv6Address model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_address': {'title': 'Ip Address', 'type': 'string', 'format': 'ipv6'}}, 'required': ['ip_address'], } def test_ipvanyaddress_type(): class Model(BaseModel): ip_address: IPvAnyAddress model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_address': {'title': 'Ip Address', 'type': 'string', 'format': 'ipvanyaddress'}}, 'required': ['ip_address'], } def 
test_ipv4interface_type(): class Model(BaseModel): ip_interface: IPv4Interface model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_interface': {'title': 'Ip Interface', 'type': 'string', 'format': 'ipv4interface'}}, 'required': ['ip_interface'], } def test_ipv6interface_type(): class Model(BaseModel): ip_interface: IPv6Interface model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_interface': {'title': 'Ip Interface', 'type': 'string', 'format': 'ipv6interface'}}, 'required': ['ip_interface'], } def test_ipvanyinterface_type(): class Model(BaseModel): ip_interface: IPvAnyInterface model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_interface': {'title': 'Ip Interface', 'type': 'string', 'format': 'ipvanyinterface'}}, 'required': ['ip_interface'], } def test_ipv4network_type(): class Model(BaseModel): ip_network: IPv4Network model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_network': {'title': 'Ip Network', 'type': 'string', 'format': 'ipv4network'}}, 'required': ['ip_network'], } def test_ipv6network_type(): class Model(BaseModel): ip_network: IPv6Network model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_network': {'title': 'Ip Network', 'type': 'string', 'format': 'ipv6network'}}, 'required': ['ip_network'], } def test_ipvanynetwork_type(): class Model(BaseModel): ip_network: IPvAnyNetwork model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_network': {'title': 'Ip Network', 'type': 'string', 'format': 'ipvanynetwork'}}, 'required': ['ip_network'], } @pytest.mark.parametrize( 'type_,default_value', ( (Callable, ...), (Callable, lambda x: x), (Callable[[int], int], ...), (Callable[[int], int], lambda x: x), ), ) def test_callable_type(type_, default_value): class Model(BaseModel): callback: type_ = default_value foo: int with pytest.warns(UserWarning): model_schema = Model.schema() assert 'callback' not in model_schema['properties'] def test_error_non_supported_types(): class Model(BaseModel): a: PyObject with pytest.raises(ValueError): Model.schema() def create_testing_submodules(): base_path = Path(tempfile.mkdtemp()) mod_root_path = base_path / 'pydantic_schema_test' os.makedirs(mod_root_path, exist_ok=True) open(mod_root_path / '__init__.py', 'w').close() for mod in ['a', 'b', 'c']: module_name = 'module' + mod model_name = 'model' + mod + '.py' os.makedirs(mod_root_path / module_name, exist_ok=True) open(mod_root_path / module_name / '__init__.py', 'w').close() with open(mod_root_path / module_name / model_name, 'w') as f: f.write('from pydantic import BaseModel\n' 'class Model(BaseModel):\n' ' a: str\n') module_name = 'moduled' model_name = 'modeld.py' os.makedirs(mod_root_path / module_name, exist_ok=True) open(mod_root_path / module_name / '__init__.py', 'w').close() with open(mod_root_path / module_name / model_name, 'w') as f: f.write('from ..moduleb.modelb import Model') sys.path.insert(0, str(base_path)) def test_flat_models_unique_models(): create_testing_submodules() from pydantic_schema_test.modulea.modela import Model as ModelA from pydantic_schema_test.moduleb.modelb import Model as ModelB from pydantic_schema_test.moduled.modeld import Model as ModelD flat_models = get_flat_models_from_models([ModelA, ModelB, ModelD]) assert 
flat_models == {ModelA, ModelB} def test_flat_models_with_submodels(): class Foo(BaseModel): a: str class Bar(BaseModel): b: List[Foo] class Baz(BaseModel): c: Dict[str, Bar] flat_models = get_flat_models_from_model(Baz) assert flat_models == {Foo, Bar, Baz} def test_flat_models_with_submodels_from_sequence(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Ingredient(BaseModel): name: str class Pizza(BaseModel): name: str ingredients: List[Ingredient] flat_models = get_flat_models_from_models([Bar, Pizza]) assert flat_models == {Foo, Bar, Ingredient, Pizza} def test_model_name_maps(): create_testing_submodules() from pydantic_schema_test.modulea.modela import Model as ModelA from pydantic_schema_test.moduleb.modelb import Model as ModelB from pydantic_schema_test.modulec.modelc import Model as ModelC from pydantic_schema_test.moduled.modeld import Model as ModelD class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar flat_models = get_flat_models_from_models([Baz, ModelA, ModelB, ModelC, ModelD]) model_name_map = get_model_name_map(flat_models) assert model_name_map == { Foo: 'Foo', Bar: 'Bar', Baz: 'Baz', ModelA: 'pydantic_schema_test__modulea__modela__Model', ModelB: 'pydantic_schema_test__moduleb__modelb__Model', ModelC: 'pydantic_schema_test__modulec__modelc__Model', } def test_schema_overrides(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo = Foo(a='foo') class Baz(BaseModel): c: Optional[Bar] class Model(BaseModel): d: Baz model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], }, 'Bar': { 'title': 'Bar', 'type': 'object', 'properties': {'b': {'title': 'B', 'default': {'a': 'foo'}, 'allOf': [{'$ref': '#/definitions/Foo'}]}}, }, 'Baz': {'title': 'Baz', 'type': 'object', 'properties': {'c': {'$ref': '#/definitions/Bar'}}}, }, 'properties': {'d': {'$ref': '#/definitions/Baz'}}, 'required': ['d'], } def test_schema_overrides_w_union(): class Foo(BaseModel): pass class Bar(BaseModel): pass class Spam(BaseModel): a: Union[Foo, Bar] = Field(..., description='xxx') assert Spam.schema()['properties'] == { 'a': { 'title': 'A', 'description': 'xxx', 'anyOf': [{'$ref': '#/definitions/Foo'}, {'$ref': '#/definitions/Bar'}], }, } def test_schema_from_models(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar class Model(BaseModel): d: Baz class Ingredient(BaseModel): name: str class Pizza(BaseModel): name: str ingredients: List[Ingredient] model_schema = schema( [Model, Pizza], title='Multi-model schema', description='Single JSON Schema with multiple definitions' ) assert model_schema == { 'title': 'Multi-model schema', 'description': 'Single JSON Schema with multiple definitions', 'definitions': { 'Pizza': { 'title': 'Pizza', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'ingredients': { 'title': 'Ingredients', 'type': 'array', 'items': {'$ref': '#/definitions/Ingredient'}, }, }, 'required': ['name', 'ingredients'], }, 'Ingredient': { 'title': 'Ingredient', 'type': 'object', 'properties': {'name': {'title': 'Name', 'type': 'string'}}, 'required': ['name'], }, 'Model': { 'title': 'Model', 'type': 'object', 'properties': {'d': {'$ref': '#/definitions/Baz'}}, 'required': ['d'], }, 'Baz': { 'title': 'Baz', 'type': 'object', 'properties': {'c': {'$ref': '#/definitions/Bar'}}, 'required': ['c'], }, 'Bar': { 
'title': 'Bar', 'type': 'object', 'properties': {'b': {'$ref': '#/definitions/Foo'}}, 'required': ['b'], }, 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], }, }, } @pytest.mark.parametrize( 'ref_prefix,ref_template', [ # OpenAPI style ('#/components/schemas/', None), (None, '#/components/schemas/{model}'), # ref_prefix takes priority ('#/components/schemas/', '#/{model}/schemas/'), ], ) def test_schema_with_refs(ref_prefix, ref_template): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar model_schema = schema([Bar, Baz], ref_prefix=ref_prefix, ref_template=ref_template) assert model_schema == { 'definitions': { 'Baz': { 'title': 'Baz', 'type': 'object', 'properties': {'c': {'$ref': '#/components/schemas/Bar'}}, 'required': ['c'], }, 'Bar': { 'title': 'Bar', 'type': 'object', 'properties': {'b': {'$ref': '#/components/schemas/Foo'}}, 'required': ['b'], }, 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], }, } } def test_schema_with_custom_ref_template(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar model_schema = schema([Bar, Baz], ref_template='/schemas/{model}.json#/') assert model_schema == { 'definitions': { 'Baz': { 'title': 'Baz', 'type': 'object', 'properties': {'c': {'$ref': '/schemas/Bar.json#/'}}, 'required': ['c'], }, 'Bar': { 'title': 'Bar', 'type': 'object', 'properties': {'b': {'$ref': '/schemas/Foo.json#/'}}, 'required': ['b'], }, 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], }, } } def test_schema_ref_template_key_error(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar with pytest.raises(KeyError): schema([Bar, Baz], ref_template='/schemas/{bad_name}.json#/') def test_schema_no_definitions(): model_schema = schema([], title='Schema without definitions') assert model_schema == {'title': 'Schema without definitions'} def test_list_default(): class UserModel(BaseModel): friends: List[int] = [1] assert UserModel.schema() == { 'title': 'UserModel', 'type': 'object', 'properties': {'friends': {'title': 'Friends', 'default': [1], 'type': 'array', 'items': {'type': 'integer'}}}, } def test_enum_str_default(): class MyEnum(str, Enum): FOO = 'foo' class UserModel(BaseModel): friends: MyEnum = MyEnum.FOO assert UserModel.schema()['properties']['friends']['default'] is MyEnum.FOO.value def test_enum_int_default(): class MyEnum(IntEnum): FOO = 1 class UserModel(BaseModel): friends: MyEnum = MyEnum.FOO assert UserModel.schema()['properties']['friends']['default'] is MyEnum.FOO.value def test_dict_default(): class UserModel(BaseModel): friends: Dict[str, float] = {'a': 1.1, 'b': 2.2} assert UserModel.schema() == { 'title': 'UserModel', 'type': 'object', 'properties': { 'friends': { 'title': 'Friends', 'default': {'a': 1.1, 'b': 2.2}, 'type': 'object', 'additionalProperties': {'type': 'number'}, } }, } def test_model_default(): """Make sure inner model types are encoded properly""" class Inner(BaseModel): a: Dict[Path, str] = {Path(): ''} class Outer(BaseModel): inner: Inner = Inner() assert Outer.schema() == { 'definitions': { 'Inner': { 'properties': { 'a': { 'additionalProperties': {'type': 'string'}, 'default': {'.': ''}, 'title': 'A', 'type': 'object', } }, 'title': 'Inner', 'type': 'object', } }, 'properties': { 'inner': {'allOf': [{'$ref': '#/definitions/Inner'}], 
'default': {'a': {'.': ''}}, 'title': 'Inner'} }, 'title': 'Outer', 'type': 'object', } @pytest.mark.parametrize( 'kwargs,type_,expected_extra', [ ({'max_length': 5}, str, {'type': 'string', 'maxLength': 5}), ({}, constr(max_length=6), {'type': 'string', 'maxLength': 6}), ({'min_length': 2}, str, {'type': 'string', 'minLength': 2}), ({'max_length': 5}, bytes, {'type': 'string', 'maxLength': 5, 'format': 'binary'}), ({'regex': '^foo$'}, str, {'type': 'string', 'pattern': '^foo$'}), ({'gt': 2}, int, {'type': 'integer', 'exclusiveMinimum': 2}), ({'lt': 5}, int, {'type': 'integer', 'exclusiveMaximum': 5}), ({'ge': 2}, int, {'type': 'integer', 'minimum': 2}), ({'le': 5}, int, {'type': 'integer', 'maximum': 5}), ({'multiple_of': 5}, int, {'type': 'integer', 'multipleOf': 5}), ({'gt': 2}, float, {'type': 'number', 'exclusiveMinimum': 2}), ({'lt': 5}, float, {'type': 'number', 'exclusiveMaximum': 5}), ({'ge': 2}, float, {'type': 'number', 'minimum': 2}), ({'le': 5}, float, {'type': 'number', 'maximum': 5}), ({'gt': -math.inf}, float, {'type': 'number'}), ({'lt': math.inf}, float, {'type': 'number'}), ({'ge': -math.inf}, float, {'type': 'number'}), ({'le': math.inf}, float, {'type': 'number'}), ({'multiple_of': 5}, float, {'type': 'number', 'multipleOf': 5}), ({'gt': 2}, Decimal, {'type': 'number', 'exclusiveMinimum': 2}), ({'lt': 5}, Decimal, {'type': 'number', 'exclusiveMaximum': 5}), ({'ge': 2}, Decimal, {'type': 'number', 'minimum': 2}), ({'le': 5}, Decimal, {'type': 'number', 'maximum': 5}), ({'multiple_of': 5}, Decimal, {'type': 'number', 'multipleOf': 5}), ], ) def test_constraints_schema(kwargs, type_, expected_extra): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) expected_schema = { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A title', 'description': 'A description', 'default': 'foo'}}, } expected_schema['properties']['a'].update(expected_extra) assert Foo.schema() == expected_schema @pytest.mark.parametrize( 'kwargs,type_', [ ({'max_length': 5}, int), ({'min_length': 2}, float), ({'max_length': 5}, Decimal), ({'allow_mutation': False}, bool), ({'regex': '^foo$'}, int), ({'gt': 2}, str), ({'lt': 5}, bytes), ({'ge': 2}, str), ({'le': 5}, bool), ({'gt': 0}, Callable), ({'gt': 0}, Callable[[int], int]), ({'gt': 0}, conlist(int, min_items=4)), ({'gt': 0}, conset(int, min_items=4)), ({'gt': 0}, confrozenset(int, min_items=4)), ], ) def test_unenforced_constraints_schema(kwargs, type_): with pytest.raises(ValueError, match='On field "a" the following field constraints are set but not enforced'): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) @pytest.mark.parametrize( 'kwargs,type_,value', [ ({'max_length': 5}, str, 'foo'), ({'min_length': 2}, str, 'foo'), ({'max_length': 5}, bytes, b'foo'), ({'regex': '^foo$'}, str, 'foo'), ({'gt': 2}, int, 3), ({'lt': 5}, int, 3), ({'ge': 2}, int, 3), ({'ge': 2}, int, 2), ({'gt': 2}, int, '3'), ({'le': 5}, int, 3), ({'le': 5}, int, 5), ({'gt': 2}, float, 3.0), ({'gt': 2}, float, 2.1), ({'lt': 5}, float, 3.0), ({'lt': 5}, float, 4.9), ({'ge': 2}, float, 3.0), ({'ge': 2}, float, 2.0), ({'le': 5}, float, 3.0), ({'le': 5}, float, 5.0), ({'gt': 2}, float, 3), ({'gt': 2}, float, '3'), ({'gt': 2}, Decimal, Decimal(3)), ({'lt': 5}, Decimal, Decimal(3)), ({'ge': 2}, Decimal, Decimal(3)), ({'ge': 2}, Decimal, Decimal(2)), ({'le': 5}, Decimal, Decimal(3)), ({'le': 5}, Decimal, Decimal(5)), ], ) def test_constraints_schema_validation(kwargs, 
type_, value): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) assert Foo(a=value) @pytest.mark.parametrize( 'kwargs,type_,value', [ ({'max_length': 5}, str, 'foobar'), ({'min_length': 2}, str, 'f'), ({'regex': '^foo$'}, str, 'bar'), ({'gt': 2}, int, 2), ({'lt': 5}, int, 5), ({'ge': 2}, int, 1), ({'le': 5}, int, 6), ({'gt': 2}, float, 2.0), ({'lt': 5}, float, 5.0), ({'ge': 2}, float, 1.9), ({'le': 5}, float, 5.1), ({'gt': 2}, Decimal, Decimal(2)), ({'lt': 5}, Decimal, Decimal(5)), ({'ge': 2}, Decimal, Decimal(1)), ({'le': 5}, Decimal, Decimal(6)), ], ) def test_constraints_schema_validation_raises(kwargs, type_, value): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) with pytest.raises(ValidationError): Foo(a=value) def test_schema_kwargs(): class Foo(BaseModel): a: str = Field('foo', examples=['bar']) assert Foo.schema() == { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'type': 'string', 'title': 'A', 'default': 'foo', 'examples': ['bar']}}, } def test_schema_dict_constr(): regex_str = r'^([a-zA-Z_][a-zA-Z0-9_]*)$' ConStrType = constr(regex=regex_str) ConStrKeyDict = Dict[ConStrType, str] class Foo(BaseModel): a: ConStrKeyDict = {} assert Foo.schema() == { 'title': 'Foo', 'type': 'object', 'properties': { 'a': { 'type': 'object', 'title': 'A', 'default': {}, 'additionalProperties': {'type': 'string'}, 'patternProperties': {regex_str: {'type': 'string'}}, } }, } @pytest.mark.parametrize( 'field_type,expected_schema', [ (ConstrainedBytes, {'title': 'A', 'type': 'string', 'format': 'binary'}), ( conbytes(min_length=3, max_length=5), {'title': 'A', 'type': 'string', 'format': 'binary', 'minLength': 3, 'maxLength': 5}, ), ], ) def test_bytes_constrained_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': {}}, 'required': ['a']} base_schema['properties']['a'] = expected_schema assert Model.schema() == base_schema def test_optional_dict(): class Model(BaseModel): something: Optional[Dict[str, Any]] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'something': {'title': 'Something', 'type': 'object'}}, } assert Model().dict() == {'something': None} assert Model(something={'foo': 'Bar'}).dict() == {'something': {'foo': 'Bar'}} def test_optional_validator(): class Model(BaseModel): something: Optional[str] @validator('something', always=True) def check_something(cls, v): assert v is None or 'x' not in v, 'should not contain x' return v assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'something': {'title': 'Something', 'type': 'string'}}, } assert Model().dict() == {'something': None} assert Model(something=None).dict() == {'something': None} assert Model(something='hello').dict() == {'something': 'hello'} def test_field_with_validator(): class Model(BaseModel): something: Optional[int] = None @validator('something') def check_field(cls, v, *, values, config, field): return v assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'something': {'type': 'integer', 'title': 'Something'}}, } def test_unparameterized_schema_generation(): class FooList(BaseModel): d: List class BarList(BaseModel): d: list assert model_schema(FooList) == { 'title': 'FooList', 'type': 'object', 'properties': {'d': {'items': {}, 'title': 'D', 'type': 'array'}}, 'required': ['d'], } foo_list_schema = model_schema(FooList) bar_list_schema 
= model_schema(BarList) bar_list_schema['title'] = 'FooList' # to check for equality assert foo_list_schema == bar_list_schema class FooDict(BaseModel): d: Dict class BarDict(BaseModel): d: dict model_schema(Foo) assert model_schema(FooDict) == { 'title': 'FooDict', 'type': 'object', 'properties': {'d': {'title': 'D', 'type': 'object'}}, 'required': ['d'], } foo_dict_schema = model_schema(FooDict) bar_dict_schema = model_schema(BarDict) bar_dict_schema['title'] = 'FooDict' # to check for equality assert foo_dict_schema == bar_dict_schema def test_known_model_optimization(): class Dep(BaseModel): number: int class Model(BaseModel): dep: Dep dep_l: List[Dep] expected = { 'title': 'Model', 'type': 'object', 'properties': { 'dep': {'$ref': '#/definitions/Dep'}, 'dep_l': {'title': 'Dep L', 'type': 'array', 'items': {'$ref': '#/definitions/Dep'}}, }, 'required': ['dep', 'dep_l'], 'definitions': { 'Dep': { 'title': 'Dep', 'type': 'object', 'properties': {'number': {'title': 'Number', 'type': 'integer'}}, 'required': ['number'], } }, } assert Model.schema() == expected def test_root(): class Model(BaseModel): __root__: str assert Model.schema() == {'title': 'Model', 'type': 'string'} def test_root_list(): class Model(BaseModel): __root__: List[str] assert Model.schema() == {'title': 'Model', 'type': 'array', 'items': {'type': 'string'}} def test_root_nested_model(): class NestedModel(BaseModel): a: str class Model(BaseModel): __root__: List[NestedModel] assert Model.schema() == { 'title': 'Model', 'type': 'array', 'items': {'$ref': '#/definitions/NestedModel'}, 'definitions': { 'NestedModel': { 'title': 'NestedModel', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], } }, } def test_new_type_schema(): a_type = NewType('a_type', int) b_type = NewType('b_type', a_type) c_type = NewType('c_type', str) class Model(BaseModel): a: a_type b: b_type c: c_type assert Model.schema() == { 'properties': { 'a': {'title': 'A', 'type': 'integer'}, 'b': {'title': 'B', 'type': 'integer'}, 'c': {'title': 'C', 'type': 'string'}, }, 'required': ['a', 'b', 'c'], 'title': 'Model', 'type': 'object', } def test_literal_schema(): class Model(BaseModel): a: Literal[1] b: Literal['a'] c: Literal['a', 1] d: Literal['a', Literal['b'], 1, 2] assert Model.schema() == { 'properties': { 'a': {'title': 'A', 'type': 'integer', 'enum': [1]}, 'b': {'title': 'B', 'type': 'string', 'enum': ['a']}, 'c': {'title': 'C', 'anyOf': [{'type': 'string', 'enum': ['a']}, {'type': 'integer', 'enum': [1]}]}, 'd': { 'title': 'D', 'anyOf': [ {'type': 'string', 'enum': ['a', 'b']}, {'type': 'integer', 'enum': [1, 2]}, ], }, }, 'required': ['a', 'b', 'c', 'd'], 'title': 'Model', 'type': 'object', } def test_literal_enum(): class MyEnum(str, Enum): FOO = 'foo' BAR = 'bar' class Model(BaseModel): kind: Literal[MyEnum.FOO] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'kind': {'title': 'Kind', 'enum': ['foo'], 'type': 'string'}}, 'required': ['kind'], } def test_color_type(): class Model(BaseModel): color: Color model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'color': {'title': 'Color', 'type': 'string', 'format': 'color'}}, 'required': ['color'], } def test_model_with_schema_extra(): class Model(BaseModel): a: str class Config: schema_extra = {'examples': [{'a': 'Foo'}]} assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], 'examples': [{'a': 
'Foo'}], } def test_model_with_schema_extra_callable(): class Model(BaseModel): name: str = None class Config: @staticmethod def schema_extra(schema, model_class): schema.pop('properties') schema['type'] = 'override' assert model_class is Model assert Model.schema() == {'title': 'Model', 'type': 'override'} def test_model_with_schema_extra_callable_no_model_class(): class Model(BaseModel): name: str = None class Config: @staticmethod def schema_extra(schema): schema.pop('properties') schema['type'] = 'override' assert Model.schema() == {'title': 'Model', 'type': 'override'} def test_model_with_schema_extra_callable_classmethod(): class Model(BaseModel): name: str = None class Config: type = 'foo' @classmethod def schema_extra(cls, schema, model_class): schema.pop('properties') schema['type'] = cls.type assert model_class is Model assert Model.schema() == {'title': 'Model', 'type': 'foo'} def test_model_with_schema_extra_callable_instance_method(): class Model(BaseModel): name: str = None class Config: def schema_extra(schema, model_class): schema.pop('properties') schema['type'] = 'override' assert model_class is Model assert Model.schema() == {'title': 'Model', 'type': 'override'} def test_model_with_extra_forbidden(): class Model(BaseModel): a: str class Config: extra = Extra.forbid assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], 'additionalProperties': False, } @pytest.mark.parametrize( 'annotation,kwargs,field_schema', [ (int, dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'integer'}), (Optional[int], dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'integer'}), ( Tuple[int, ...], dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'array', 'items': {'exclusiveMinimum': 0, 'type': 'integer'}}, ), ( Tuple[int, int, int], dict(gt=0), { 'title': 'A', 'type': 'array', 'items': [ {'exclusiveMinimum': 0, 'type': 'integer'}, {'exclusiveMinimum': 0, 'type': 'integer'}, {'exclusiveMinimum': 0, 'type': 'integer'}, ], 'minItems': 3, 'maxItems': 3, }, ), ( Union[int, float], dict(gt=0), { 'title': 'A', 'anyOf': [{'exclusiveMinimum': 0, 'type': 'integer'}, {'exclusiveMinimum': 0, 'type': 'number'}], }, ), ( List[int], dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'array', 'items': {'exclusiveMinimum': 0, 'type': 'integer'}}, ), ( Dict[str, int], dict(gt=0), { 'title': 'A', 'exclusiveMinimum': 0, 'type': 'object', 'additionalProperties': {'exclusiveMinimum': 0, 'type': 'integer'}, }, ), ( Union[str, int], dict(gt=0, max_length=5), {'title': 'A', 'anyOf': [{'maxLength': 5, 'type': 'string'}, {'exclusiveMinimum': 0, 'type': 'integer'}]}, ), ], ) def test_enforced_constraints(annotation, kwargs, field_schema): class Model(BaseModel): a: annotation = Field(..., **kwargs) schema = Model.schema() # debug(schema['properties']['a']) assert schema['properties']['a'] == field_schema def test_real_vs_phony_constraints(): class Model1(BaseModel): foo: int = Field(..., gt=123) class Config: title = 'Test Model' class Model2(BaseModel): foo: int = Field(..., exclusiveMinimum=123) class Config: title = 'Test Model' with pytest.raises(ValidationError, match='ensure this value is greater than 123'): Model1(foo=122) assert Model2(foo=122).dict() == {'foo': 122} assert ( Model1.schema() == Model2.schema() == { 'title': 'Test Model', 'type': 'object', 'properties': {'foo': {'title': 'Foo', 'exclusiveMinimum': 123, 'type': 'integer'}}, 'required': ['foo'], } ) def test_subfield_field_info(): 
class MyModel(BaseModel): entries: Dict[str, List[int]] assert MyModel.schema() == { 'title': 'MyModel', 'type': 'object', 'properties': { 'entries': { 'title': 'Entries', 'type': 'object', 'additionalProperties': {'type': 'array', 'items': {'type': 'integer'}}, } }, 'required': ['entries'], } def test_dataclass(): @dataclass class Model: a: bool assert schema([Model]) == { 'definitions': { 'Model': { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'boolean'}}, 'required': ['a'], } } } assert model_schema(Model) == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'boolean'}}, 'required': ['a'], } def test_schema_attributes(): class ExampleEnum(Enum): """This is a test description.""" gt = 'GT' lt = 'LT' ge = 'GE' le = 'LE' max_length = 'ML' multiple_of = 'MO' regex = 'RE' class Example(BaseModel): example: ExampleEnum assert Example.schema() == { 'title': 'Example', 'type': 'object', 'properties': {'example': {'$ref': '#/definitions/ExampleEnum'}}, 'required': ['example'], 'definitions': { 'ExampleEnum': { 'title': 'ExampleEnum', 'description': 'This is a test description.', 'enum': ['GT', 'LT', 'GE', 'LE', 'ML', 'MO', 'RE'], } }, } def test_model_process_schema_enum(): class SpamEnum(str, Enum): foo = 'f' bar = 'b' model_schema, _, _ = model_process_schema(SpamEnum, model_name_map={}) assert model_schema == {'title': 'SpamEnum', 'description': 'An enumeration.', 'type': 'string', 'enum': ['f', 'b']} def test_path_modify_schema(): class MyPath(Path): @classmethod def __modify_schema__(cls, schema): schema.update(foobar=123) class Model(BaseModel): path1: Path path2: MyPath path3: List[MyPath] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'path1': {'title': 'Path1', 'type': 'string', 'format': 'path'}, 'path2': {'title': 'Path2', 'type': 'string', 'format': 'path', 'foobar': 123}, 'path3': {'title': 'Path3', 'type': 'array', 'items': {'type': 'string', 'format': 'path', 'foobar': 123}}, }, 'required': ['path1', 'path2', 'path3'], } def test_frozen_set(): class Model(BaseModel): a: FrozenSet[int] = frozenset({1, 2, 3}) b: FrozenSet = frozenset({1, 2, 3}) c: frozenset = frozenset({1, 2, 3}) d: frozenset = ... 
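
# --- Editor's aside (illustrative sketch; not part of the original test file) ---
# The parametrized constraint tests earlier in this module exercise pydantic
# v1's mapping from Field() keyword arguments to JSON Schema keywords:
# gt/lt -> exclusiveMinimum/exclusiveMaximum, ge/le -> minimum/maximum,
# max_length -> maxLength, regex -> pattern. A minimal standalone
# demonstration of that mapping; the model and field names are hypothetical.

from pydantic import BaseModel, Field


class _Product(BaseModel):
    price: float = Field(..., gt=0, le=1000)
    sku: str = Field(..., regex=r'^[A-Z]{3}-\d{4}$')


_props = _Product.schema()['properties']
assert _props['price']['exclusiveMinimum'] == 0
assert _props['price']['maximum'] == 1000
assert _props['sku']['pattern'] == r'^[A-Z]{3}-\d{4}$'
# --- end editor's aside ---
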
    assert Model.schema() == {
        'title': 'Model',
        'type': 'object',
        'properties': {
            'a': {
                'title': 'A',
                'default': [1, 2, 3],
                'type': 'array',
                'items': {'type': 'integer'},
                'uniqueItems': True,
            },
            'b': {'title': 'B', 'default': [1, 2, 3], 'type': 'array', 'items': {}, 'uniqueItems': True},
            'c': {'title': 'C', 'default': [1, 2, 3], 'type': 'array', 'items': {}, 'uniqueItems': True},
            'd': {'title': 'D', 'type': 'array', 'items': {}, 'uniqueItems': True},
        },
        'required': ['d'],
    }


def test_iterable():
    class Model(BaseModel):
        a: Iterable[int]

    assert Model.schema() == {
        'title': 'Model',
        'type': 'object',
        'properties': {'a': {'title': 'A', 'type': 'array', 'items': {'type': 'integer'}}},
        'required': ['a'],
    }


def test_new_type():
    new_type = NewType('NewStr', str)

    class Model(BaseModel):
        a: new_type

    assert Model.schema() == {
        'title': 'Model',
        'type': 'object',
        'properties': {'a': {'title': 'A', 'type': 'string'}},
        'required': ['a'],
    }


def test_multiple_models_with_same_name(create_module):
    module = create_module(
        # language=Python
        """
from pydantic import BaseModel


class ModelOne(BaseModel):
    class NestedModel(BaseModel):
        a: float

    nested: NestedModel


class ModelTwo(BaseModel):
    class NestedModel(BaseModel):
        b: float

    nested: NestedModel


class NestedModel(BaseModel):
    c: float
"""
    )

    models = [module.ModelOne, module.ModelTwo, module.NestedModel]
    model_names = set(schema(models)['definitions'].keys())
    expected_model_names = {
        'ModelOne',
        'ModelTwo',
        f'{module.__name__}__ModelOne__NestedModel',
        f'{module.__name__}__ModelTwo__NestedModel',
        f'{module.__name__}__NestedModel',
    }
    assert model_names == expected_model_names


def test_multiple_enums_with_same_name(create_module):
    module_1 = create_module(
        # language=Python
        """
from enum import Enum

from pydantic import BaseModel


class MyEnum(str, Enum):
    a = 'a'
    b = 'b'
    c = 'c'


class MyModel(BaseModel):
    my_enum_1: MyEnum
"""
    )
    module_2 = create_module(
        # language=Python
        """
from enum import Enum

from pydantic import BaseModel


class MyEnum(str, Enum):
    d = 'd'
    e = 'e'
    f = 'f'


class MyModel(BaseModel):
    my_enum_2: MyEnum
"""
    )

    class Model(BaseModel):
        my_model_1: module_1.MyModel
        my_model_2: module_2.MyModel

    assert len(Model.schema()['definitions']) == 4
    assert set(Model.schema()['definitions']) == {
        f'{module_1.__name__}__MyEnum',
        f'{module_1.__name__}__MyModel',
        f'{module_2.__name__}__MyEnum',
        f'{module_2.__name__}__MyModel',
    }


def test_schema_for_generic_field():
    T = TypeVar('T')

    class GenModel(Generic[T]):
        def __init__(self, data: Any):
            self.data = data

        @classmethod
        def __get_validators__(cls):
            yield cls.validate

        @classmethod
        def validate(cls, v: Any):
            return v

    class Model(BaseModel):
        data: GenModel[str]
        data1: GenModel

    assert Model.schema() == {
        'title': 'Model',
        'type': 'object',
        'properties': {
            'data': {'allOf': [{'type': 'string'}], 'title': 'Data'},
            'data1': {
                'title': 'Data1',
            },
        },
        'required': ['data', 'data1'],
    }

    class GenModelModified(GenModel, Generic[T]):
        @classmethod
        def __modify_schema__(cls, field_schema):
            field_schema.pop('allOf', None)
            field_schema.update(anyOf=[{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}])

    class ModelModified(BaseModel):
        data: GenModelModified[str]
        data1: GenModelModified

    assert ModelModified.schema() == {
        'title': 'ModelModified',
        'type': 'object',
        'properties': {
            'data': {'title': 'Data', 'anyOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]},
            'data1': {'title': 'Data1', 'anyOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]},
        },
        'required': ['data', 'data1'],
    }
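
# --- Editor's aside (illustrative sketch; not part of the original test file) ---
# test_schema_for_generic_field above relies on pydantic v1's custom-type
# hooks: __get_validators__ supplies validation and __modify_schema__ lets a
# type mutate its generated JSON Schema in place. A self-contained example;
# the _UpperStr type is hypothetical.

from pydantic import BaseModel


class _UpperStr(str):
    @classmethod
    def __get_validators__(cls):
        yield cls.validate

    @classmethod
    def validate(cls, v):
        return cls(str(v).upper())

    @classmethod
    def __modify_schema__(cls, field_schema):
        # field_schema is the dict pydantic built for this field; mutate it in place.
        field_schema.update(examples=['HELLO'])


class _Greeting(BaseModel):
    text: _UpperStr


assert _Greeting(text='hello').text == 'HELLO'
assert _Greeting.schema()['properties']['text']['examples'] == ['HELLO']
# --- end editor's aside ---
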
def test_namedtuple_default(): class Coordinates(NamedTuple): x: float y: float class LocationBase(BaseModel): coords: Coordinates = Coordinates(0, 0) assert LocationBase.schema() == { 'title': 'LocationBase', 'type': 'object', 'properties': { 'coords': { 'title': 'Coords', 'default': Coordinates(x=0, y=0), 'type': 'array', 'items': [{'title': 'X', 'type': 'number'}, {'title': 'Y', 'type': 'number'}], 'minItems': 2, 'maxItems': 2, } }, } def test_advanced_generic_schema(): T = TypeVar('T') K = TypeVar('K') class Gen(Generic[T]): def __init__(self, data: Any): self.data = data @classmethod def __get_validators__(cls): yield cls.validate @classmethod def validate(cls, v: Any): return v @classmethod def __modify_schema__(cls, field_schema): the_type = field_schema.pop('allOf', [{'type': 'string'}])[0] field_schema.update(title='Gen title', anyOf=[the_type, {'type': 'array', 'items': the_type}]) class GenTwoParams(Generic[T, K]): def __init__(self, x: str, y: Any): self.x = x self.y = y @classmethod def __get_validators__(cls): yield cls.validate @classmethod def validate(cls, v: Any): return cls(*v) @classmethod def __modify_schema__(cls, field_schema): field_schema.update(examples='examples') class CustomType(Enum): A = 'a' B = 'b' @classmethod def __modify_schema__(cls, field_schema): field_schema.update(title='CustomType title', type='string') class Model(BaseModel): data0: Gen data1: Gen[CustomType] = Field(title='Data1 title', description='Data 1 description') data2: GenTwoParams[CustomType, UUID4] = Field(title='Data2 title', description='Data 2') # check Tuple because changes in code touch that type data3: Tuple data4: Tuple[CustomType] data5: Tuple[CustomType, str] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'data0': { 'anyOf': [{'type': 'string'}, {'items': {'type': 'string'}, 'type': 'array'}], 'title': 'Gen title', }, 'data1': { 'title': 'Gen title', 'description': 'Data 1 description', 'anyOf': [ {'$ref': '#/definitions/CustomType'}, {'type': 'array', 'items': {'$ref': '#/definitions/CustomType'}}, ], }, 'data2': { 'allOf': [ { 'items': [{'$ref': '#/definitions/CustomType'}, {'format': 'uuid4', 'type': 'string'}], 'type': 'array', } ], 'title': 'Data2 title', 'description': 'Data 2', 'examples': 'examples', }, 'data3': {'title': 'Data3', 'type': 'array', 'items': {}}, 'data4': { 'title': 'Data4', 'type': 'array', 'items': [{'$ref': '#/definitions/CustomType'}], 'minItems': 1, 'maxItems': 1, }, 'data5': { 'title': 'Data5', 'type': 'array', 'items': [{'$ref': '#/definitions/CustomType'}, {'type': 'string'}], 'minItems': 2, 'maxItems': 2, }, }, 'required': ['data0', 'data1', 'data2', 'data3', 'data4', 'data5'], 'definitions': { 'CustomType': { 'title': 'CustomType title', 'description': 'An enumeration.', 'enum': ['a', 'b'], 'type': 'string', } }, } def test_nested_generic(): """ Test a nested BaseModel that is also a Generic """ class Ref(BaseModel, Generic[T]): uuid: str def resolve(self) -> T: ... 
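
# --- Editor's aside (illustrative sketch; not part of the original test file) ---
# The GenericModel tests nearby (see test_nested_generic_model below) show
# that each concrete parametrization of a GenericModel gets its own schema
# definition, named by mangling the type parameters into the class name,
# e.g. Box[str] -> 'Box_str_'. A minimal sketch with hypothetical names:

from typing import Generic, TypeVar

from pydantic import BaseModel
from pydantic.generics import GenericModel

_T = TypeVar('_T')


class _Pair(GenericModel, Generic[_T]):
    first: _T
    second: _T


class _Holder(BaseModel):
    pair: _Pair[int]


# '_Pair[int]' is normalized to '_Pair_int_' in the definitions mapping.
assert '_Pair_int_' in _Holder.schema()['definitions']
# --- end editor's aside ---
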
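
# --- Editor's aside (illustrative sketch; not part of the original test file) ---
# The discriminated-union tests below show that Field(discriminator=...) on a
# Union emits a 'oneOf' plus an OpenAPI-style 'discriminator' object mapping
# each tag value to a definition. A compact sketch with hypothetical models
# (typing.Literal needs Python 3.8+; earlier versions use typing_extensions):

from typing import Literal, Union

from pydantic import BaseModel, Field


class _Cat(BaseModel):
    pet_type: Literal['cat']


class _Dog(BaseModel):
    pet_type: Literal['dog']


class _Owner(BaseModel):
    pet: Union[_Cat, _Dog] = Field(..., discriminator='pet_type')


_pet = _Owner.schema()['properties']['pet']
assert _pet['discriminator']['propertyName'] == 'pet_type'
assert _pet['oneOf'] == [{'$ref': '#/definitions/_Cat'}, {'$ref': '#/definitions/_Dog'}]
# --- end editor's aside ---
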
class Model(BaseModel): ref: Ref['Model'] # noqa assert Model.schema() == { 'title': 'Model', 'type': 'object', 'definitions': { 'Ref': { 'title': 'Ref', 'type': 'object', 'properties': { 'uuid': {'title': 'Uuid', 'type': 'string'}, }, 'required': ['uuid'], }, }, 'properties': { 'ref': {'$ref': '#/definitions/Ref'}, }, 'required': ['ref'], } def test_nested_generic_model(): """ Test a nested GenericModel """ class Box(GenericModel, Generic[T]): uuid: str data: T class Model(BaseModel): box_str: Box[str] box_int: Box[int] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'definitions': { 'Box_str_': Box[str].schema(), 'Box_int_': Box[int].schema(), }, 'properties': { 'box_str': {'$ref': '#/definitions/Box_str_'}, 'box_int': {'$ref': '#/definitions/Box_int_'}, }, 'required': ['box_str', 'box_int'], } def test_complex_nested_generic(): """ Handle a union of a generic. """ class Ref(BaseModel, Generic[T]): uuid: str def resolve(self) -> T: ... class Model(BaseModel): uuid: str model: Union[Ref['Model'], 'Model'] # noqa def resolve(self) -> 'Model': # noqa ... Model.update_forward_refs() assert Model.schema() == { 'definitions': { 'Model': { 'title': 'Model', 'type': 'object', 'properties': { 'uuid': {'title': 'Uuid', 'type': 'string'}, 'model': { 'title': 'Model', 'anyOf': [ {'$ref': '#/definitions/Ref'}, {'$ref': '#/definitions/Model'}, ], }, }, 'required': ['uuid', 'model'], }, 'Ref': { 'title': 'Ref', 'type': 'object', 'properties': { 'uuid': {'title': 'Uuid', 'type': 'string'}, }, 'required': ['uuid'], }, }, '$ref': '#/definitions/Model', } def test_schema_with_field_parameter(): class RestrictedAlphabetStr(str): @classmethod def __modify_schema__(cls, field_schema, field: Optional[ModelField]): assert isinstance(field, ModelField) alphabet = field.field_info.extra['alphabet'] field_schema['examples'] = [c * 3 for c in alphabet] class MyModel(BaseModel): value: RestrictedAlphabetStr = Field(alphabet='ABC') assert MyModel.schema() == { 'title': 'MyModel', 'type': 'object', 'properties': { 'value': {'title': 'Value', 'alphabet': 'ABC', 'examples': ['AAA', 'BBB', 'CCC'], 'type': 'string'} }, 'required': ['value'], } def test_discriminated_union(): class BlackCat(BaseModel): pet_type: Literal['cat'] color: Literal['black'] class WhiteCat(BaseModel): pet_type: Literal['cat'] color: Literal['white'] class Cat(BaseModel): __root__: Union[BlackCat, WhiteCat] = Field(..., discriminator='color') class Dog(BaseModel): pet_type: Literal['dog'] class Lizard(BaseModel): pet_type: Literal['reptile', 'lizard'] class Model(BaseModel): pet: Union[Cat, Dog, Lizard] = Field(..., discriminator='pet_type') assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'pet': { 'title': 'Pet', 'discriminator': { 'propertyName': 'pet_type', 'mapping': { 'cat': '#/definitions/Cat', 'dog': '#/definitions/Dog', 'reptile': '#/definitions/Lizard', 'lizard': '#/definitions/Lizard', }, }, 'oneOf': [ {'$ref': '#/definitions/Cat'}, {'$ref': '#/definitions/Dog'}, {'$ref': '#/definitions/Lizard'}, ], } }, 'required': ['pet'], 'definitions': { 'BlackCat': { 'title': 'BlackCat', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['black'], 'type': 'string'}, }, 'required': ['pet_type', 'color'], }, 'WhiteCat': { 'title': 'WhiteCat', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['white'], 'type': 'string'}, }, 
'required': ['pet_type', 'color'], }, 'Cat': { 'title': 'Cat', 'discriminator': { 'propertyName': 'color', 'mapping': {'black': '#/definitions/BlackCat', 'white': '#/definitions/WhiteCat'}, }, 'oneOf': [{'$ref': '#/definitions/BlackCat'}, {'$ref': '#/definitions/WhiteCat'}], }, 'Dog': { 'title': 'Dog', 'type': 'object', 'properties': {'pet_type': {'title': 'Pet Type', 'enum': ['dog'], 'type': 'string'}}, 'required': ['pet_type'], }, 'Lizard': { 'title': 'Lizard', 'type': 'object', 'properties': {'pet_type': {'title': 'Pet Type', 'enum': ['reptile', 'lizard'], 'type': 'string'}}, 'required': ['pet_type'], }, }, } def test_discriminated_annotated_union(): class BlackCatWithHeight(BaseModel): pet_type: Literal['cat'] color: Literal['black'] info: Literal['height'] black_infos: str class BlackCatWithWeight(BaseModel): pet_type: Literal['cat'] color: Literal['black'] info: Literal['weight'] black_infos: str BlackCat = Annotated[Union[BlackCatWithHeight, BlackCatWithWeight], Field(discriminator='info')] class WhiteCat(BaseModel): pet_type: Literal['cat'] color: Literal['white'] white_infos: str Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] class Dog(BaseModel): pet_type: Literal['dog'] dog_name: str Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] class Model(BaseModel): pet: Pet number: int assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'pet': { 'title': 'Pet', 'discriminator': { 'propertyName': 'pet_type', 'mapping': { 'cat': { 'BlackCatWithHeight': {'$ref': '#/definitions/BlackCatWithHeight'}, 'BlackCatWithWeight': {'$ref': '#/definitions/BlackCatWithWeight'}, 'WhiteCat': {'$ref': '#/definitions/WhiteCat'}, }, 'dog': '#/definitions/Dog', }, }, 'oneOf': [ { 'oneOf': [ { 'oneOf': [ {'$ref': '#/definitions/BlackCatWithHeight'}, {'$ref': '#/definitions/BlackCatWithWeight'}, ] }, {'$ref': '#/definitions/WhiteCat'}, ] }, {'$ref': '#/definitions/Dog'}, ], }, 'number': {'title': 'Number', 'type': 'integer'}, }, 'required': ['pet', 'number'], 'definitions': { 'BlackCatWithHeight': { 'title': 'BlackCatWithHeight', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['black'], 'type': 'string'}, 'info': {'title': 'Info', 'enum': ['height'], 'type': 'string'}, 'black_infos': {'title': 'Black Infos', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'info', 'black_infos'], }, 'BlackCatWithWeight': { 'title': 'BlackCatWithWeight', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['black'], 'type': 'string'}, 'info': {'title': 'Info', 'enum': ['weight'], 'type': 'string'}, 'black_infos': {'title': 'Black Infos', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'info', 'black_infos'], }, 'WhiteCat': { 'title': 'WhiteCat', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['white'], 'type': 'string'}, 'white_infos': {'title': 'White Infos', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'white_infos'], }, 'Dog': { 'title': 'Dog', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['dog'], 'type': 'string'}, 'dog_name': {'title': 'Dog Name', 'type': 'string'}, }, 'required': ['pet_type', 'dog_name'], }, }, } def test_discriminated_annotated_union_enum(): class PetType(Enum): cat = 'cat' dog = 'dog' class PetColor(str, Enum): 
black = 'black' white = 'white' class PetInfo(Enum): height = 0 weight = 1 class BlackCatWithHeight(BaseModel): pet_type: Literal[PetType.cat] color: Literal[PetColor.black] info: Literal[PetInfo.height] black_infos: str class BlackCatWithWeight(BaseModel): pet_type: Literal[PetType.cat] color: Literal[PetColor.black] info: Literal[PetInfo.weight] black_infos: str BlackCat = Annotated[Union[BlackCatWithHeight, BlackCatWithWeight], Field(discriminator='info')] class WhiteCat(BaseModel): pet_type: Literal[PetType.cat] color: Literal[PetColor.white] white_infos: str Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] class Dog(BaseModel): pet_type: Literal[PetType.dog] dog_name: str Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] class Model(BaseModel): pet: Pet number: int assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'pet': { 'title': 'Pet', 'discriminator': { 'propertyName': 'pet_type', 'mapping': { 'cat': { 'BlackCatWithHeight': {'$ref': '#/definitions/BlackCatWithHeight'}, 'BlackCatWithWeight': {'$ref': '#/definitions/BlackCatWithWeight'}, 'WhiteCat': {'$ref': '#/definitions/WhiteCat'}, }, 'dog': '#/definitions/Dog', }, }, 'oneOf': [ { 'oneOf': [ { 'oneOf': [ {'$ref': '#/definitions/BlackCatWithHeight'}, {'$ref': '#/definitions/BlackCatWithWeight'}, ] }, {'$ref': '#/definitions/WhiteCat'}, ] }, {'$ref': '#/definitions/Dog'}, ], }, 'number': {'title': 'Number', 'type': 'integer'}, }, 'required': ['pet', 'number'], 'definitions': { 'BlackCatWithHeight': { 'title': 'BlackCatWithHeight', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['black'], 'type': 'string'}, 'info': {'title': 'Info', 'enum': [0], 'type': 'integer'}, 'black_infos': {'title': 'Black Infos', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'info', 'black_infos'], }, 'BlackCatWithWeight': { 'title': 'BlackCatWithWeight', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['black'], 'type': 'string'}, 'info': {'title': 'Info', 'enum': [1], 'type': 'integer'}, 'black_infos': {'title': 'Black Infos', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'info', 'black_infos'], }, 'WhiteCat': { 'title': 'WhiteCat', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['white'], 'type': 'string'}, 'white_infos': {'title': 'White Infos', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'white_infos'], }, 'Dog': { 'title': 'Dog', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['dog'], 'type': 'string'}, 'dog_name': {'title': 'Dog Name', 'type': 'string'}, }, 'required': ['pet_type', 'dog_name'], }, }, } def test_alias_same(): class Cat(BaseModel): pet_type: Literal['cat'] = Field(alias='typeOfPet') c: str class Dog(BaseModel): pet_type: Literal['dog'] = Field(alias='typeOfPet') d: str class Model(BaseModel): pet: Union[Cat, Dog] = Field(discriminator='pet_type') number: int assert Model.schema() == { 'type': 'object', 'title': 'Model', 'properties': { 'number': {'title': 'Number', 'type': 'integer'}, 'pet': { 'oneOf': [{'$ref': '#/definitions/Cat'}, {'$ref': '#/definitions/Dog'}], 'discriminator': { 'mapping': {'cat': '#/definitions/Cat', 'dog': '#/definitions/Dog'}, 'propertyName': 'typeOfPet', }, 'title': 'Pet', }, }, 'required': ['pet', 'number'], 
'definitions': { 'Cat': { 'properties': { 'c': {'title': 'C', 'type': 'string'}, 'typeOfPet': {'enum': ['cat'], 'title': 'Typeofpet', 'type': 'string'}, }, 'required': ['typeOfPet', 'c'], 'title': 'Cat', 'type': 'object', }, 'Dog': { 'properties': { 'd': {'title': 'D', 'type': 'string'}, 'typeOfPet': {'enum': ['dog'], 'title': 'Typeofpet', 'type': 'string'}, }, 'required': ['typeOfPet', 'd'], 'title': 'Dog', 'type': 'object', }, }, } def test_nested_python_dataclasses(): """ Test schema generation for nested python dataclasses """ from dataclasses import dataclass as python_dataclass @python_dataclass class ChildModel: name: str @python_dataclass class NestedModel: child: List[ChildModel] assert model_schema(dataclass(NestedModel)) == { 'title': 'NestedModel', 'type': 'object', 'properties': {'child': {'title': 'Child', 'type': 'array', 'items': {'$ref': '#/definitions/ChildModel'}}}, 'required': ['child'], 'definitions': { 'ChildModel': { 'title': 'ChildModel', 'type': 'object', 'properties': {'name': {'title': 'Name', 'type': 'string'}}, 'required': ['name'], } }, } def test_discriminated_union_in_list(): class BlackCat(BaseModel): pet_type: Literal['cat'] color: Literal['black'] black_name: str class WhiteCat(BaseModel): pet_type: Literal['cat'] color: Literal['white'] white_name: str Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')] class Dog(BaseModel): pet_type: Literal['dog'] name: str Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')] class Model(BaseModel): pets: Pet n: int assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'pets': { 'title': 'Pets', 'discriminator': { 'propertyName': 'pet_type', 'mapping': { 'cat': { 'BlackCat': {'$ref': '#/definitions/BlackCat'}, 'WhiteCat': {'$ref': '#/definitions/WhiteCat'}, }, 'dog': '#/definitions/Dog', }, }, 'oneOf': [ { 'oneOf': [ {'$ref': '#/definitions/BlackCat'}, {'$ref': '#/definitions/WhiteCat'}, ], }, {'$ref': '#/definitions/Dog'}, ], }, 'n': {'title': 'N', 'type': 'integer'}, }, 'required': ['pets', 'n'], 'definitions': { 'BlackCat': { 'title': 'BlackCat', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['black'], 'type': 'string'}, 'black_name': {'title': 'Black Name', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'black_name'], }, 'WhiteCat': { 'title': 'WhiteCat', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['cat'], 'type': 'string'}, 'color': {'title': 'Color', 'enum': ['white'], 'type': 'string'}, 'white_name': {'title': 'White Name', 'type': 'string'}, }, 'required': ['pet_type', 'color', 'white_name'], }, 'Dog': { 'title': 'Dog', 'type': 'object', 'properties': { 'pet_type': {'title': 'Pet Type', 'enum': ['dog'], 'type': 'string'}, 'name': {'title': 'Name', 'type': 'string'}, }, 'required': ['pet_type', 'name'], }, }, } def test_extra_inheritance(): class A(BaseModel): root: Optional[str] class Config: fields = { 'root': {'description': 'root path of data', 'level': 1}, } class Model(A): root: str = Field('asa', description='image height', level=3) m = Model() assert m.schema()['properties'] == { 'root': { 'title': 'Root', 'type': 'string', 'description': 'image height', 'default': 'asa', 'level': 3, } } def test_model_with_type_attributes(): class Foo: a: float class Bar(BaseModel): b: int class Baz(BaseModel): a: Type[Foo] b: Type[Bar] assert Baz.schema() == { 'title': 'Baz', 'type': 'object', 'properties': {'a': {'title': 
'A'}, 'b': {'title': 'B'}}, 'required': ['a', 'b'], } @pytest.mark.parametrize( 'regex_val', [ '^text$', re.compile('^text$'), ], ) def test_constrained_str_class_dict(regex_val: Union[str, Pattern[str]]): class CustomStr(ConstrainedStr): regex = regex_val class Model(BaseModel): a: Dict[CustomStr, Any] json_schema = Model.schema() assert json_schema == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'patternProperties': {'^text$': {}}, 'title': 'A', 'type': 'object'}}, 'required': ['a'], } pydantic-1.10.14/tests/test_settings.py000066400000000000000000001064251455251250200201150ustar00rootroot00000000000000import os import sys import uuid from datetime import datetime, timezone from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union import pytest from pydantic import BaseModel, BaseSettings, Field, HttpUrl, Json, NoneStr, SecretStr, ValidationError, dataclasses from pydantic.env_settings import ( EnvSettingsSource, InitSettingsSource, SecretsSettingsSource, SettingsError, SettingsSourceCallable, read_env_file, ) try: import dotenv except ImportError: dotenv = None class SimpleSettings(BaseSettings): apple: str def test_sub_env(env): env.set('apple', 'hello') s = SimpleSettings() assert s.apple == 'hello' def test_sub_env_override(env): env.set('apple', 'hello') s = SimpleSettings(apple='goodbye') assert s.apple == 'goodbye' def test_sub_env_missing(): with pytest.raises(ValidationError) as exc_info: SimpleSettings() assert exc_info.value.errors() == [{'loc': ('apple',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_other_setting(): with pytest.raises(ValidationError): SimpleSettings(apple='a', foobar=42) def test_with_prefix(env): class Settings(BaseSettings): apple: str class Config: env_prefix = 'foobar_' with pytest.raises(ValidationError): Settings() env.set('foobar_apple', 'has_prefix') s = Settings() assert s.apple == 'has_prefix' def test_nested_env_with_basemodel(env): class TopValue(BaseModel): apple: str banana: str class Settings(BaseSettings): top: TopValue with pytest.raises(ValidationError): Settings() env.set('top', '{"banana": "secret_value"}') s = Settings(top={'apple': 'value'}) assert s.top == {'apple': 'value', 'banana': 'secret_value'} def test_merge_dict(env): class Settings(BaseSettings): top: Dict[str, str] with pytest.raises(ValidationError): Settings() env.set('top', '{"banana": "secret_value"}') s = Settings(top={'apple': 'value'}) assert s.top == {'apple': 'value', 'banana': 'secret_value'} def test_nested_env_delimiter(env): class SubSubValue(BaseSettings): v6: str class SubValue(BaseSettings): v4: str v5: int sub_sub: SubSubValue class TopValue(BaseSettings): v1: str v2: str v3: str sub: SubValue class Cfg(BaseSettings): v0: str v0_union: Union[SubValue, int] top: TopValue class Config: env_nested_delimiter = '__' env.set('top', '{"v1": "json-1", "v2": "json-2", "sub": {"v5": "xx"}}') env.set('top__sub__v5', '5') env.set('v0', '0') env.set('top__v2', '2') env.set('top__v3', '3') env.set('v0_union', '0') env.set('top__sub__sub_sub__v6', '6') env.set('top__sub__v4', '4') cfg = Cfg() assert cfg.dict() == { 'v0': '0', 'v0_union': 0, 'top': { 'v1': 'json-1', 'v2': '2', 'v3': '3', 'sub': {'v4': '4', 'v5': 5, 'sub_sub': {'v6': '6'}}, }, } def test_nested_env_delimiter_with_prefix(env): class Subsettings(BaseSettings): banana: str class Settings(BaseSettings): subsettings: Subsettings class Config: env_nested_delimiter = '_' env_prefix = 'myprefix_' env.set('myprefix_subsettings_banana', 
'banana') s = Settings() assert s.subsettings.banana == 'banana' class Settings(BaseSettings): subsettings: Subsettings class Config: env_nested_delimiter = '_' env_prefix = 'myprefix__' env.set('myprefix__subsettings_banana', 'banana') s = Settings() assert s.subsettings.banana == 'banana' def test_nested_env_delimiter_complex_required(env): class Cfg(BaseSettings): v: str = 'default' class Config: env_nested_delimiter = '__' env.set('v__x', 'x') env.set('v__y', 'y') cfg = Cfg() assert cfg.dict() == {'v': 'default'} def test_nested_env_delimiter_aliases(env): class SubModel(BaseSettings): v1: str v2: str class Cfg(BaseSettings): sub_model: SubModel class Config: fields = {'sub_model': {'env': ['foo', 'bar']}} env_nested_delimiter = '__' env.set('foo__v1', '-1-') env.set('bar__v2', '-2-') assert Cfg().dict() == {'sub_model': {'v1': '-1-', 'v2': '-2-'}} class DateModel(BaseModel): pips: bool = False class ComplexSettings(BaseSettings): apples: List[str] = [] bananas: Set[int] = set() carrots: dict = {} date: DateModel = DateModel() def test_list(env): env.set('apples', '["russet", "granny smith"]') s = ComplexSettings() assert s.apples == ['russet', 'granny smith'] assert s.date.pips is False def test_set_dict_model(env): env.set('bananas', '[1, 2, 3, 3]') env.set('CARROTS', '{"a": null, "b": 4}') env.set('daTE', '{"pips": true}') s = ComplexSettings() assert s.bananas == {1, 2, 3} assert s.carrots == {'a': None, 'b': 4} assert s.date.pips is True def test_invalid_json(env): env.set('apples', '["russet", "granny smith",]') with pytest.raises(SettingsError, match='error parsing env var "apples"'): ComplexSettings() def test_required_sub_model(env): class Settings(BaseSettings): foobar: DateModel with pytest.raises(ValidationError): Settings() env.set('FOOBAR', '{"pips": "TRUE"}') s = Settings() assert s.foobar.pips is True def test_non_class(env): class Settings(BaseSettings): foobar: NoneStr env.set('FOOBAR', 'xxx') s = Settings() assert s.foobar == 'xxx' def test_env_str(env): class Settings(BaseSettings): apple: str = ... 
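
# --- Editor's aside (illustrative sketch; not part of the original test file) ---
# The Config.fields {'apple': {'env': 'BOOM'}} mechanism used in the
# surrounding test is the class-level equivalent of the per-field
# Field(env=...) shown in test_env_list_field; matching is case-insensitive
# unless Config.case_sensitive is set. A hypothetical standalone sketch:

import os

from pydantic import BaseSettings, Field


class _AppSettings(BaseSettings):
    database_url: str = Field('sqlite://', env='DB_URL')


os.environ['DB_URL'] = 'postgres://db.example/app'
try:
    assert _AppSettings().database_url == 'postgres://db.example/app'
finally:
    del os.environ['DB_URL']
# --- end editor's aside ---
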
class Config: fields = {'apple': {'env': 'BOOM'}} env.set('BOOM', 'hello') assert Settings().apple == 'hello' def test_env_list(env): class Settings(BaseSettings): foobar: str class Config: fields = {'foobar': {'env': ['different1', 'different2']}} env.set('different1', 'value 1') env.set('different2', 'value 2') s = Settings() assert s.foobar == 'value 1' def test_env_list_field(env): class Settings(BaseSettings): foobar: str = Field(..., env='foobar_env_name') env.set('FOOBAR_ENV_NAME', 'env value') s = Settings() assert s.foobar == 'env value' def test_env_list_last(env): class Settings(BaseSettings): foobar: str class Config: fields = {'foobar': {'env': ['different2']}} env.set('different1', 'value 1') env.set('different2', 'value 2') s = Settings() assert s.foobar == 'value 2' assert Settings(foobar='abc').foobar == 'abc' def test_env_inheritance(env): class SettingsParent(BaseSettings): foobar: str = 'parent default' class Config: fields = {'foobar': {'env': 'different'}} class SettingsChild(SettingsParent): foobar: str = 'child default' assert SettingsParent().foobar == 'parent default' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'child default' assert SettingsChild(foobar='abc').foobar == 'abc' env.set('different', 'env value') assert SettingsParent().foobar == 'env value' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'env value' assert SettingsChild(foobar='abc').foobar == 'abc' def test_env_inheritance_field(env): class SettingsParent(BaseSettings): foobar: str = Field('parent default', env='foobar_env') class SettingsChild(SettingsParent): foobar: str = 'child default' assert SettingsParent().foobar == 'parent default' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'child default' assert SettingsChild(foobar='abc').foobar == 'abc' env.set('foobar_env', 'env value') assert SettingsParent().foobar == 'env value' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'child default' assert SettingsChild(foobar='abc').foobar == 'abc' def test_env_prefix_inheritance_config(env): env.set('foobar', 'foobar') env.set('prefix_foobar', 'prefix_foobar') env.set('foobar_parent_from_field', 'foobar_parent_from_field') env.set('foobar_child_from_field', 'foobar_child_from_field') env.set('foobar_parent_from_config', 'foobar_parent_from_config') env.set('foobar_child_from_config', 'foobar_child_from_config') # . Child prefix does not override explicit parent field config class Parent(BaseSettings): foobar: str = Field(None, env='foobar_parent_from_field') class Child(Parent): class Config: env_prefix = 'prefix_' assert Child().foobar == 'foobar_parent_from_field' # c. Child prefix does not override explicit parent class config class Parent(BaseSettings): foobar: str = None class Config: fields = { 'foobar': {'env': ['foobar_parent_from_config']}, } class Child(Parent): class Config: env_prefix = 'prefix_' assert Child().foobar == 'foobar_parent_from_config' # d. 
Child prefix overrides parent with implicit config class Parent(BaseSettings): foobar: str = None class Child(Parent): class Config: env_prefix = 'prefix_' assert Child().foobar == 'prefix_foobar' def test_env_inheritance_config(env): env.set('foobar', 'foobar') env.set('prefix_foobar', 'prefix_foobar') env.set('foobar_parent_from_field', 'foobar_parent_from_field') env.set('foobar_child_from_field', 'foobar_child_from_field') env.set('foobar_parent_from_config', 'foobar_parent_from_config') env.set('foobar_child_from_config', 'foobar_child_from_config') # a. Child class config overrides prefix and parent field config class Parent(BaseSettings): foobar: str = Field(None, env='foobar_parent_from_field') class Child(Parent): class Config: env_prefix = 'prefix_' fields = { 'foobar': {'env': ['foobar_child_from_config']}, } assert Child().foobar == 'foobar_child_from_config' # b. Child class config overrides prefix and parent class config class Parent(BaseSettings): foobar: str = None class Config: fields = { 'foobar': {'env': ['foobar_parent_from_config']}, } class Child(Parent): class Config: env_prefix = 'prefix_' fields = { 'foobar': {'env': ['foobar_child_from_config']}, } assert Child().foobar == 'foobar_child_from_config' # . Child class config overrides prefix and parent with implicit config class Parent(BaseSettings): foobar: Optional[str] class Child(Parent): class Config: env_prefix = 'prefix_' fields = { 'foobar': {'env': ['foobar_child_from_field']}, } assert Child().foobar == 'foobar_child_from_field' def test_env_invalid(env): with pytest.raises(TypeError, match=r'invalid field env: 123 \(int\); should be string, list or set'): class Settings(BaseSettings): foobar: str class Config: fields = {'foobar': {'env': 123}} def test_env_field(env): with pytest.raises(TypeError, match=r'invalid field env: 123 \(int\); should be string, list or set'): class Settings(BaseSettings): foobar: str = Field(..., env=123) def test_aliases_warning(env): with pytest.warns(FutureWarning, match='aliases are no longer used by BaseSettings'): class Settings(BaseSettings): foobar: str = 'default value' class Config: fields = {'foobar': 'foobar_alias'} assert Settings().foobar == 'default value' env.set('foobar_alias', 'xxx') assert Settings().foobar == 'default value' assert Settings(foobar_alias='42').foobar == '42' def test_aliases_no_warning(env): class Settings(BaseSettings): foobar: str = 'default value' class Config: fields = {'foobar': {'alias': 'foobar_alias', 'env': 'foobar_env'}} assert Settings().foobar == 'default value' assert Settings(foobar_alias='42').foobar == '42' env.set('foobar_alias', 'xxx') assert Settings().foobar == 'default value' env.set('foobar_env', 'xxx') assert Settings().foobar == 'xxx' assert Settings(foobar_alias='42').foobar == '42' def test_case_sensitive(monkeypatch): class Settings(BaseSettings): foo: str class Config: case_sensitive = True # Need to patch os.environ to get build to work on Windows, where os.environ is case insensitive monkeypatch.setattr(os, 'environ', value={'Foo': 'foo'}) with pytest.raises(ValidationError) as exc_info: Settings() assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_nested_dataclass(env): @dataclasses.dataclass class MyDataclass: foo: int bar: str class Settings(BaseSettings): n: MyDataclass env.set('N', '[123, "bar value"]') s = Settings() assert isinstance(s.n, MyDataclass) assert s.n.foo == 123 assert s.n.bar == 'bar value' def test_env_takes_precedence(env): 
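
# --- Editor's aside (illustrative sketch; not part of the original test file) ---
# The test that follows customises settings-source priority. By default,
# BaseSettings merges (init kwargs, environment variables, file secrets) in
# that order, with earlier sources winning; returning them reordered from
# Config.customise_sources changes which source takes precedence. A
# standalone sketch (the class name is hypothetical):

from typing import Tuple

from pydantic import BaseSettings
from pydantic.env_settings import SettingsSourceCallable


class _EnvFirstSettings(BaseSettings):
    token: str = 'default'

    class Config:
        @classmethod
        def customise_sources(
            cls,
            init_settings: SettingsSourceCallable,
            env_settings: SettingsSourceCallable,
            file_secret_settings: SettingsSourceCallable,
        ) -> Tuple[SettingsSourceCallable, ...]:
            # Environment variables now override constructor arguments.
            return env_settings, init_settings, file_secret_settings

# Usage: with TOKEN set in the environment, _EnvFirstSettings(token='x').token
# would come from the environment rather than from the 'x' argument.
# --- end editor's aside ---
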
class Settings(BaseSettings): foo: int bar: str class Config: @classmethod def customise_sources( cls, init_settings: SettingsSourceCallable, env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> Tuple[SettingsSourceCallable, ...]: return env_settings, init_settings env.set('BAR', 'env setting') s = Settings(foo='123', bar='argument') assert s.foo == 123 assert s.bar == 'env setting' def test_config_file_settings_nornir(env): """ See https://github.com/pydantic/pydantic/pull/341#issuecomment-450378771 """ def nornir_settings_source(settings: BaseSettings) -> Dict[str, Any]: return {'param_a': 'config a', 'param_b': 'config b', 'param_c': 'config c'} class Settings(BaseSettings): param_a: str param_b: str param_c: str class Config: @classmethod def customise_sources( cls, init_settings: SettingsSourceCallable, env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> Tuple[SettingsSourceCallable, ...]: return env_settings, init_settings, nornir_settings_source env.set('PARAM_C', 'env setting c') s = Settings(param_b='argument b', param_c='argument c') assert s.param_a == 'config a' assert s.param_b == 'argument b' assert s.param_c == 'env setting c' def test_env_union_with_complex_subfields_parses_json(env): class A(BaseSettings): a: str class B(BaseSettings): b: int class Settings(BaseSettings): content: Union[A, B, int] env.set('content', '{"a": "test"}') s = Settings() assert s.content == A(a='test') def test_env_union_with_complex_subfields_parses_plain_if_json_fails(env): class A(BaseSettings): a: str class B(BaseSettings): b: int class Settings(BaseSettings): content: Union[A, B, datetime] env.set('content', '2020-07-05T00:00:00Z') s = Settings() assert s.content == datetime(2020, 7, 5, 0, 0, tzinfo=timezone.utc) def test_env_union_without_complex_subfields_does_not_parse_json(env): class Settings(BaseSettings): content: Union[datetime, str] env.set('content', '2020-07-05T00:00:00Z') s = Settings() assert s.content == datetime(2020, 7, 5, 0, 0, tzinfo=timezone.utc) test_env_file = """\ # this is a comment A=good string # another one, followed by whitespace b='better string' c="best string" """ @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_config(env, tmp_path): p = tmp_path / '.env' p.write_text(test_env_file) class Settings(BaseSettings): a: str b: str c: str class Config: env_file = p env.set('A', 'overridden var') s = Settings() assert s.a == 'overridden var' assert s.b == 'better string' assert s.c == 'best string' @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_config_case_sensitive(tmp_path): p = tmp_path / '.env' p.write_text(test_env_file) class Settings(BaseSettings): a: str b: str c: str class Config: env_file = p case_sensitive = True with pytest.raises(ValidationError) as exc_info: Settings() assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_export(env, tmp_path): p = tmp_path / '.env' p.write_text( """\ export A='good string' export B=better-string export C="best string" """ ) class Settings(BaseSettings): a: str b: str c: str class Config: env_file = p env.set('A', 'overridden var') s = Settings() assert s.a == 'overridden var' assert s.b == 'better-string' assert s.c == 'best string' @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def 
test_env_file_config_custom_encoding(tmp_path): p = tmp_path / '.env' p.write_text('pika=p!±@', encoding='latin-1') class Settings(BaseSettings): pika: str class Config: env_file = p env_file_encoding = 'latin-1' s = Settings() assert s.pika == 'p!±@' @pytest.fixture def home_tmp(): tmp_filename = f'{uuid.uuid4()}.env' home_tmp_path = Path.home() / tmp_filename yield home_tmp_path, tmp_filename home_tmp_path.unlink() @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_home_directory(home_tmp): home_tmp_path, tmp_filename = home_tmp home_tmp_path.write_text('pika=baz') class Settings(BaseSettings): pika: str class Config: env_file = f'~/{tmp_filename}' assert Settings().pika == 'baz' @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_none(tmp_path): p = tmp_path / '.env' p.write_text('a') class Settings(BaseSettings): a: str = 'xxx' s = Settings(_env_file=p) assert s.a == 'xxx' @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_override_file(tmp_path): p1 = tmp_path / '.env' p1.write_text(test_env_file) p2 = tmp_path / '.env.prod' p2.write_text('A="new string"') class Settings(BaseSettings): a: str class Config: env_file = str(p1) s = Settings(_env_file=p2) assert s.a == 'new string' @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_override_none(tmp_path): p = tmp_path / '.env' p.write_text(test_env_file) class Settings(BaseSettings): a: str = None class Config: env_file = p s = Settings(_env_file=None) assert s.a is None @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_not_a_file(env): class Settings(BaseSettings): a: str = None env.set('A', 'ignore non-file') s = Settings(_env_file='tests/') assert s.a == 'ignore non-file' @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_read_env_file_case_sensitive(tmp_path): p = tmp_path / '.env' p.write_text('a="test"\nB=123') assert read_env_file(p) == {'a': 'test', 'b': '123'} assert read_env_file(p, case_sensitive=True) == {'a': 'test', 'B': '123'} @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_read_env_file_syntax_wrong(tmp_path): p = tmp_path / '.env' p.write_text('NOT_AN_ASSIGNMENT') assert read_env_file(p, case_sensitive=True) == {'NOT_AN_ASSIGNMENT': None} @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_example(tmp_path): p = tmp_path / '.env' p.write_text( """\ # ignore comment ENVIRONMENT="production" REDIS_ADDRESS=localhost:6379 MEANING_OF_LIFE=42 MY_VAR='Hello world' """ ) class Settings(BaseSettings): environment: str redis_address: str meaning_of_life: int my_var: str s = Settings(_env_file=str(p)) assert s.dict() == { 'environment': 'production', 'redis_address': 'localhost:6379', 'meaning_of_life': 42, 'my_var': 'Hello world', } @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_env_file_custom_encoding(tmp_path): p = tmp_path / '.env' p.write_text('pika=p!±@', encoding='latin-1') class Settings(BaseSettings): pika: str with pytest.raises(UnicodeDecodeError): Settings(_env_file=str(p)) s = Settings(_env_file=str(p), _env_file_encoding='latin-1') assert s.dict() == {'pika': 'p!±@'} test_default_env_file = """\ debug_mode=true host=localhost Port=8000 """ test_prod_env_file = """\ debug_mode=false host=https://example.com/services """ @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def
test_multiple_env_file(tmp_path): base_env = tmp_path / '.env' base_env.write_text(test_default_env_file) prod_env = tmp_path / '.env.prod' prod_env.write_text(test_prod_env_file) class Settings(BaseSettings): debug_mode: bool host: str port: int class Config: env_file = [base_env, prod_env] s = Settings() assert s.debug_mode is False assert s.host == 'https://example.com/services' assert s.port == 8000 @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_multiple_env_file_encoding(tmp_path): base_env = tmp_path / '.env' base_env.write_text('pika=p!±@', encoding='latin-1') prod_env = tmp_path / '.env.prod' prod_env.write_text('pika=chu!±@', encoding='latin-1') class Settings(BaseSettings): pika: str s = Settings(_env_file=[base_env, prod_env], _env_file_encoding='latin-1') assert s.pika == 'chu!±@' @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_read_dotenv_vars(tmp_path): base_env = tmp_path / '.env' base_env.write_text(test_default_env_file) prod_env = tmp_path / '.env.prod' prod_env.write_text(test_prod_env_file) source = EnvSettingsSource(env_file=[base_env, prod_env], env_file_encoding='utf8') assert source._read_env_files(case_sensitive=False) == { 'debug_mode': 'false', 'host': 'https://example.com/services', 'port': '8000', } assert source._read_env_files(case_sensitive=True) == { 'debug_mode': 'false', 'host': 'https://example.com/services', 'Port': '8000', } @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_read_dotenv_vars_when_env_file_is_none(): assert EnvSettingsSource(env_file=None, env_file_encoding=None)._read_env_files(case_sensitive=False) == {} @pytest.mark.skipif(dotenv, reason='python-dotenv is installed') def test_dotenv_not_installed(tmp_path): p = tmp_path / '.env' p.write_text('a=b') class Settings(BaseSettings): a: str with pytest.raises(ImportError, match=r'^python-dotenv is not installed, run `pip install pydantic\[dotenv\]`$'): Settings(_env_file=p) def test_alias_set(env): class Settings(BaseSettings): foo: str = 'default foo' bar: str = 'bar default' class Config: fields = {'foo': {'env': 'foo_env'}} assert Settings.__fields__['bar'].name == 'bar' assert Settings.__fields__['bar'].alias == 'bar' assert Settings.__fields__['foo'].name == 'foo' assert Settings.__fields__['foo'].alias == 'foo' class SubSettings(Settings): spam: str = 'spam default' assert SubSettings.__fields__['bar'].name == 'bar' assert SubSettings.__fields__['bar'].alias == 'bar' assert SubSettings.__fields__['foo'].name == 'foo' assert SubSettings.__fields__['foo'].alias == 'foo' assert SubSettings().dict() == {'foo': 'default foo', 'bar': 'bar default', 'spam': 'spam default'} env.set('foo_env', 'fff') assert SubSettings().dict() == {'foo': 'fff', 'bar': 'bar default', 'spam': 'spam default'} env.set('bar', 'bbb') assert SubSettings().dict() == {'foo': 'fff', 'bar': 'bbb', 'spam': 'spam default'} env.set('spam', 'sss') assert SubSettings().dict() == {'foo': 'fff', 'bar': 'bbb', 'spam': 'sss'} def test_prefix_on_parent(env): class MyBaseSettings(BaseSettings): var: str = 'old' class MySubSettings(MyBaseSettings): class Config: env_prefix = 'PREFIX_' assert MyBaseSettings().dict() == {'var': 'old'} assert MySubSettings().dict() == {'var': 'old'} env.set('PREFIX_VAR', 'new') assert MyBaseSettings().dict() == {'var': 'old'} assert MySubSettings().dict() == {'var': 'new'} def test_frozenset(env): class Settings(BaseSettings): foo: str = 'default foo' class Config: fields = {'foo': {'env': frozenset(['foo_a', 
'foo_b'])}} assert Settings.__fields__['foo'].field_info.extra['env_names'] == frozenset({'foo_a', 'foo_b'}) assert Settings().dict() == {'foo': 'default foo'} env.set('foo_a', 'x') assert Settings().dict() == {'foo': 'x'} def test_secrets_path(tmp_path): p = tmp_path / 'foo' p.write_text('foo_secret_value_str') class Settings(BaseSettings): foo: str class Config: secrets_dir = tmp_path assert Settings().dict() == {'foo': 'foo_secret_value_str'} def test_secrets_case_sensitive(tmp_path): (tmp_path / 'SECRET_VAR').write_text('foo_env_value_str') class Settings(BaseSettings): secret_var: Optional[str] class Config: secrets_dir = tmp_path case_sensitive = True assert Settings().dict() == {'secret_var': None} def test_secrets_case_insensitive(tmp_path): (tmp_path / 'SECRET_VAR').write_text('foo_env_value_str') class Settings(BaseSettings): secret_var: Optional[str] class Config: secrets_dir = tmp_path case_sensitive = False settings = Settings().dict() assert settings == {'secret_var': 'foo_env_value_str'} def test_secrets_path_url(tmp_path): (tmp_path / 'foo').write_text('http://www.example.com') (tmp_path / 'bar').write_text('snap') class Settings(BaseSettings): foo: HttpUrl bar: SecretStr class Config: secrets_dir = tmp_path assert Settings().dict() == {'foo': 'http://www.example.com', 'bar': SecretStr('snap')} def test_secrets_path_json(tmp_path): p = tmp_path / 'foo' p.write_text('{"a": "b"}') class Settings(BaseSettings): foo: Dict[str, str] class Config: secrets_dir = tmp_path assert Settings().dict() == {'foo': {'a': 'b'}} def test_secrets_path_invalid_json(tmp_path): p = tmp_path / 'foo' p.write_text('{"a": "b"') class Settings(BaseSettings): foo: Dict[str, str] class Config: secrets_dir = tmp_path with pytest.raises(SettingsError, match='error parsing env var "foo"'): Settings() def test_secrets_missing(tmp_path): class Settings(BaseSettings): foo: str class Config: secrets_dir = tmp_path with pytest.raises(ValidationError) as exc_info: Settings() assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_secrets_invalid_secrets_dir(tmp_path): p1 = tmp_path / 'foo' p1.write_text('foo_secret_value_str') class Settings(BaseSettings): foo: str class Config: secrets_dir = p1 with pytest.raises(SettingsError, match='secrets_dir must reference a directory, not a file'): Settings() @pytest.mark.skipif(sys.platform.startswith('win'), reason='windows paths break regex') def test_secrets_missing_location(tmp_path): class Settings(BaseSettings): class Config: secrets_dir = tmp_path / 'does_not_exist' with pytest.warns(UserWarning, match=f'directory "{tmp_path}/does_not_exist" does not exist'): Settings() @pytest.mark.skipif(sys.platform.startswith('win'), reason='windows paths break regex') def test_secrets_file_is_a_directory(tmp_path): p1 = tmp_path / 'foo' p1.mkdir() class Settings(BaseSettings): foo: Optional[str] class Config: secrets_dir = tmp_path with pytest.warns(UserWarning, match=f'attempted to load secret file "{tmp_path}/foo" but found a directory inste'): Settings() @pytest.mark.skipif(not dotenv, reason='python-dotenv not installed') def test_secrets_dotenv_precedence(tmp_path): s = tmp_path / 'foo' s.write_text('foo_secret_value_str') e = tmp_path / '.env' e.write_text('foo=foo_env_value_str') class Settings(BaseSettings): foo: str class Config: secrets_dir = tmp_path assert Settings(_env_file=e).dict() == {'foo': 'foo_env_value_str'} def test_external_settings_sources_precedence(env): def external_source_0(settings: 
BaseSettings) -> Dict[str, str]: return {'apple': 'value 0', 'banana': 'value 2'} def external_source_1(settings: BaseSettings) -> Dict[str, str]: return {'apple': 'value 1', 'raspberry': 'value 3'} class Settings(BaseSettings): apple: str banana: str raspberry: str class Config: @classmethod def customise_sources( cls, init_settings: SettingsSourceCallable, env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> Tuple[SettingsSourceCallable, ...]: return init_settings, env_settings, file_secret_settings, external_source_0, external_source_1 env.set('banana', 'value 1') assert Settings().dict() == {'apple': 'value 0', 'banana': 'value 1', 'raspberry': 'value 3'} def test_external_settings_sources_filter_env_vars(): vault_storage = {'user:password': {'apple': 'value 0', 'banana': 'value 2'}} class VaultSettingsSource: def __init__(self, user: str, password: str): self.user = user self.password = password def __call__(self, settings: BaseSettings) -> Dict[str, str]: vault_vars = vault_storage[f'{self.user}:{self.password}'] return { field.alias: vault_vars[field.name] for field in settings.__fields__.values() if field.name in vault_vars } class Settings(BaseSettings): apple: str banana: str class Config: @classmethod def customise_sources( cls, init_settings: SettingsSourceCallable, env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> Tuple[SettingsSourceCallable, ...]: return ( init_settings, env_settings, file_secret_settings, VaultSettingsSource(user='user', password='password'), ) assert Settings().dict() == {'apple': 'value 0', 'banana': 'value 2'} def test_customise_sources_empty(): class Settings(BaseSettings): apple: str = 'default' banana: str = 'default' class Config: @classmethod def customise_sources(cls, *args, **kwargs): return () assert Settings().dict() == {'apple': 'default', 'banana': 'default'} assert Settings(apple='xxx').dict() == {'apple': 'default', 'banana': 'default'} def test_builtins_settings_source_repr(): assert ( repr(InitSettingsSource(init_kwargs={'apple': 'value 0', 'banana': 'value 1'})) == "InitSettingsSource(init_kwargs={'apple': 'value 0', 'banana': 'value 1'})" ) assert ( repr(EnvSettingsSource(env_file='.env', env_file_encoding='utf-8')) == "EnvSettingsSource(env_file='.env', env_file_encoding='utf-8', env_nested_delimiter=None)" ) assert repr(SecretsSettingsSource(secrets_dir='/secrets')) == "SecretsSettingsSource(secrets_dir='/secrets')" def _parse_custom_dict(value: str) -> Dict[int, str]: """A custom parsing function passed into env parsing test.""" res = {} for part in value.split(','): k, v = part.split('=') res[int(k)] = v return res def test_env_setting_source_custom_env_parse(env): class Settings(BaseSettings): top: Dict[int, str] class Config: @classmethod def parse_env_var(cls, field_name: str, raw_val: str): if field_name == 'top': return _parse_custom_dict(raw_val) return cls.json_loads(raw_val) with pytest.raises(ValidationError): Settings() env.set('top', '1=apple,2=banana') s = Settings() assert s.top == {1: 'apple', 2: 'banana'} def test_env_settings_source_custom_env_parse_is_bad(env): class Settings(BaseSettings): top: Dict[int, str] class Config: @classmethod def parse_env_var(cls, field_name: str, raw_val: str): if field_name == 'top': return int(raw_val) return cls.json_loads(raw_val) env.set('top', '1=apple,2=banana') with pytest.raises(SettingsError, match='error parsing env var "top"'): Settings() def
test_secret_settings_source_custom_env_parse(tmp_path): p = tmp_path / 'top' p.write_text('1=apple,2=banana') class Settings(BaseSettings): top: Dict[int, str] class Config: secrets_dir = tmp_path @classmethod def parse_env_var(cls, field_name: str, raw_val: str): if field_name == 'top': return _parse_custom_dict(raw_val) return cls.json_loads(raw_val) s = Settings() assert s.top == {1: 'apple', 2: 'banana'} def test_env_json_field(env): class Settings(BaseSettings): x: Json env.set('x', '{"foo": "bar"}') s = Settings() assert s.x == {'foo': 'bar'} env.set('x', 'test') with pytest.raises(ValidationError) as exc_info: Settings() assert exc_info.value.errors() == [{'loc': ('x',), 'msg': 'Invalid JSON', 'type': 'value_error.json'}] def test_env_json_field_dict(env): class Settings(BaseSettings): x: Json[Dict[str, int]] env.set('x', '{"foo": 1}') s = Settings() assert s.x == {'foo': 1} env.set('x', '{"foo": "bar"}') with pytest.raises(ValidationError) as exc_info: Settings() assert exc_info.value.errors() == [ {'loc': ('x', 'foo'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] pydantic-1.10.14/tests/test_structural_pattern_matching.py000066400000000000000000000012321455251250200240620ustar00rootroot00000000000000import sys import pytest @pytest.mark.skipif(sys.version_info < (3, 10), reason='requires python 3.10 or higher') def test_match_kwargs(create_module): module = create_module( # language=Python """ from pydantic import BaseModel class Model(BaseModel): a: str b: str def main(model): match model: case Model(a='a', b=b): return b case Model(a='a2'): return 'b2' case _: return None """ ) assert module.main(module.Model(a='a', b='b')) == 'b' assert module.main(module.Model(a='a2', b='b')) == 'b2' assert module.main(module.Model(a='x', b='b')) is None pydantic-1.10.14/tests/test_tools.py000066400000000000000000000065051455251250200174130ustar00rootroot00000000000000import json from typing import Dict, List, Mapping, Union import pytest from pydantic import BaseModel, ValidationError from pydantic.dataclasses import dataclass from pydantic.tools import parse_file_as, parse_obj_as, parse_raw_as, schema_json_of, schema_of @pytest.mark.parametrize('obj,type_,parsed', [('1', int, 1), (['1'], List[int], [1])]) def test_parse_obj(obj, type_, parsed): assert parse_obj_as(type_, obj) == parsed def test_parse_obj_as_model(): class Model(BaseModel): x: int y: bool z: str model_inputs = {'x': '1', 'y': 'true', 'z': 'abc'} assert parse_obj_as(Model, model_inputs) == Model(**model_inputs) def test_parse_obj_preserves_subclasses(): class ModelA(BaseModel): a: Mapping[int, str] class ModelB(ModelA): b: int model_b = ModelB(a={1: 'f'}, b=2) parsed = parse_obj_as(List[ModelA], [model_b]) assert parsed == [model_b] def test_parse_obj_fails(): with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a') assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert exc_info.value.model.__name__ == 'ParsingModel[int]' def test_parsing_model_naming(): with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a') assert str(exc_info.value).split('\n')[0] == '1 validation error for ParsingModel[int]' with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a', type_name='ParsingModel') assert str(exc_info.value).split('\n')[0] == '1 validation error for ParsingModel' with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a', type_name=lambda type_: type_.__name__) assert 
str(exc_info.value).split('\n')[0] == '1 validation error for int' def test_parse_as_dataclass(): @dataclass class PydanticDataclass: x: int inputs = {'x': '1'} assert parse_obj_as(PydanticDataclass, inputs) == PydanticDataclass(1) def test_parse_mapping_as(): inputs = {'1': '2'} assert parse_obj_as(Dict[int, int], inputs) == {1: 2} def test_parse_file_as(tmp_path): p = tmp_path / 'test.json' p.write_text('{"1": "2"}') assert parse_file_as(Dict[int, int], p) == {1: 2} def test_parse_file_as_json_loads(tmp_path): def custom_json_loads(*args, **kwargs): data = json.loads(*args, **kwargs) data[1] = 99 return data p = tmp_path / 'test_json_loads.json' p.write_text('{"1": "2"}') assert parse_file_as(Dict[int, int], p, json_loads=custom_json_loads) == {1: 99} def test_raw_as(): class Item(BaseModel): id: int name: str item_data = '[{"id": 1, "name": "My Item"}]' items = parse_raw_as(List[Item], item_data) assert items == [Item(id=1, name='My Item')] def test_schema(): assert schema_of(Union[int, str], title='IntOrStr') == { 'title': 'IntOrStr', 'anyOf': [{'type': 'integer'}, {'type': 'string'}], } assert schema_json_of(Union[int, str], title='IntOrStr', indent=2) == ( '{\n' ' "title": "IntOrStr",\n' ' "anyOf": [\n' ' {\n' ' "type": "integer"\n' ' },\n' ' {\n' ' "type": "string"\n' ' }\n' ' ]\n' '}' ) pydantic-1.10.14/tests/test_types.py000066400000000000000000003137521455251250200174240ustar00rootroot00000000000000import itertools import math import os import re import sys import uuid from collections import OrderedDict, deque from datetime import date, datetime, time, timedelta from decimal import Decimal from enum import Enum, IntEnum from pathlib import Path from typing import ( Any, Deque, Dict, FrozenSet, Iterable, Iterator, List, MutableSet, NewType, Optional, Pattern, Sequence, Set, Tuple, Union, ) from uuid import UUID import pytest from typing_extensions import Literal, TypedDict from pydantic import ( UUID1, UUID3, UUID4, UUID5, BaseModel, ByteSize, ConfigError, DirectoryPath, EmailStr, Field, FilePath, FiniteFloat, FutureDate, Json, NameEmail, NegativeFloat, NegativeInt, NonNegativeFloat, NonNegativeInt, NonPositiveFloat, NonPositiveInt, PastDate, PositiveFloat, PositiveInt, PyObject, SecretBytes, SecretStr, StrictBool, StrictBytes, StrictFloat, StrictInt, StrictStr, ValidationError, conbytes, condecimal, confloat, confrozenset, conint, conlist, conset, constr, create_model, errors, validator, ) from pydantic.types import SecretField from pydantic.typing import NoneType try: import email_validator except ImportError: email_validator = None class ConBytesModel(BaseModel): v: conbytes(max_length=10) = b'foobar' def foo(): return 42 def test_constrained_bytes_good(): m = ConBytesModel(v=b'short') assert m.v == b'short' def test_constrained_bytes_default(): m = ConBytesModel() assert m.v == b'foobar' def test_constrained_bytes_too_long(): with pytest.raises(ValidationError) as exc_info: ConBytesModel(v=b'this is too long') assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 10 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 10}, } ] @pytest.mark.parametrize( 'to_upper, value, result', [ (True, b'abcd', b'ABCD'), (False, b'aBcD', b'aBcD'), ], ) def test_constrained_bytes_upper(to_upper, value, result): class Model(BaseModel): v: conbytes(to_upper=to_upper) m = Model(v=value) assert m.v == result @pytest.mark.parametrize( 'to_lower, value, result', [ (True, b'ABCD', b'abcd'), (False, b'ABCD', b'ABCD'), ], ) def 
test_constrained_bytes_lower(to_lower, value, result): class Model(BaseModel): v: conbytes(to_lower=to_lower) m = Model(v=value) assert m.v == result def test_constrained_bytes_strict_true(): class Model(BaseModel): v: conbytes(strict=True) assert Model(v=b'foobar').v == b'foobar' assert Model(v=bytearray('foobar', 'utf-8')).v == b'foobar' with pytest.raises(ValidationError): Model(v='foostring') with pytest.raises(ValidationError): Model(v=42) with pytest.raises(ValidationError): Model(v=0.42) def test_constrained_bytes_strict_false(): class Model(BaseModel): v: conbytes(strict=False) assert Model(v=b'foobar').v == b'foobar' assert Model(v=bytearray('foobar', 'utf-8')).v == b'foobar' assert Model(v='foostring').v == b'foostring' assert Model(v=42).v == b'42' assert Model(v=0.42).v == b'0.42' def test_constrained_bytes_strict_default(): class Model(BaseModel): v: conbytes() assert Model(v=b'foobar').v == b'foobar' assert Model(v=bytearray('foobar', 'utf-8')).v == b'foobar' assert Model(v='foostring').v == b'foostring' assert Model(v=42).v == b'42' assert Model(v=0.42).v == b'0.42' def test_constrained_list_good(): class ConListModelMax(BaseModel): v: conlist(int) = [] m = ConListModelMax(v=[1, 2, 3]) assert m.v == [1, 2, 3] def test_constrained_list_default(): class ConListModelMax(BaseModel): v: conlist(int) = [] m = ConListModelMax() assert m.v == [] def test_constrained_list_too_long(): class ConListModelMax(BaseModel): v: conlist(int, max_items=10) = [] with pytest.raises(ValidationError) as exc_info: ConListModelMax(v=list(str(i) for i in range(11))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 10 items', 'type': 'value_error.list.max_items', 'ctx': {'limit_value': 10}, } ] def test_constrained_list_too_short(): class ConListModelMin(BaseModel): v: conlist(int, min_items=1) with pytest.raises(ValidationError) as exc_info: ConListModelMin(v=[]) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.list.min_items', 'ctx': {'limit_value': 1}, } ] def test_constrained_list_not_unique_hashable_items(): class ConListModelUnique(BaseModel): v: conlist(int, unique_items=True) with pytest.raises(ValidationError) as exc_info: ConListModelUnique(v=[1, 1, 2, 2, 2, 3]) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items', } ] def test_constrained_list_not_unique_unhashable_items(): class ConListModelUnique(BaseModel): v: conlist(Set[int], unique_items=True) m = ConListModelUnique(v=[{1}, {2}, {3}]) assert m.v == [{1}, {2}, {3}] with pytest.raises(ValidationError) as exc_info: ConListModelUnique(v=[{1}, {1}, {2}, {2}, {2}, {3}]) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items', } ] def test_constrained_list_optional(): class Model(BaseModel): req: Optional[conlist(str, min_items=1)] = ... 
opt: Optional[conlist(str, min_items=1)] assert Model(req=None).dict() == {'req': None, 'opt': None} assert Model(req=None, opt=None).dict() == {'req': None, 'opt': None} with pytest.raises(ValidationError) as exc_info: Model(req=[], opt=[]) assert exc_info.value.errors() == [ { 'loc': ('req',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.list.min_items', 'ctx': {'limit_value': 1}, }, { 'loc': ('opt',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.list.min_items', 'ctx': {'limit_value': 1}, }, ] assert Model(req=['a'], opt=['a']).dict() == {'req': ['a'], 'opt': ['a']} def test_constrained_list_constraints(): class ConListModelBoth(BaseModel): v: conlist(int, min_items=7, max_items=11) m = ConListModelBoth(v=list(range(7))) assert m.v == list(range(7)) m = ConListModelBoth(v=list(range(11))) assert m.v == list(range(11)) with pytest.raises(ValidationError) as exc_info: ConListModelBoth(v=list(range(6))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at least 7 items', 'type': 'value_error.list.min_items', 'ctx': {'limit_value': 7}, } ] with pytest.raises(ValidationError) as exc_info: ConListModelBoth(v=list(range(12))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 11 items', 'type': 'value_error.list.max_items', 'ctx': {'limit_value': 11}, } ] with pytest.raises(ValidationError) as exc_info: ConListModelBoth(v=1) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_constrained_list_item_type_fails(): class ConListModel(BaseModel): v: conlist(int) = [] with pytest.raises(ValidationError) as exc_info: ConListModel(v=['a', 'b', 'c']) assert exc_info.value.errors() == [ {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] def test_conlist(): class Model(BaseModel): foo: List[int] = Field(..., min_items=2, max_items=4, unique_items=True) bar: conlist(str, min_items=1, max_items=4, unique_items=False) = None assert Model(foo=[1, 2], bar=['spoon']).dict() == {'foo': [1, 2], 'bar': ['spoon']} with pytest.raises(ValidationError, match='ensure this value has at least 2 items'): Model(foo=[1]) with pytest.raises(ValidationError, match='ensure this value has at most 4 items'): Model(foo=list(range(5))) with pytest.raises(ValidationError, match='the list has duplicated items'): Model(foo=[1, 1, 2, 2]) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'foo': { 'title': 'Foo', 'type': 'array', 'items': {'type': 'integer'}, 'minItems': 2, 'maxItems': 4, 'uniqueItems': True, }, 'bar': { 'title': 'Bar', 'type': 'array', 'items': {'type': 'string'}, 'minItems': 1, 'maxItems': 4, 'uniqueItems': False, }, }, 'required': ['foo'], } with pytest.raises(ValidationError) as exc_info: Model(foo=[1, 'x', 'y']) assert exc_info.value.errors() == [ {'loc': ('foo', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('foo', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] with pytest.raises(ValidationError) as exc_info: Model(foo=1) assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_conlist_wrong_type_default(): """It should not validate default value by 
default""" class Model(BaseModel): v: conlist(int) = 'a' m = Model() assert m.v == 'a' def test_constrained_set_good(): class Model(BaseModel): v: conset(int) = [] m = Model(v=[1, 2, 3]) assert m.v == {1, 2, 3} def test_constrained_set_default(): class Model(BaseModel): v: conset(int) = set() m = Model() assert m.v == set() def test_constrained_set_default_invalid(): class Model(BaseModel): v: conset(int) = 'not valid, not validated' m = Model() assert m.v == 'not valid, not validated' def test_constrained_set_too_long(): class ConSetModelMax(BaseModel): v: conset(int, max_items=10) = [] with pytest.raises(ValidationError) as exc_info: ConSetModelMax(v={str(i) for i in range(11)}) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 10 items', 'type': 'value_error.set.max_items', 'ctx': {'limit_value': 10}, } ] def test_constrained_set_too_short(): class ConSetModelMin(BaseModel): v: conset(int, min_items=1) with pytest.raises(ValidationError) as exc_info: ConSetModelMin(v=[]) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.set.min_items', 'ctx': {'limit_value': 1}, } ] def test_constrained_set_optional(): class Model(BaseModel): req: Optional[conset(str, min_items=1)] = ... opt: Optional[conset(str, min_items=1)] assert Model(req=None).dict() == {'req': None, 'opt': None} assert Model(req=None, opt=None).dict() == {'req': None, 'opt': None} with pytest.raises(ValidationError) as exc_info: Model(req=set(), opt=set()) assert exc_info.value.errors() == [ { 'loc': ('req',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.set.min_items', 'ctx': {'limit_value': 1}, }, { 'loc': ('opt',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.set.min_items', 'ctx': {'limit_value': 1}, }, ] assert Model(req={'a'}, opt={'a'}).dict() == {'req': {'a'}, 'opt': {'a'}} def test_constrained_set_constraints(): class ConSetModelBoth(BaseModel): v: conset(int, min_items=7, max_items=11) m = ConSetModelBoth(v=set(range(7))) assert m.v == set(range(7)) m = ConSetModelBoth(v=set(range(11))) assert m.v == set(range(11)) with pytest.raises(ValidationError) as exc_info: ConSetModelBoth(v=set(range(6))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at least 7 items', 'type': 'value_error.set.min_items', 'ctx': {'limit_value': 7}, } ] with pytest.raises(ValidationError) as exc_info: ConSetModelBoth(v=set(range(12))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 11 items', 'type': 'value_error.set.max_items', 'ctx': {'limit_value': 11}, } ] with pytest.raises(ValidationError) as exc_info: ConSetModelBoth(v=1) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid set', 'type': 'type_error.set'}] def test_constrained_set_item_type_fails(): class ConSetModel(BaseModel): v: conset(int) = [] with pytest.raises(ValidationError) as exc_info: ConSetModel(v=['a', 'b', 'c']) assert exc_info.value.errors() == [ {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] def test_conset(): class Model(BaseModel): foo: Set[int] = Field(..., min_items=2, max_items=4) bar: conset(str, min_items=1, max_items=4) = None assert Model(foo=[1, 2], bar=['spoon']).dict() == {'foo': 
{1, 2}, 'bar': {'spoon'}} assert Model(foo=[1, 1, 1, 2, 2], bar=['spoon']).dict() == {'foo': {1, 2}, 'bar': {'spoon'}} with pytest.raises(ValidationError, match='ensure this value has at least 2 items'): Model(foo=[1]) with pytest.raises(ValidationError, match='ensure this value has at most 4 items'): Model(foo=list(range(5))) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'foo': { 'title': 'Foo', 'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True, 'minItems': 2, 'maxItems': 4, }, 'bar': { 'title': 'Bar', 'type': 'array', 'items': {'type': 'string'}, 'uniqueItems': True, 'minItems': 1, 'maxItems': 4, }, }, 'required': ['foo'], } with pytest.raises(ValidationError) as exc_info: Model(foo=[1, 'x', 'y']) errors = exc_info.value.errors() assert len(errors) == 2 assert all(error['msg'] == 'value is not a valid integer' for error in errors) with pytest.raises(ValidationError) as exc_info: Model(foo=1) assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'value is not a valid set', 'type': 'type_error.set'}] def test_conset_not_required(): class Model(BaseModel): foo: Set[int] = None assert Model(foo=None).foo is None assert Model().foo is None def test_confrozenset(): class Model(BaseModel): foo: FrozenSet[int] = Field(..., min_items=2, max_items=4) bar: confrozenset(str, min_items=1, max_items=4) = None m = Model(foo=[1, 2], bar=['spoon']) assert m.dict() == {'foo': {1, 2}, 'bar': {'spoon'}} assert isinstance(m.foo, frozenset) assert isinstance(m.bar, frozenset) assert Model(foo=[1, 1, 1, 2, 2], bar=['spoon']).dict() == {'foo': {1, 2}, 'bar': {'spoon'}} with pytest.raises(ValidationError, match='ensure this value has at least 2 items'): Model(foo=[1]) with pytest.raises(ValidationError, match='ensure this value has at most 4 items'): Model(foo=list(range(5))) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'foo': { 'title': 'Foo', 'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True, 'minItems': 2, 'maxItems': 4, }, 'bar': { 'title': 'Bar', 'type': 'array', 'items': {'type': 'string'}, 'uniqueItems': True, 'minItems': 1, 'maxItems': 4, }, }, 'required': ['foo'], } with pytest.raises(ValidationError) as exc_info: Model(foo=[1, 'x', 'y']) errors = exc_info.value.errors() assert len(errors) == 2 assert all(error['msg'] == 'value is not a valid integer' for error in errors) with pytest.raises(ValidationError) as exc_info: Model(foo=1) assert exc_info.value.errors() == [ {'loc': ('foo',), 'msg': 'value is not a valid frozenset', 'type': 'type_error.frozenset'} ] def test_confrozenset_not_required(): class Model(BaseModel): foo: Optional[FrozenSet[int]] = None assert Model(foo=None).foo is None assert Model().foo is None def test_constrained_frozenset_optional(): class Model(BaseModel): req: Optional[confrozenset(str, min_items=1)] = ... 
opt: Optional[confrozenset(str, min_items=1)] assert Model(req=None).dict() == {'req': None, 'opt': None} assert Model(req=None, opt=None).dict() == {'req': None, 'opt': None} with pytest.raises(ValidationError) as exc_info: Model(req=frozenset(), opt=frozenset()) assert exc_info.value.errors() == [ { 'loc': ('req',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.frozenset.min_items', 'ctx': {'limit_value': 1}, }, { 'loc': ('opt',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.frozenset.min_items', 'ctx': {'limit_value': 1}, }, ] assert Model(req={'a'}, opt={'a'}).dict() == {'req': {'a'}, 'opt': {'a'}} class ConStringModel(BaseModel): v: constr(max_length=10) = 'foobar' def test_constrained_str_good(): m = ConStringModel(v='short') assert m.v == 'short' def test_constrained_str_default(): m = ConStringModel() assert m.v == 'foobar' def test_constrained_str_too_long(): with pytest.raises(ValidationError) as exc_info: ConStringModel(v='this is too long') assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 10 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 10}, } ] @pytest.mark.parametrize( 'to_upper, value, result', [ (True, 'abcd', 'ABCD'), (False, 'aBcD', 'aBcD'), ], ) def test_constrained_str_upper(to_upper, value, result): class Model(BaseModel): v: constr(to_upper=to_upper) m = Model(v=value) assert m.v == result @pytest.mark.parametrize( 'to_lower, value, result', [ (True, 'ABCD', 'abcd'), (False, 'ABCD', 'ABCD'), ], ) def test_constrained_str_lower(to_lower, value, result): class Model(BaseModel): v: constr(to_lower=to_lower) m = Model(v=value) assert m.v == result def test_constrained_str_max_length_0(): class Model(BaseModel): v: constr(max_length=0) m = Model(v='') assert m.v == '' with pytest.raises(ValidationError) as exc_info: Model(v='qwe') assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 0 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 0}, } ] def test_module_import(): class PyObjectModel(BaseModel): module: PyObject = 'os.path' m = PyObjectModel() assert m.module == os.path with pytest.raises(ValidationError) as exc_info: PyObjectModel(module='foobar') assert exc_info.value.errors() == [ { 'loc': ('module',), 'msg': 'ensure this value contains valid import path or valid callable: ' '"foobar" doesn\'t look like a module path', 'type': 'type_error.pyobject', 'ctx': {'error_message': '"foobar" doesn\'t look like a module path'}, } ] with pytest.raises(ValidationError) as exc_info: PyObjectModel(module='os.missing') assert exc_info.value.errors() == [ { 'loc': ('module',), 'msg': 'ensure this value contains valid import path or valid callable: ' 'Module "os" does not define a "missing" attribute', 'type': 'type_error.pyobject', 'ctx': {'error_message': 'Module "os" does not define a "missing" attribute'}, } ] with pytest.raises(ValidationError) as exc_info: PyObjectModel(module=[1, 2, 3]) assert exc_info.value.errors() == [ { 'loc': ('module',), 'msg': 'ensure this value contains valid import path or valid callable: ' 'value is neither a valid import path not a valid callable', 'type': 'type_error.pyobject', 'ctx': {'error_message': 'value is neither a valid import path not a valid callable'}, } ] def test_pyobject_none(): class PyObjectModel(BaseModel): module: PyObject = None m = PyObjectModel() assert m.module is None def test_pyobject_callable(): class PyObjectModel(BaseModel): foo: 
PyObject = foo m = PyObjectModel() assert m.foo is foo assert m.foo() == 42 class CheckModel(BaseModel): bool_check = True str_check = 's' bytes_check = b's' int_check = 1 float_check = 1.0 uuid_check: UUID = UUID('7bd00d58-6485-4ca6-b889-3da6d8df3ee4') decimal_check: Decimal = Decimal('42.24') class Config: anystr_strip_whitespace = True max_anystr_length = 10 class BoolCastable: def __bool__(self) -> bool: return True @pytest.mark.parametrize( 'field,value,result', [ ('bool_check', True, True), ('bool_check', 1, True), ('bool_check', 'y', True), ('bool_check', 'Y', True), ('bool_check', 'yes', True), ('bool_check', 'Yes', True), ('bool_check', 'YES', True), ('bool_check', 'true', True), ('bool_check', 'True', True), ('bool_check', 'TRUE', True), ('bool_check', 'on', True), ('bool_check', 'On', True), ('bool_check', 'ON', True), ('bool_check', '1', True), ('bool_check', 't', True), ('bool_check', 'T', True), ('bool_check', b'TRUE', True), ('bool_check', False, False), ('bool_check', 0, False), ('bool_check', 'n', False), ('bool_check', 'N', False), ('bool_check', 'no', False), ('bool_check', 'No', False), ('bool_check', 'NO', False), ('bool_check', 'false', False), ('bool_check', 'False', False), ('bool_check', 'FALSE', False), ('bool_check', 'off', False), ('bool_check', 'Off', False), ('bool_check', 'OFF', False), ('bool_check', '0', False), ('bool_check', 'f', False), ('bool_check', 'F', False), ('bool_check', b'FALSE', False), ('bool_check', None, ValidationError), ('bool_check', '', ValidationError), ('bool_check', [], ValidationError), ('bool_check', {}, ValidationError), ('bool_check', [1, 2, 3, 4], ValidationError), ('bool_check', {1: 2, 3: 4}, ValidationError), ('bool_check', b'2', ValidationError), ('bool_check', '2', ValidationError), ('bool_check', 2, ValidationError), ('bool_check', b'\x81', ValidationError), ('bool_check', BoolCastable(), ValidationError), ('str_check', 's', 's'), ('str_check', ' s ', 's'), ('str_check', b's', 's'), ('str_check', b' s ', 's'), ('str_check', 1, '1'), ('str_check', 'x' * 11, ValidationError), ('str_check', b'x' * 11, ValidationError), ('bytes_check', 's', b's'), ('bytes_check', ' s ', b's'), ('bytes_check', b's', b's'), ('bytes_check', b' s ', b's'), ('bytes_check', 1, b'1'), ('bytes_check', bytearray('xx', encoding='utf8'), b'xx'), ('bytes_check', True, b'True'), ('bytes_check', False, b'False'), ('bytes_check', {}, ValidationError), ('bytes_check', 'x' * 11, ValidationError), ('bytes_check', b'x' * 11, ValidationError), ('int_check', 1, 1), ('int_check', 1.9, 1), ('int_check', '1', 1), ('int_check', '1.9', ValidationError), ('int_check', b'1', 1), ('int_check', 12, 12), ('int_check', '12', 12), ('int_check', b'12', 12), ('float_check', 1, 1.0), ('float_check', 1.0, 1.0), ('float_check', '1.0', 1.0), ('float_check', '1', 1.0), ('float_check', b'1.0', 1.0), ('float_check', b'1', 1.0), ('uuid_check', 'ebcdab58-6eb8-46fb-a190-d07a33e9eac8', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8'), UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', b'ebcdab58-6eb8-46fb-a190-d07a33e9eac8', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', b'\x12\x34\x56\x78' * 4, UUID('12345678-1234-5678-1234-567812345678')), ('uuid_check', 'ebcdab58-6eb8-46fb-a190-', ValidationError), ('uuid_check', 123, ValidationError), ('decimal_check', 42.24, Decimal('42.24')), ('decimal_check', '42.24', Decimal('42.24')), ('decimal_check', b'42.24', Decimal('42.24')), ('decimal_check', ' 42.24 ', 
Decimal('42.24')), ('decimal_check', Decimal('42.24'), Decimal('42.24')), ('decimal_check', 'not a valid decimal', ValidationError), ('decimal_check', 'NaN', ValidationError), ], ) def test_default_validators(field, value, result): kwargs = {field: value} if result == ValidationError: with pytest.raises(ValidationError): CheckModel(**kwargs) else: assert CheckModel(**kwargs).dict()[field] == result class StrModel(BaseModel): str_check: str class Config: min_anystr_length = 5 max_anystr_length = 10 def test_string_too_long(): with pytest.raises(ValidationError) as exc_info: StrModel(str_check='x' * 150) assert exc_info.value.errors() == [ { 'loc': ('str_check',), 'msg': 'ensure this value has at most 10 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 10}, } ] def test_string_too_short(): with pytest.raises(ValidationError) as exc_info: StrModel(str_check='x') assert exc_info.value.errors() == [ { 'loc': ('str_check',), 'msg': 'ensure this value has at least 5 characters', 'type': 'value_error.any_str.min_length', 'ctx': {'limit_value': 5}, } ] class DatetimeModel(BaseModel): dt: datetime = ... date_: date = ... time_: time = ... duration: timedelta = ... def test_datetime_successful(): m = DatetimeModel(dt='2017-10-5T19:47:07', date_=1_494_012_000, time_='10:20:30.400', duration='15:30.0001') assert m.dt == datetime(2017, 10, 5, 19, 47, 7) assert m.date_ == date(2017, 5, 5) assert m.time_ == time(10, 20, 30, 400_000) assert m.duration == timedelta(minutes=15, seconds=30, microseconds=100) def test_datetime_errors(): with pytest.raises(ValueError) as exc_info: DatetimeModel(dt='2017-13-5T19:47:07', date_='XX1494012000', time_='25:20:30.400', duration='15:30.0001 broken') assert exc_info.value.errors() == [ {'loc': ('dt',), 'msg': 'invalid datetime format', 'type': 'value_error.datetime'}, {'loc': ('date_',), 'msg': 'invalid date format', 'type': 'value_error.date'}, {'loc': ('time_',), 'msg': 'invalid time format', 'type': 'value_error.time'}, {'loc': ('duration',), 'msg': 'invalid duration format', 'type': 'value_error.duration'}, ] class FruitEnum(str, Enum): pear = 'pear' banana = 'banana' class ToolEnum(IntEnum): spanner = 1 wrench = 2 class CookingModel(BaseModel): fruit: FruitEnum = FruitEnum.pear tool: ToolEnum = ToolEnum.spanner def test_enum_successful(): m = CookingModel(tool=2) assert m.fruit == FruitEnum.pear assert m.tool == ToolEnum.wrench assert repr(m.tool) == '<ToolEnum.wrench: 2>' def test_enum_fails(): with pytest.raises(ValueError) as exc_info: CookingModel(tool=3) assert exc_info.value.errors() == [ { 'loc': ('tool',), 'msg': 'value is not a valid enumeration member; permitted: 1, 2', 'type': 'type_error.enum', 'ctx': {'enum_values': [ToolEnum.spanner, ToolEnum.wrench]}, } ] assert len(exc_info.value.json()) == 217 def test_int_enum_successful_for_str_int(): m = CookingModel(tool='2') assert m.tool == ToolEnum.wrench assert repr(m.tool) == '<ToolEnum.wrench: 2>' def test_enum_type(): """it should validate any Enum""" class Model(BaseModel): my_enum: Enum Model(my_enum=FruitEnum.banana) Model(my_enum=ToolEnum.wrench) with pytest.raises(ValidationError): Model(my_enum='banana') def test_int_enum_type(): """it should validate any IntEnum""" class Model(BaseModel): my_int_enum: IntEnum Model(my_int_enum=ToolEnum.wrench) with pytest.raises(ValidationError): Model(my_int_enum=FruitEnum.banana) with pytest.raises(ValidationError): Model(my_int_enum=2) @pytest.mark.skipif(not email_validator, reason='email_validator not installed') def test_string_success(): class MoreStringsModel(BaseModel): str_strip_enabled: constr(strip_whitespace=True) str_strip_disabled: constr(strip_whitespace=False) str_regex: constr(regex=r'^xxx\d{3}$') = ... # noqa: F722 str_min_length: constr(min_length=5) = ... str_curtailed: constr(curtail_length=5) = ... str_email: EmailStr = ... name_email: NameEmail = ... m = MoreStringsModel( str_strip_enabled=' xxx123 ', str_strip_disabled=' xxx123 ', str_regex='xxx123', str_min_length='12345', str_curtailed='123456', str_email='foobar@example.com ', name_email='foo bar <foobaR@example.com>', ) assert m.str_strip_enabled == 'xxx123' assert m.str_strip_disabled == ' xxx123 ' assert m.str_regex == 'xxx123' assert m.str_curtailed == '12345' assert m.str_email == 'foobar@example.com' assert repr(m.name_email) == "NameEmail(name='foo bar', email='foobaR@example.com')" assert str(m.name_email) == 'foo bar <foobaR@example.com>' assert m.name_email.name == 'foo bar' assert m.name_email.email == 'foobaR@example.com' @pytest.mark.skipif(not email_validator, reason='email_validator not installed') def test_string_fails(): class MoreStringsModel(BaseModel): str_regex: constr(regex=r'^xxx\d{3}$') = ... # noqa: F722 str_min_length: constr(min_length=5) = ... str_curtailed: constr(curtail_length=5) = ... str_email: EmailStr = ... name_email: NameEmail = ... with pytest.raises(ValidationError) as exc_info: MoreStringsModel( str_regex='xxx123xxx', str_min_length='1234', str_curtailed='123', # doesn't fail str_email='foobar<@example.com', name_email='foobar @example.com', ) assert exc_info.value.errors() == [ { 'loc': ('str_regex',), 'msg': 'string does not match regex "^xxx\\d{3}$"', 'type': 'value_error.str.regex', 'ctx': {'pattern': '^xxx\\d{3}$'}, }, { 'loc': ('str_min_length',), 'msg': 'ensure this value has at least 5 characters', 'type': 'value_error.any_str.min_length', 'ctx': {'limit_value': 5}, }, {'loc': ('str_email',), 'msg': 'value is not a valid email address', 'type': 'value_error.email'}, {'loc': ('name_email',), 'msg': 'value is not a valid email address', 'type': 'value_error.email'}, ] @pytest.mark.skipif(email_validator, reason='email_validator is installed') def test_email_validator_not_installed_email_str(): with pytest.raises(ImportError): class Model(BaseModel): str_email: EmailStr = ... @pytest.mark.skipif(email_validator, reason='email_validator is installed') def test_email_validator_not_installed_name_email(): with pytest.raises(ImportError): class Model(BaseModel): str_email: NameEmail = ...
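# A minimal usage sketch of the EmailStr / NameEmail types exercised above; it
# reuses this module's imports and is skipped when the optional email-validator
# extra is missing (pip install 'pydantic[email]'). The `Contact` model is a
# hypothetical illustration, not part of the upstream suite.
@pytest.mark.skipif(not email_validator, reason='email_validator not installed')
def test_name_email_usage_sketch():
    class Contact(BaseModel):
        # EmailStr validates a plain address string
        email: EmailStr
        # NameEmail parses the 'Display Name <addr>' form into .name / .email
        name_email: NameEmail

    c = Contact(email='foobar@example.com', name_email='foo bar <foo@example.com>')
    assert c.email == 'foobar@example.com'
    assert c.name_email.name == 'foo bar'
    assert c.name_email.email == 'foo@example.com'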
def test_dict(): class Model(BaseModel): v: dict assert Model(v={1: 10, 2: 20}).v == {1: 10, 2: 20} assert Model(v=[(1, 2), (3, 4)]).v == {1: 2, 3: 4} with pytest.raises(ValidationError) as exc_info: Model(v=[1, 2, 3]) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}] @pytest.mark.parametrize( 'value,result', ( ([1, 2, '3'], [1, 2, '3']), ((1, 2, '3'), [1, 2, '3']), ({1, 2, '3'}, list({1, 2, '3'})), ((i**2 for i in range(5)), [0, 1, 4, 9, 16]), ((deque((1, 2, 3)), list(deque((1, 2, 3))))), ), ) def test_list_success(value, result): class Model(BaseModel): v: list assert Model(v=value).v == result @pytest.mark.parametrize('value', (123, '123')) def test_list_fails(value): class Model(BaseModel): v: list with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_ordered_dict(): class Model(BaseModel): v: OrderedDict assert Model(v=OrderedDict([(1, 10), (2, 20)])).v == OrderedDict([(1, 10), (2, 20)]) assert Model(v={1: 10, 2: 20}).v in (OrderedDict([(1, 10), (2, 20)]), OrderedDict([(2, 20), (1, 10)])) assert Model(v=[(1, 2), (3, 4)]).v == OrderedDict([(1, 2), (3, 4)]) with pytest.raises(ValidationError) as exc_info: Model(v=[1, 2, 3]) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}] @pytest.mark.parametrize( 'value,result', ( ([1, 2, '3'], (1, 2, '3')), ((1, 2, '3'), (1, 2, '3')), ({1, 2, '3'}, tuple({1, 2, '3'})), ((i**2 for i in range(5)), (0, 1, 4, 9, 16)), (deque([1, 2, 3]), (1, 2, 3)), ), ) def test_tuple_success(value, result): class Model(BaseModel): v: tuple assert Model(v=value).v == result @pytest.mark.parametrize('value', (123, '123')) def test_tuple_fails(value): class Model(BaseModel): v: tuple with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid tuple', 'type': 'type_error.tuple'}] @pytest.mark.parametrize( 'value,cls,result', ( ([1, 2, '3'], int, (1, 2, 3)), ((1, 2, '3'), int, (1, 2, 3)), ((i**2 for i in range(5)), int, (0, 1, 4, 9, 16)), (('a', 'b', 'c'), str, ('a', 'b', 'c')), ), ) def test_tuple_variable_len_success(value, cls, result): class Model(BaseModel): v: Tuple[cls, ...] assert Model(v=value).v == result @pytest.mark.parametrize( 'value, cls, exc', [ (('a', 'b', [1, 2], 'c'), str, [{'loc': ('v', 2), 'msg': 'str type expected', 'type': 'type_error.str'}]), ( ('a', 'b', [1, 2], 'c', [3, 4]), str, [ {'loc': ('v', 2), 'msg': 'str type expected', 'type': 'type_error.str'}, {'loc': ('v', 4), 'msg': 'str type expected', 'type': 'type_error.str'}, ], ), ], ) def test_tuple_variable_len_fails(value, cls, exc): class Model(BaseModel): v: Tuple[cls, ...] 
with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == exc @pytest.mark.parametrize( 'value,result', ( ({1, 2, 2, '3'}, {1, 2, '3'}), ((1, 2, 2, '3'), {1, 2, '3'}), ([1, 2, 2, '3'], {1, 2, '3'}), ({i**2 for i in range(5)}, {0, 1, 4, 9, 16}), ), ) def test_set_success(value, result): class Model(BaseModel): v: set assert Model(v=value).v == result @pytest.mark.parametrize('value', (123, '123')) def test_set_fails(value): class Model(BaseModel): v: set with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid set', 'type': 'type_error.set'}] def test_list_type_fails(): class Model(BaseModel): v: List[int] with pytest.raises(ValidationError) as exc_info: Model(v='123') assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_set_type_fails(): class Model(BaseModel): v: Set[int] with pytest.raises(ValidationError) as exc_info: Model(v='123') assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid set', 'type': 'type_error.set'}] @pytest.mark.parametrize( 'cls, value,result', ( (int, [1, 2, 3], [1, 2, 3]), (int, (1, 2, 3), (1, 2, 3)), (int, deque((1, 2, 3)), deque((1, 2, 3))), (float, {1.0, 2.0, 3.0}, {1.0, 2.0, 3.0}), (Set[int], [{1, 2}, {3, 4}, {5, 6}], [{1, 2}, {3, 4}, {5, 6}]), (Tuple[int, str], ((1, 'a'), (2, 'b'), (3, 'c')), ((1, 'a'), (2, 'b'), (3, 'c'))), ), ) def test_sequence_success(cls, value, result): class Model(BaseModel): v: Sequence[cls] assert Model(v=value).v == result @pytest.mark.parametrize( 'cls, value,result', ( (int, (i for i in range(3)), iter([0, 1, 2])), (float, (float(i) for i in range(3)), iter([0.0, 1.0, 2.0])), (str, (str(i) for i in range(3)), iter(['0', '1', '2'])), ), ) def test_sequence_generator_success(cls, value, result): class Model(BaseModel): v: Sequence[cls] validated = Model(v=value).v assert isinstance(validated, Iterator) assert list(validated) == list(result) def test_infinite_iterable(): class Model(BaseModel): it: Iterable[int] b: int def iterable(): i = 0 while True: i += 1 yield i m = Model(it=iterable(), b=3) assert m.b == 3 assert m.it for i in m.it: assert i if i == 10: break def test_invalid_iterable(): class Model(BaseModel): it: Iterable[int] b: int with pytest.raises(ValidationError) as exc_info: Model(it=3, b=3) assert exc_info.value.errors() == [ {'loc': ('it',), 'msg': 'value is not a valid iterable', 'type': 'type_error.iterable'} ] def test_infinite_iterable_validate_first(): class Model(BaseModel): it: Iterable[int] b: int @validator('it') def infinite_first_int(cls, it, field): first_value = next(it) if field.sub_fields: sub_field = field.sub_fields[0] v, error = sub_field.validate(first_value, {}, loc='first_value') if error: raise ValidationError([error], cls) return itertools.chain([first_value], it) def int_iterable(): i = 0 while True: i += 1 yield i m = Model(it=int_iterable(), b=3) assert m.b == 3 assert m.it for i in m.it: assert i if i == 10: break def str_iterable(): while True: yield from 'foobarbaz' with pytest.raises(ValidationError) as exc_info: Model(it=str_iterable(), b=3) assert exc_info.value.errors() == [ {'loc': ('it', 'first_value'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] @pytest.mark.parametrize( 'cls,value,errors', ( ( int, (i for i in ['a', 'b', 'c']), [ {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 1), 
@pytest.mark.parametrize(
    'cls,value,errors',
    (
        (
            int,
            (i for i in ['a', 'b', 'c']),
            [
                {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
                {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
                {'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
            ],
        ),
        (
            float,
            (i for i in ['a', 'b', 'c']),
            [
                {'loc': ('v', 0), 'msg': 'value is not a valid float', 'type': 'type_error.float'},
                {'loc': ('v', 1), 'msg': 'value is not a valid float', 'type': 'type_error.float'},
                {'loc': ('v', 2), 'msg': 'value is not a valid float', 'type': 'type_error.float'},
            ],
        ),
    ),
)
def test_sequence_generator_fails(cls, value, errors):
    class Model(BaseModel):
        v: Sequence[cls]

    with pytest.raises(ValidationError) as exc_info:
        Model(v=value)
    assert exc_info.value.errors() == errors


@pytest.mark.parametrize(
    'cls,value,errors',
    (
        (int, [1, 'a', 3], [{'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]),
        (int, (1, 2, 'a'), [{'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]),
        (float, range(10), [{'loc': ('v',), 'msg': 'value is not a valid sequence', 'type': 'type_error.sequence'}]),
        (float, ('a', 2.2, 3.3), [{'loc': ('v', 0), 'msg': 'value is not a valid float', 'type': 'type_error.float'}]),
        (float, (1.1, 2.2, 'a'), [{'loc': ('v', 2), 'msg': 'value is not a valid float', 'type': 'type_error.float'}]),
        (
            Set[int],
            [{1, 2}, {2, 3}, {'d'}],
            [{'loc': ('v', 2, 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}],
        ),
        (
            Tuple[int, str],
            ((1, 'a'), ('a', 'a'), (3, 'c')),
            [{'loc': ('v', 1, 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}],
        ),
        (
            List[int],
            [{'a': 1, 'b': 2}, [1, 2], [2, 3]],
            [{'loc': ('v', 0), 'msg': 'value is not a valid list', 'type': 'type_error.list'}],
        ),
    ),
)
def test_sequence_fails(cls, value, errors):
    class Model(BaseModel):
        v: Sequence[cls]

    with pytest.raises(ValidationError) as exc_info:
        Model(v=value)
    assert exc_info.value.errors() == errors


def test_int_validation():
    class Model(BaseModel):
        a: PositiveInt = None
        b: NegativeInt = None
        c: NonNegativeInt = None
        d: NonPositiveInt = None
        e: conint(gt=4, lt=10) = None
        f: conint(ge=0, le=10) = None
        g: conint(multiple_of=5) = None

    m = Model(a=5, b=-5, c=0, d=0, e=5, f=0, g=25)
    assert m == {'a': 5, 'b': -5, 'c': 0, 'd': 0, 'e': 5, 'f': 0, 'g': 25}

    with pytest.raises(ValidationError) as exc_info:
        Model(a=-5, b=5, c=-5, d=5, e=-5, f=11, g=42)
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': 'ensure this value is greater than 0',
            'type': 'value_error.number.not_gt',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('b',),
            'msg': 'ensure this value is less than 0',
            'type': 'value_error.number.not_lt',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('c',),
            'msg': 'ensure this value is greater than or equal to 0',
            'type': 'value_error.number.not_ge',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('d',),
            'msg': 'ensure this value is less than or equal to 0',
            'type': 'value_error.number.not_le',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('e',),
            'msg': 'ensure this value is greater than 4',
            'type': 'value_error.number.not_gt',
            'ctx': {'limit_value': 4},
        },
        {
            'loc': ('f',),
            'msg': 'ensure this value is less than or equal to 10',
            'type': 'value_error.number.not_le',
            'ctx': {'limit_value': 10},
        },
        {
            'loc': ('g',),
            'msg': 'ensure this value is a multiple of 5',
            'type': 'value_error.number.not_multiple',
            'ctx': {'multiple_of': 5},
        },
    ]


def test_float_validation():
    class Model(BaseModel):
        a: PositiveFloat = None
        b: NegativeFloat = None
        c: NonNegativeFloat = None
        d: NonPositiveFloat = None
        e: confloat(gt=4, lt=12.2) = None
        f: confloat(ge=0, le=9.9) = None
        g: confloat(multiple_of=0.5) = None
        h: confloat(allow_inf_nan=False) = None

    m = Model(a=5.1, b=-5.2, c=0, d=0, e=5.3, f=9.9, g=2.5, h=42)
    assert m.dict() == {'a': 5.1, 'b': -5.2, 'c': 0, 'd': 0, 'e': 5.3, 'f': 9.9, 'g': 2.5, 'h': 42}

    assert Model(a=float('inf')).a == float('inf')
    assert Model(b=float('-inf')).b == float('-inf')

    with pytest.raises(ValidationError) as exc_info:
        Model(a=-5.1, b=5.2, c=-5.1, d=5.1, e=-5.3, f=9.91, g=4.2, h=float('nan'))
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': 'ensure this value is greater than 0',
            'type': 'value_error.number.not_gt',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('b',),
            'msg': 'ensure this value is less than 0',
            'type': 'value_error.number.not_lt',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('c',),
            'msg': 'ensure this value is greater than or equal to 0',
            'type': 'value_error.number.not_ge',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('d',),
            'msg': 'ensure this value is less than or equal to 0',
            'type': 'value_error.number.not_le',
            'ctx': {'limit_value': 0},
        },
        {
            'loc': ('e',),
            'msg': 'ensure this value is greater than 4',
            'type': 'value_error.number.not_gt',
            'ctx': {'limit_value': 4},
        },
        {
            'loc': ('f',),
            'msg': 'ensure this value is less than or equal to 9.9',
            'type': 'value_error.number.not_le',
            'ctx': {'limit_value': 9.9},
        },
        {
            'loc': ('g',),
            'msg': 'ensure this value is a multiple of 0.5',
            'type': 'value_error.number.not_multiple',
            'ctx': {'multiple_of': 0.5},
        },
        {
            'loc': ('h',),
            'msg': 'ensure this value is a finite number',
            'type': 'value_error.number.not_finite_number',
        },
    ]


def test_finite_float_validation():
    class Model(BaseModel):
        a: float = None

    assert Model(a=float('inf')).a == float('inf')
    assert Model(a=float('-inf')).a == float('-inf')
    assert math.isnan(Model(a=float('nan')).a)


@pytest.mark.parametrize('value', [float('inf'), float('-inf'), float('nan')])
def test_finite_float_validation_error(value):
    class Model(BaseModel):
        a: FiniteFloat

    assert Model(a=42).a == 42
    with pytest.raises(ValidationError) as exc_info:
        Model(a=value)
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': 'ensure this value is a finite number',
            'type': 'value_error.number.not_finite_number',
        },
    ]


def test_finite_float_config():
    class Model(BaseModel):
        a: float

        class Config:
            allow_inf_nan = False

    assert Model(a=42).a == 42
    with pytest.raises(ValidationError) as exc_info:
        Model(a=float('nan'))
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': 'ensure this value is a finite number',
            'type': 'value_error.number.not_finite_number',
        },
    ]


def test_strict_bytes():
    class Model(BaseModel):
        v: StrictBytes

    assert Model(v=b'foobar').v == b'foobar'
    assert Model(v=bytearray('foobar', 'utf-8')).v == b'foobar'

    with pytest.raises(ValidationError):
        Model(v='foostring')

    with pytest.raises(ValidationError):
        Model(v=42)

    with pytest.raises(ValidationError):
        Model(v=0.42)


def test_strict_bytes_max_length():
    class Model(BaseModel):
        u: StrictBytes = Field(..., max_length=5)

    assert Model(u=b'foo').u == b'foo'

    with pytest.raises(ValidationError, match='byte type expected'):
        Model(u=123)
    with pytest.raises(ValidationError, match='ensure this value has at most 5 characters'):
        Model(u=b'1234567')


def test_strict_bytes_subclass():
    class MyStrictBytes(StrictBytes):
        pass

    class Model(BaseModel):
        v: MyStrictBytes

    a = Model(v=MyStrictBytes(b'foobar'))
    assert isinstance(a.v, MyStrictBytes)
    assert a.v == b'foobar'

    b = Model(v=MyStrictBytes(bytearray('foobar', 'utf-8')))
    assert isinstance(b.v, MyStrictBytes)
    assert b.v == b'foobar'


def test_strict_str():
    class Model(BaseModel):
        v: StrictStr

    assert Model(v='foobar').v == 'foobar'

    with pytest.raises(ValidationError, match='str type expected'):
        Model(v=FruitEnum.banana)

    with pytest.raises(ValidationError, match='str type expected'):
        Model(v=123)

    with pytest.raises(ValidationError, match='str type expected'):
        Model(v=b'foobar')


def test_strict_str_subclass():
    class MyStrictStr(StrictStr):
        pass

    class Model(BaseModel):
        v: MyStrictStr

    m = Model(v=MyStrictStr('foobar'))
    assert isinstance(m.v, MyStrictStr)
    assert m.v == 'foobar'


def test_strict_str_max_length():
    class Model(BaseModel):
        u: StrictStr = Field(..., max_length=5)

    assert Model(u='foo').u == 'foo'

    with pytest.raises(ValidationError, match='str type expected'):
        Model(u=123)

    with pytest.raises(ValidationError, match='ensure this value has at most 5 characters'):
        Model(u='1234567')


def test_strict_str_regex():
    class Model(BaseModel):
        u: StrictStr = Field(..., regex=r'^[0-9]+$')

    assert Model(u='123').u == '123'

    with pytest.raises(ValidationError, match='str type expected'):
        Model(u=123)

    with pytest.raises(ValidationError) as exc_info:
        Model(u='abc')
    assert exc_info.value.errors() == [
        {
            'loc': ('u',),
            'msg': 'string does not match regex "^[0-9]+$"',
            'type': 'value_error.str.regex',
            'ctx': {'pattern': '^[0-9]+$'},
        }
    ]


def test_string_regex():
    class Model(BaseModel):
        u: str = Field(..., regex=r'^[0-9]+$')

    assert Model(u='123').u == '123'

    with pytest.raises(ValidationError) as exc_info:
        Model(u='abc')
    assert exc_info.value.errors() == [
        {
            'loc': ('u',),
            'msg': 'string does not match regex "^[0-9]+$"',
            'type': 'value_error.str.regex',
            'ctx': {'pattern': '^[0-9]+$'},
        }
    ]


def test_strict_bool():
    class Model(BaseModel):
        v: StrictBool

    assert Model(v=True).v is True
    assert Model(v=False).v is False

    with pytest.raises(ValidationError):
        Model(v=1)

    with pytest.raises(ValidationError):
        Model(v='1')

    with pytest.raises(ValidationError):
        Model(v=b'1')


def test_strict_int():
    class Model(BaseModel):
        v: StrictInt

    assert Model(v=123456).v == 123456

    with pytest.raises(ValidationError, match='value is not a valid int'):
        Model(v='123456')

    with pytest.raises(ValidationError, match='value is not a valid int'):
        Model(v=3.14159)


def test_strict_int_subclass():
    class MyStrictInt(StrictInt):
        pass

    class Model(BaseModel):
        v: MyStrictInt

    m = Model(v=MyStrictInt(123456))
    assert isinstance(m.v, MyStrictInt)
    assert m.v == 123456


def test_strict_float():
    class Model(BaseModel):
        v: StrictFloat

    assert Model(v=3.14159).v == 3.14159

    with pytest.raises(ValidationError, match='value is not a valid float'):
        Model(v='3.14159')

    with pytest.raises(ValidationError, match='value is not a valid float'):
        Model(v=123456)


def test_strict_float_subclass():
    class MyStrictFloat(StrictFloat):
        pass

    class Model(BaseModel):
        v: MyStrictFloat

    m = Model(v=MyStrictFloat(3.14159))
    assert isinstance(m.v, MyStrictFloat)
    assert m.v == 3.14159


def test_bool_unhashable_fails():
    class Model(BaseModel):
        v: bool

    with pytest.raises(ValidationError) as exc_info:
        Model(v={})
    assert exc_info.value.errors() == [
        {'loc': ('v',), 'msg': 'value could not be parsed to a boolean', 'type': 'type_error.bool'}
    ]


def test_uuid_error():
    class Model(BaseModel):
        v: UUID

    with pytest.raises(ValidationError) as exc_info:
        Model(v='ebcdab58-6eb8-46fb-a190-d07a3')
    assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid uuid', 'type': 'type_error.uuid'}]

    with pytest.raises(ValidationError):
        Model(v=None)


class UUIDModel(BaseModel):
    a: UUID1
    b: UUID3
    c: UUID4
    d: UUID5


def test_uuid_validation():
    a = uuid.uuid1()
    b = uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
    c = uuid.uuid4()
    d = uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')

    m = UUIDModel(a=a, b=b, c=c, d=d)
    assert m.dict() == {'a': a, 'b': b, 'c': c, 'd': d}

    with pytest.raises(ValidationError) as exc_info:
        UUIDModel(a=d, b=c, c=b, d=a)
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': 'uuid version 1 expected',
            'type': 'value_error.uuid.version',
            'ctx': {'required_version': 1},
        },
        {
            'loc': ('b',),
            'msg': 'uuid version 3 expected',
            'type': 'value_error.uuid.version',
            'ctx': {'required_version': 3},
        },
        {
            'loc': ('c',),
            'msg': 'uuid version 4 expected',
            'type': 'value_error.uuid.version',
            'ctx': {'required_version': 4},
        },
        {
            'loc': ('d',),
            'msg': 'uuid version 5 expected',
            'type': 'value_error.uuid.version',
            'ctx': {'required_version': 5},
        },
    ]


@pytest.mark.parametrize(
    'enabled, str_check, bytes_check, result_str_check, result_bytes_check',
    [
        (True, ' 123 ', b' 456 ', '123', b'456'),
        (False, ' 123 ', b' 456 ', ' 123 ', b' 456 '),
    ],
)
def test_anystr_strip_whitespace(enabled, str_check, bytes_check, result_str_check, result_bytes_check):
    class Model(BaseModel):
        str_check: str
        bytes_check: bytes

        class Config:
            anystr_strip_whitespace = enabled

    m = Model(str_check=str_check, bytes_check=bytes_check)
    assert m.str_check == result_str_check
    assert m.bytes_check == result_bytes_check


@pytest.mark.parametrize(
    'enabled, str_check, bytes_check, result_str_check, result_bytes_check',
    [(True, 'ABCDefG', b'abCD1Fg', 'ABCDEFG', b'ABCD1FG'), (False, 'ABCDefG', b'abCD1Fg', 'ABCDefG', b'abCD1Fg')],
)
def test_anystr_upper(enabled, str_check, bytes_check, result_str_check, result_bytes_check):
    class Model(BaseModel):
        str_check: str
        bytes_check: bytes

        class Config:
            anystr_upper = enabled

    m = Model(str_check=str_check, bytes_check=bytes_check)
    assert m.str_check == result_str_check
    assert m.bytes_check == result_bytes_check


@pytest.mark.parametrize(
    'enabled, str_check, bytes_check, result_str_check, result_bytes_check',
    [(True, 'ABCDefG', b'abCD1Fg', 'abcdefg', b'abcd1fg'), (False, 'ABCDefG', b'abCD1Fg', 'ABCDefG', b'abCD1Fg')],
)
def test_anystr_lower(enabled, str_check, bytes_check, result_str_check, result_bytes_check):
    class Model(BaseModel):
        str_check: str
        bytes_check: bytes

        class Config:
            anystr_lower = enabled

    m = Model(str_check=str_check, bytes_check=bytes_check)
    assert m.str_check == result_str_check
    assert m.bytes_check == result_bytes_check


@pytest.mark.parametrize(
    'type_args,value,result',
    [
        (dict(gt=Decimal('42.24')), Decimal('43'), Decimal('43')),
        (
            dict(gt=Decimal('42.24')),
            Decimal('42'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure this value is greater than 42.24',
                    'type': 'value_error.number.not_gt',
                    'ctx': {'limit_value': Decimal('42.24')},
                }
            ],
        ),
        (dict(lt=Decimal('42.24')), Decimal('42'), Decimal('42')),
        (
            dict(lt=Decimal('42.24')),
            Decimal('43'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure this value is less than 42.24',
                    'type': 'value_error.number.not_lt',
                    'ctx': {'limit_value': Decimal('42.24')},
                }
            ],
        ),
        (dict(ge=Decimal('42.24')), Decimal('43'), Decimal('43')),
        (dict(ge=Decimal('42.24')), Decimal('42.24'), Decimal('42.24')),
        (
            dict(ge=Decimal('42.24')),
            Decimal('42'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure this value is greater than or equal to 42.24',
                    'type': 'value_error.number.not_ge',
                    'ctx': {'limit_value': Decimal('42.24')},
                }
            ],
        ),
        (dict(le=Decimal('42.24')), Decimal('42'), Decimal('42')),
        (dict(le=Decimal('42.24')), Decimal('42.24'), Decimal('42.24')),
        (
            dict(le=Decimal('42.24')),
            Decimal('43'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure this value is less than or equal to 42.24',
                    'type': 'value_error.number.not_le',
                    'ctx': {'limit_value': Decimal('42.24')},
                }
            ],
        ),
        (dict(max_digits=2, decimal_places=2), Decimal('0.99'), Decimal('0.99')),
        (
            dict(max_digits=2, decimal_places=1),
            Decimal('0.99'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure that there are no more than 1 decimal places',
                    'type': 'value_error.decimal.max_places',
                    'ctx': {'decimal_places': 1},
                }
            ],
        ),
        (
            dict(max_digits=3, decimal_places=1),
            Decimal('999'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure that there are no more than 2 digits before the decimal point',
                    'type': 'value_error.decimal.whole_digits',
                    'ctx': {'whole_digits': 2},
                }
            ],
        ),
        (dict(max_digits=4, decimal_places=1), Decimal('999'), Decimal('999')),
        (dict(max_digits=20, decimal_places=2), Decimal('742403889818000000'), Decimal('742403889818000000')),
        (dict(max_digits=20, decimal_places=2), Decimal('7.42403889818E+17'), Decimal('7.42403889818E+17')),
        (dict(max_digits=6, decimal_places=2), Decimal('000000000001111.700000'), Decimal('000000000001111.700000')),
        (
            dict(max_digits=6, decimal_places=2),
            Decimal('0000000000011111.700000'),
            [
                {
                    'loc': ('foo',),
                    'type': 'value_error.decimal.whole_digits',
                    'msg': 'ensure that there are no more than 4 digits before the decimal point',
                    'ctx': {'whole_digits': 4},
                }
            ],
        ),
        (
            dict(max_digits=20, decimal_places=2),
            Decimal('7424742403889818000000'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure that there are no more than 20 digits in total',
                    'type': 'value_error.decimal.max_digits',
                    'ctx': {'max_digits': 20},
                }
            ],
        ),
        (dict(max_digits=5, decimal_places=2), Decimal('7304E-1'), Decimal('7304E-1')),
        (
            dict(max_digits=5, decimal_places=2),
            Decimal('7304E-3'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure that there are no more than 2 decimal places',
                    'type': 'value_error.decimal.max_places',
                    'ctx': {'decimal_places': 2},
                }
            ],
        ),
        (dict(max_digits=5, decimal_places=5), Decimal('70E-5'), Decimal('70E-5')),
        (
            dict(max_digits=4, decimal_places=4),
            Decimal('70E-6'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure that there are no more than 4 digits in total',
                    'type': 'value_error.decimal.max_digits',
                    'ctx': {'max_digits': 4},
                }
            ],
        ),
        *[
            (
                dict(decimal_places=2, max_digits=10),
                value,
                [{'loc': ('foo',), 'msg': 'value is not a valid decimal', 'type': 'value_error.decimal.not_finite'}],
            )
            for value in ('NaN', '-NaN', '+NaN', 'sNaN', '-sNaN', '+sNaN', 'Inf', '-Inf', '+Inf', 'Infinity', '-Infinity', '+Infinity')
        ],
        *[
            (
                dict(decimal_places=2, max_digits=10),
                Decimal(value),
                [{'loc': ('foo',), 'msg': 'value is not a valid decimal', 'type': 'value_error.decimal.not_finite'}],
            )
            for value in ('NaN', '-NaN', '+NaN', 'sNaN', '-sNaN', '+sNaN', 'Inf', '-Inf', '+Inf', 'Infinity', '-Infinity', '+Infinity')
        ],
        (
            dict(multiple_of=Decimal('5')),
            Decimal('42'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'ensure this value is a multiple of 5',
                    'type': 'value_error.number.not_multiple',
                    'ctx': {'multiple_of': Decimal('5')},
                }
            ],
        ),
    ],
)
def test_decimal_validation(type_args, value, result):
    modela = create_model('DecimalModel', foo=(condecimal(**type_args), ...))
    modelb = create_model('DecimalModel', foo=(Decimal, Field(..., **type_args)))
    for model in (modela, modelb):
        if not isinstance(result, Decimal):
            with pytest.raises(ValidationError) as exc_info:
                model(foo=value)
            assert exc_info.value.errors() == result
            assert exc_info.value.json().startswith('[')
        else:
            assert model(foo=value).foo == result
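# --- Editor's note (illustrative, not part of the original suite) ---------
# `max_digits` counts all significant digits and `decimal_places` counts the
# digits after the point, so max_digits - decimal_places digits are allowed
# before the point. That is why Decimal('999') fails above with
# max_digits=3, decimal_places=1 (only 2 whole digits allowed) but passes
# with max_digits=4.
def _condecimal_digits_sketch():
    Model = create_model('Sketch', foo=(condecimal(max_digits=4, decimal_places=1), ...))
    # 3 whole digits <= 4 - 1, so this validates
    assert Model(foo=Decimal('999')).foo == Decimal('999')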
@pytest.mark.parametrize('value,result', (('/test/path', Path('/test/path')), (Path('/test/path'), Path('/test/path'))))
def test_path_validation_success(value, result):
    class Model(BaseModel):
        foo: Path

    assert Model(foo=value).foo == result


def test_path_validation_fails():
    class Model(BaseModel):
        foo: Path

    with pytest.raises(ValidationError) as exc_info:
        Model(foo=123)
    assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'value is not a valid path', 'type': 'type_error.path'}]


@pytest.mark.parametrize(
    'value,result',
    (('tests/test_types.py', Path('tests/test_types.py')), (Path('tests/test_types.py'), Path('tests/test_types.py'))),
)
def test_file_path_validation_success(value, result):
    class Model(BaseModel):
        foo: FilePath

    assert Model(foo=value).foo == result


@pytest.mark.parametrize(
    'value,errors',
    (
        (
            'nonexistentfile',
            [
                {
                    'loc': ('foo',),
                    'msg': 'file or directory at path "nonexistentfile" does not exist',
                    'type': 'value_error.path.not_exists',
                    'ctx': {'path': 'nonexistentfile'},
                }
            ],
        ),
        (
            Path('nonexistentfile'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'file or directory at path "nonexistentfile" does not exist',
                    'type': 'value_error.path.not_exists',
                    'ctx': {'path': 'nonexistentfile'},
                }
            ],
        ),
        (
            'tests',
            [
                {
                    'loc': ('foo',),
                    'msg': 'path "tests" does not point to a file',
                    'type': 'value_error.path.not_a_file',
                    'ctx': {'path': 'tests'},
                }
            ],
        ),
        (
            Path('tests'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'path "tests" does not point to a file',
                    'type': 'value_error.path.not_a_file',
                    'ctx': {'path': 'tests'},
                }
            ],
        ),
    ),
)
def test_file_path_validation_fails(value, errors):
    class Model(BaseModel):
        foo: FilePath

    with pytest.raises(ValidationError) as exc_info:
        Model(foo=value)
    assert exc_info.value.errors() == errors


@pytest.mark.parametrize('value,result', (('tests', Path('tests')), (Path('tests'), Path('tests'))))
def test_directory_path_validation_success(value, result):
    class Model(BaseModel):
        foo: DirectoryPath

    assert Model(foo=value).foo == result


@pytest.mark.skipif(sys.platform.startswith('win'), reason='paths look different on windows')
@pytest.mark.parametrize(
    'value,errors',
    (
        (
            'nonexistentdirectory',
            [
                {
                    'loc': ('foo',),
                    'msg': 'file or directory at path "nonexistentdirectory" does not exist',
                    'type': 'value_error.path.not_exists',
                    'ctx': {'path': 'nonexistentdirectory'},
                }
            ],
        ),
        (
            Path('nonexistentdirectory'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'file or directory at path "nonexistentdirectory" does not exist',
                    'type': 'value_error.path.not_exists',
                    'ctx': {'path': 'nonexistentdirectory'},
                }
            ],
        ),
        (
            'tests/test_types.py',
            [
                {
                    'loc': ('foo',),
                    'msg': 'path "tests/test_types.py" does not point to a directory',
                    'type': 'value_error.path.not_a_directory',
                    'ctx': {'path': 'tests/test_types.py'},
                }
            ],
        ),
        (
            Path('tests/test_types.py'),
            [
                {
                    'loc': ('foo',),
                    'msg': 'path "tests/test_types.py" does not point to a directory',
                    'type': 'value_error.path.not_a_directory',
                    'ctx': {'path': 'tests/test_types.py'},
                }
            ],
        ),
    ),
)
def test_directory_path_validation_fails(value, errors):
    class Model(BaseModel):
        foo: DirectoryPath

    with pytest.raises(ValidationError) as exc_info:
        Model(foo=value)
    assert exc_info.value.errors() == errors


base_message = r'.*ensure this value is {msg} \(type=value_error.number.not_{ty}; limit_value={value}\).*'


def test_number_gt():
    class Model(BaseModel):
        a: conint(gt=-1) = 0

    assert Model(a=0).dict() == {'a': 0}

    message = base_message.format(msg='greater than -1', ty='gt', value=-1)
    with pytest.raises(ValidationError, match=message):
        Model(a=-1)


def test_number_ge():
    class Model(BaseModel):
        a: conint(ge=0) = 0

    assert Model(a=0).dict() == {'a': 0}

    message = base_message.format(msg='greater than or equal to 0', ty='ge', value=0)
    with pytest.raises(ValidationError, match=message):
        Model(a=-1)


def test_number_lt():
    class Model(BaseModel):
        a: conint(lt=5) = 0

    assert Model(a=4).dict() == {'a': 4}

    message = base_message.format(msg='less than 5', ty='lt', value=5)
    with pytest.raises(ValidationError, match=message):
        Model(a=5)


def test_number_le():
    class Model(BaseModel):
        a: conint(le=5) = 0

    assert Model(a=5).dict() == {'a': 5}

    message = base_message.format(msg='less than or equal to 5', ty='le', value=5)
    with pytest.raises(ValidationError, match=message):
        Model(a=6)


@pytest.mark.parametrize('value', ((10), (100), (20)))
def test_number_multiple_of_int_valid(value):
    class Model(BaseModel):
        a: conint(multiple_of=5)

    assert Model(a=value).dict() == {'a': value}


@pytest.mark.parametrize('value', ((1337), (23), (6), (14)))
def test_number_multiple_of_int_invalid(value):
    class Model(BaseModel):
        a: conint(multiple_of=5)

    multiple_message = base_message.replace('limit_value', 'multiple_of')
    message = multiple_message.format(msg='a multiple of 5', ty='multiple', value=5)
    with pytest.raises(ValidationError, match=message):
        Model(a=value)


@pytest.mark.parametrize('value', ((0.2), (0.3), (0.4), (0.5), (1)))
def test_number_multiple_of_float_valid(value):
    class Model(BaseModel):
        a: confloat(multiple_of=0.1)

    assert Model(a=value).dict() == {'a': value}


@pytest.mark.parametrize('value', ((0.07), (1.27), (1.003)))
def test_number_multiple_of_float_invalid(value):
    class Model(BaseModel):
        a: confloat(multiple_of=0.1)

    multiple_message = base_message.replace('limit_value', 'multiple_of')
    message = multiple_message.format(msg='a multiple of 0.1', ty='multiple', value=0.1)
    with pytest.raises(ValidationError, match=message):
        Model(a=value)


@pytest.mark.parametrize('fn', [conint, confloat, condecimal])
def test_bounds_config_exceptions(fn):
    with pytest.raises(ConfigError):
        fn(gt=0, ge=0)

    with pytest.raises(ConfigError):
        fn(lt=0, le=0)
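# --- Editor's note (illustrative, not part of the original suite) ---------
# 0.3 passes `multiple_of=0.1` above even though 0.3 % 0.1 != 0 in binary
# floating point: pydantic's float multiple-of check is tolerance-based
# (roughly, value / multiple_of must be close to an integer), whereas a
# naive modulo check would wrongly reject such values.
def _float_multiple_tolerance_sketch():
    import math

    value, multiple_of = 0.3, 0.1
    mod = value / multiple_of % 1
    assert math.isclose(mod, 0) or math.isclose(mod, 1)  # tolerant check passes
    assert value % multiple_of != 0  # naive check would fail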
def test_new_type_success():
    a_type = NewType('a_type', int)
    b_type = NewType('b_type', a_type)
    c_type = NewType('c_type', List[int])

    class Model(BaseModel):
        a: a_type
        b: b_type
        c: c_type

    m = Model(a=42, b=24, c=[1, 2, 3])
    assert m.dict() == {'a': 42, 'b': 24, 'c': [1, 2, 3]}
    assert repr(Model.__fields__['c']) == "ModelField(name='c', type=List[int], required=True)"


def test_new_type_fails():
    a_type = NewType('a_type', int)
    b_type = NewType('b_type', a_type)
    c_type = NewType('c_type', List[int])

    class Model(BaseModel):
        a: a_type
        b: b_type
        c: c_type

    with pytest.raises(ValidationError) as exc_info:
        Model(a='foo', b='bar', c=['foo'])
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
        {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
        {'loc': ('c', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
    ]


def test_json_any_is_json():
    """Mypy doesn't allow plain Json, so Json[Any] must behave just as Json did."""
    assert Json[Any] is Json


def test_valid_simple_json():
    class JsonModel(BaseModel):
        json_obj: Json

    obj = '{"a": 1, "b": [2, 3]}'
    assert JsonModel(json_obj=obj).dict() == {'json_obj': {'a': 1, 'b': [2, 3]}}


def test_valid_simple_json_any():
    class JsonModel(BaseModel):
        json_obj: Json[Any]

    obj = '{"a": 1, "b": [2, 3]}'
    assert JsonModel(json_obj=obj).dict() == {'json_obj': {'a': 1, 'b': [2, 3]}}


def test_invalid_simple_json():
    class JsonModel(BaseModel):
        json_obj: Json

    obj = '{a: 1, b: [2, 3]}'
    with pytest.raises(ValidationError) as exc_info:
        JsonModel(json_obj=obj)
    assert exc_info.value.errors()[0] == {'loc': ('json_obj',), 'msg': 'Invalid JSON', 'type': 'value_error.json'}


def test_invalid_simple_json_any():
    class JsonModel(BaseModel):
        json_obj: Json[Any]

    obj = '{a: 1, b: [2, 3]}'
    with pytest.raises(ValidationError) as exc_info:
        JsonModel(json_obj=obj)
    assert exc_info.value.errors()[0] == {'loc': ('json_obj',), 'msg': 'Invalid JSON', 'type': 'value_error.json'}


def test_valid_simple_json_bytes():
    class JsonModel(BaseModel):
        json_obj: Json

    obj = b'{"a": 1, "b": [2, 3]}'
    assert JsonModel(json_obj=obj).dict() == {'json_obj': {'a': 1, 'b': [2, 3]}}


def test_valid_detailed_json():
    class JsonDetailedModel(BaseModel):
        json_obj: Json[List[int]]

    obj = '[1, 2, 3]'
    assert JsonDetailedModel(json_obj=obj).dict() == {'json_obj': [1, 2, 3]}


def test_invalid_detailed_json_value_error():
    class JsonDetailedModel(BaseModel):
        json_obj: Json[List[int]]

    obj = '(1, 2, 3)'
    with pytest.raises(ValidationError) as exc_info:
        JsonDetailedModel(json_obj=obj)
    assert exc_info.value.errors()[0] == {'loc': ('json_obj',), 'msg': 'Invalid JSON', 'type': 'value_error.json'}


def test_valid_detailed_json_bytes():
    class JsonDetailedModel(BaseModel):
        json_obj: Json[List[int]]

    obj = b'[1, 2, 3]'
    assert JsonDetailedModel(json_obj=obj).dict() == {'json_obj': [1, 2, 3]}


def test_valid_model_json():
    class Model(BaseModel):
        a: int
        b: List[int]

    class JsonDetailedModel(BaseModel):
        json_obj: Json[Model]

    obj = '{"a": 1, "b": [2, 3]}'
    m = JsonDetailedModel(json_obj=obj)
    assert isinstance(m.json_obj, Model)
    assert m.json_obj.a == 1
    assert m.dict() == {'json_obj': {'a': 1, 'b': [2, 3]}}


def test_invalid_model_json():
    class Model(BaseModel):
        a: int
        b: List[int]

    class JsonDetailedModel(BaseModel):
        json_obj: Json[Model]

    obj = '{"a": 1, "c": [2, 3]}'
    with pytest.raises(ValidationError) as exc_info:
        JsonDetailedModel(json_obj=obj)
    assert exc_info.value.errors() == [
        {'loc': ('json_obj', 'b'), 'msg': 'field required', 'type': 'value_error.missing'}
    ]


def test_invalid_detailed_json_type_error():
    class JsonDetailedModel(BaseModel):
        json_obj: Json[List[int]]

    obj = '["a", "b", "c"]'
    with pytest.raises(ValidationError) as exc_info:
        JsonDetailedModel(json_obj=obj)
    assert exc_info.value.errors() == [
        {'loc': ('json_obj', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
        {'loc': ('json_obj', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
        {'loc': ('json_obj', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
    ]


def test_json_not_str():
    class JsonDetailedModel(BaseModel):
        json_obj: Json[List[int]]

    obj = 12
    with pytest.raises(ValidationError) as exc_info:
        JsonDetailedModel(json_obj=obj)
    assert exc_info.value.errors()[0] == {
        'loc': ('json_obj',),
        'msg': 'JSON object must be str, bytes or bytearray',
        'type': 'type_error.json',
    }


def test_json_pre_validator():
    call_count = 0

    class JsonModel(BaseModel):
        json_obj: Json

        @validator('json_obj', pre=True)
        def check(cls, v):
            assert v == '"foobar"'
            nonlocal call_count
            call_count += 1
            return v

    assert JsonModel(json_obj='"foobar"').dict() == {'json_obj': 'foobar'}
    assert call_count == 1


def test_json_optional_simple():
    class JsonOptionalModel(BaseModel):
        json_obj: Optional[Json]

    assert JsonOptionalModel(json_obj=None).dict() == {'json_obj': None}
    assert JsonOptionalModel(json_obj='["x", "y", "z"]').dict() == {'json_obj': ['x', 'y', 'z']}


def test_json_optional_complex():
    class JsonOptionalModel(BaseModel):
        json_obj: Optional[Json[List[int]]]

    JsonOptionalModel(json_obj=None)

    good = JsonOptionalModel(json_obj='[1, 2, 3]')
    assert good.json_obj == [1, 2, 3]

    with pytest.raises(ValidationError) as exc_info:
        JsonOptionalModel(json_obj='["i should fail"]')
    assert exc_info.value.errors() == [
        {'loc': ('json_obj', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_json_explicitly_required():
    class JsonRequired(BaseModel):
        json_obj: Json = ...

    assert JsonRequired(json_obj=None).dict() == {'json_obj': None}
    assert JsonRequired(json_obj='["x", "y", "z"]').dict() == {'json_obj': ['x', 'y', 'z']}
    with pytest.raises(ValidationError) as exc_info:
        JsonRequired()
    assert exc_info.value.errors() == [{'loc': ('json_obj',), 'msg': 'field required', 'type': 'value_error.missing'}]


def test_json_no_default():
    class JsonRequired(BaseModel):
        json_obj: Json

    assert JsonRequired(json_obj=None).dict() == {'json_obj': None}
    assert JsonRequired(json_obj='["x", "y", "z"]').dict() == {'json_obj': ['x', 'y', 'z']}
    assert JsonRequired().dict() == {'json_obj': None}
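# --- Editor's sketch (illustrative, not part of the original suite) -------
# Json[T] behaves roughly like "json.loads, then validate the result as T";
# the equivalent two-step form using pydantic v1's parse_obj_as:
def _json_field_equivalence_sketch():
    import json

    from pydantic import parse_obj_as

    raw = '[1, 2, 3]'
    assert parse_obj_as(List[int], json.loads(raw)) == [1, 2, 3]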
@pytest.mark.parametrize('pattern_type', [re.Pattern, Pattern])
def test_pattern(pattern_type):
    class Foobar(BaseModel):
        pattern: pattern_type

    f = Foobar(pattern=r'^whatev.r\d$')
    assert f.pattern.__class__.__name__ == 'Pattern'
    # check it's really a proper pattern
    assert f.pattern.match('whatever1')
    assert not f.pattern.match(' whatever1')

    # Check that pre-compiled patterns are accepted unchanged
    p = re.compile(r'^whatev.r\d$')
    f2 = Foobar(pattern=p)
    assert f2.pattern is p

    assert Foobar.schema() == {
        'type': 'object',
        'title': 'Foobar',
        'properties': {'pattern': {'type': 'string', 'format': 'regex', 'title': 'Pattern'}},
        'required': ['pattern'],
    }


@pytest.mark.parametrize('pattern_type', [re.Pattern, Pattern])
def test_pattern_error(pattern_type):
    class Foobar(BaseModel):
        pattern: pattern_type

    with pytest.raises(ValidationError) as exc_info:
        Foobar(pattern='[xx')
    assert exc_info.value.errors() == [
        {'loc': ('pattern',), 'msg': 'Invalid regular expression', 'type': 'value_error.regex_pattern'}
    ]


def test_secretfield():
    class Foobar(SecretField):
        ...

    message = "Can't instantiate abstract class Foobar with abstract methods? get_secret_value"

    with pytest.raises(TypeError, match=message):
        Foobar()


def test_secretstr():
    class Foobar(BaseModel):
        password: SecretStr
        empty_password: SecretStr

    # Initialize the model.
    f = Foobar(password='1234', empty_password='')

    # Assert correct types.
    assert f.password.__class__.__name__ == 'SecretStr'
    assert f.empty_password.__class__.__name__ == 'SecretStr'

    # Assert str and repr are correct.
    assert str(f.password) == '**********'
    assert str(f.empty_password) == ''
    assert repr(f.password) == "SecretStr('**********')"
    assert repr(f.empty_password) == "SecretStr('')"

    # Assert retrieval of secret value is correct
    assert f.password.get_secret_value() == '1234'
    assert f.empty_password.get_secret_value() == ''

    with pytest.warns(DeprecationWarning, match=r'`secret_str.display\(\)` is deprecated'):
        assert f.password.display() == '**********'
    with pytest.warns(DeprecationWarning, match=r'`secret_str.display\(\)` is deprecated'):
        assert f.empty_password.display() == ''

    # Assert that SecretStr is equal to SecretStr if the secret is the same.
    assert f == f.copy()
    assert f != f.copy(update=dict(password='4321'))


def test_secretstr_is_secret_field():
    assert issubclass(SecretStr, SecretField)


def test_secretstr_equality():
    assert SecretStr('abc') == SecretStr('abc')
    assert SecretStr('123') != SecretStr('321')
    assert SecretStr('123') != '123'
    assert SecretStr('123') is not SecretStr('123')


def test_secretstr_idempotent():
    class Foobar(BaseModel):
        password: SecretStr

    # Should not raise an exception
    m = Foobar(password=SecretStr('1234'))
    assert m.password.get_secret_value() == '1234'


def test_secretstr_is_hashable():
    assert type(hash(SecretStr('secret'))) is int


def test_secretstr_error():
    class Foobar(BaseModel):
        password: SecretStr

    with pytest.raises(ValidationError) as exc_info:
        Foobar(password=[6, 23, 'abc'])
    assert exc_info.value.errors() == [{'loc': ('password',), 'msg': 'str type expected', 'type': 'type_error.str'}]


def test_secretstr_min_max_length():
    class Foobar(BaseModel):
        password: SecretStr = Field(min_length=6, max_length=10)

    with pytest.raises(ValidationError) as exc_info:
        Foobar(password='')
    assert exc_info.value.errors() == [
        {
            'loc': ('password',),
            'msg': 'ensure this value has at least 6 characters',
            'type': 'value_error.any_str.min_length',
            'ctx': {'limit_value': 6},
        }
    ]

    with pytest.raises(ValidationError) as exc_info:
        Foobar(password='1' * 20)
    assert exc_info.value.errors() == [
        {
            'loc': ('password',),
            'msg': 'ensure this value has at most 10 characters',
            'type': 'value_error.any_str.max_length',
            'ctx': {'limit_value': 10},
        }
    ]

    value = '1' * 8
    assert Foobar(password=value).password.get_secret_value() == value


def test_secretbytes():
    class Foobar(BaseModel):
        password: SecretBytes
        empty_password: SecretBytes

    # Initialize the model.
    f = Foobar(password=b'wearebytes', empty_password=b'')

    # Assert correct types.
    assert f.password.__class__.__name__ == 'SecretBytes'
    assert f.empty_password.__class__.__name__ == 'SecretBytes'

    # Assert str and repr are correct.
    assert str(f.password) == '**********'
    assert str(f.empty_password) == ''
    assert repr(f.password) == "SecretBytes(b'**********')"
    assert repr(f.empty_password) == "SecretBytes(b'')"

    # Assert retrieval of secret value is correct
    assert f.password.get_secret_value() == b'wearebytes'
    assert f.empty_password.get_secret_value() == b''

    with pytest.warns(DeprecationWarning, match=r'`secret_bytes.display\(\)` is deprecated'):
        assert f.password.display() == '**********'
    with pytest.warns(DeprecationWarning, match=r'`secret_bytes.display\(\)` is deprecated'):
        assert f.empty_password.display() == ''

    # Assert that SecretBytes is equal to SecretBytes if the secret is the same.
    assert f == f.copy()
    assert f != f.copy(update=dict(password=b'4321'))


def test_secretbytes_is_secret_field():
    assert issubclass(SecretBytes, SecretField)


def test_secretbytes_equality():
    assert SecretBytes(b'abc') == SecretBytes(b'abc')
    assert SecretBytes(b'123') != SecretBytes(b'321')
    assert SecretBytes(b'123') != b'123'
    assert SecretBytes(b'123') is not SecretBytes(b'123')


def test_secretbytes_idempotent():
    class Foobar(BaseModel):
        password: SecretBytes

    # Should not raise an exception.
    _ = Foobar(password=SecretBytes(b'1234'))


def test_secretbytes_is_hashable():
    assert type(hash(SecretBytes(b'secret'))) is int


def test_secretbytes_error():
    class Foobar(BaseModel):
        password: SecretBytes

    with pytest.raises(ValidationError) as exc_info:
        Foobar(password=[6, 23, 'abc'])
    assert exc_info.value.errors() == [{'loc': ('password',), 'msg': 'byte type expected', 'type': 'type_error.bytes'}]


def test_secretbytes_min_max_length():
    class Foobar(BaseModel):
        password: SecretBytes = Field(min_length=6, max_length=10)

    with pytest.raises(ValidationError) as exc_info:
        Foobar(password=b'')
    assert exc_info.value.errors() == [
        {
            'loc': ('password',),
            'msg': 'ensure this value has at least 6 characters',
            'type': 'value_error.any_str.min_length',
            'ctx': {'limit_value': 6},
        }
    ]

    with pytest.raises(ValidationError) as exc_info:
        Foobar(password=b'1' * 20)
    assert exc_info.value.errors() == [
        {
            'loc': ('password',),
            'msg': 'ensure this value has at most 10 characters',
            'type': 'value_error.any_str.max_length',
            'ctx': {'limit_value': 10},
        }
    ]

    value = b'1' * 8
    assert Foobar(password=value).password.get_secret_value() == value


@pytest.mark.parametrize('secret_cls', [SecretStr, SecretBytes])
@pytest.mark.parametrize(
    'field_kw,schema_kw',
    [
        [{}, {}],
        [{'min_length': 6}, {'minLength': 6}],
        [{'max_length': 10}, {'maxLength': 10}],
        [{'min_length': 6, 'max_length': 10}, {'minLength': 6, 'maxLength': 10}],
    ],
    ids=['no-constraints', 'min-constraint', 'max-constraint', 'min-max-constraints'],
)
def test_secrets_schema(secret_cls, field_kw, schema_kw):
    class Foobar(BaseModel):
        password: secret_cls = Field(**field_kw)

    assert Foobar.schema() == {
        'title': 'Foobar',
        'type': 'object',
        'properties': {
            'password': {'title': 'Password', 'type': 'string', 'writeOnly': True, 'format': 'password', **schema_kw}
        },
        'required': ['password'],
    }
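# --- Editor's note (illustrative, not part of the original suite) ---------
# Putting the secret behaviour above in one place: str()/repr() of a secret
# are masked and, in pydantic v1, the masked form is also what `.json()`
# emits by default; the raw value is only reachable via get_secret_value().
def _secret_masking_sketch():
    class Creds(BaseModel):
        password: SecretStr

    c = Creds(password='hunter2')
    assert str(c.password) == '**********'
    assert c.password.get_secret_value() == 'hunter2'
    assert '"**********"' in c.json()  # masked in JSON output by default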
def test_generic_without_params():
    class Model(BaseModel):
        generic_list: List
        generic_dict: Dict
        generic_tuple: Tuple

    m = Model(generic_list=[0, 'a'], generic_dict={0: 'a', 'a': 0}, generic_tuple=(1, 'q'))
    assert m.dict() == {'generic_list': [0, 'a'], 'generic_dict': {0: 'a', 'a': 0}, 'generic_tuple': (1, 'q')}


def test_generic_without_params_error():
    class Model(BaseModel):
        generic_list: List
        generic_dict: Dict
        generic_tuple: Tuple

    with pytest.raises(ValidationError) as exc_info:
        Model(generic_list=0, generic_dict=0, generic_tuple=0)
    assert exc_info.value.errors() == [
        {'loc': ('generic_list',), 'msg': 'value is not a valid list', 'type': 'type_error.list'},
        {'loc': ('generic_dict',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'},
        {'loc': ('generic_tuple',), 'msg': 'value is not a valid tuple', 'type': 'type_error.tuple'},
    ]


def test_literal_single():
    class Model(BaseModel):
        a: Literal['a']

    Model(a='a')
    with pytest.raises(ValidationError) as exc_info:
        Model(a='b')
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': "unexpected value; permitted: 'a'",
            'type': 'value_error.const',
            'ctx': {'given': 'b', 'permitted': ('a',)},
        }
    ]


def test_literal_multiple():
    class Model(BaseModel):
        a_or_b: Literal['a', 'b']

    Model(a_or_b='a')
    Model(a_or_b='b')
    with pytest.raises(ValidationError) as exc_info:
        Model(a_or_b='c')
    assert exc_info.value.errors() == [
        {
            'loc': ('a_or_b',),
            'msg': "unexpected value; permitted: 'a', 'b'",
            'type': 'value_error.const',
            'ctx': {'given': 'c', 'permitted': ('a', 'b')},
        }
    ]


def test_unsupported_field_type():
    with pytest.raises(TypeError, match=r'MutableSet(.*)not supported'):

        class UnsupportedModel(BaseModel):
            unsupported: MutableSet[int]


def test_frozenset_field():
    class FrozenSetModel(BaseModel):
        set: FrozenSet[int]

    test_set = frozenset({1, 2, 3})
    object_under_test = FrozenSetModel(set=test_set)

    assert object_under_test.set == test_set


@pytest.mark.parametrize(
    'value,result',
    [
        ([1, 2, 3], frozenset([1, 2, 3])),
        ({1, 2, 3}, frozenset([1, 2, 3])),
        ((1, 2, 3), frozenset([1, 2, 3])),
        (deque([1, 2, 3]), frozenset([1, 2, 3])),
    ],
)
def test_frozenset_field_conversion(value, result):
    class FrozenSetModel(BaseModel):
        set: FrozenSet[int]

    object_under_test = FrozenSetModel(set=value)

    assert object_under_test.set == result


def test_frozenset_field_not_convertible():
    class FrozenSetModel(BaseModel):
        set: FrozenSet[int]

    with pytest.raises(ValidationError, match=r'frozenset'):
        FrozenSetModel(set=42)


@pytest.mark.parametrize(
    'input_value,output,human_bin,human_dec',
    (
        ('1', 1, '1.0B', '1.0B'),
        ('1.0', 1, '1.0B', '1.0B'),
        ('1b', 1, '1.0B', '1.0B'),
        ('1.5 KB', int(1.5e3), '1.5KiB', '1.5KB'),
        ('1.5 K', int(1.5e3), '1.5KiB', '1.5KB'),
        ('1.5 MB', int(1.5e6), '1.4MiB', '1.5MB'),
        ('1.5 M', int(1.5e6), '1.4MiB', '1.5MB'),
        ('5.1kib', 5222, '5.1KiB', '5.2KB'),
        ('6.2EiB', 7148113328562451456, '6.2EiB', '7.1EB'),
    ),
)
def test_bytesize_conversions(input_value, output, human_bin, human_dec):
    class Model(BaseModel):
        size: ByteSize

    m = Model(size=input_value)

    assert m.size == output

    assert m.size.human_readable() == human_bin
    assert m.size.human_readable(decimal=True) == human_dec


def test_bytesize_to():
    class Model(BaseModel):
        size: ByteSize

    m = Model(size='1GiB')

    assert m.size.to('MiB') == pytest.approx(1024)
    assert m.size.to('MB') == pytest.approx(1073.741824)
    assert m.size.to('TiB') == pytest.approx(0.0009765625)


def test_bytesize_raises():
    class Model(BaseModel):
        size: ByteSize

    with pytest.raises(ValidationError, match='parse value'):
        Model(size='d1MB')

    with pytest.raises(ValidationError, match='byte unit'):
        Model(size='1LiB')

    # 1Gi is not a valid unit unlike 1G
    with pytest.raises(ValidationError, match='byte unit'):
        Model(size='1Gi')

    m = Model(size='1MB')
    with pytest.raises(errors.InvalidByteSizeUnit, match='byte unit'):
        m.size.to('bad_unit')
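# --- Editor's note (illustrative, not part of the original suite) ---------
# ByteSize distinguishes decimal units (KB = 10**3) from binary units
# (KiB = 2**10), which explains the conversions asserted above, e.g.
# '5.1kib' -> int(5.1 * 1024) == 5222 bytes, while '1.5 KB' -> 1500 bytes.
def _bytesize_units_sketch():
    class Model(BaseModel):
        size: ByteSize

    assert Model(size='5.1kib').size == int(5.1 * 1024)  # binary: 5222
    assert Model(size='1.5 KB').size == int(1.5e3)  # decimal: 1500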
def test_deque_success():
    class Model(BaseModel):
        v: deque

    assert Model(v=[1, 2, 3]).v == deque([1, 2, 3])


@pytest.mark.parametrize(
    'cls,value,result',
    (
        (int, [1, 2, 3], deque([1, 2, 3])),
        (int, (1, 2, 3), deque((1, 2, 3))),
        (int, deque((1, 2, 3)), deque((1, 2, 3))),
        (float, {1.0, 2.0, 3.0}, deque({1.0, 2.0, 3.0})),
        (Set[int], [{1, 2}, {3, 4}, {5, 6}], deque([{1, 2}, {3, 4}, {5, 6}])),
        (Tuple[int, str], ((1, 'a'), (2, 'b'), (3, 'c')), deque(((1, 'a'), (2, 'b'), (3, 'c')))),
        (str, [w for w in 'one two three'.split()], deque(['one', 'two', 'three'])),
        (int, frozenset([1, 2, 3]), deque([1, 2, 3])),
    ),
)
def test_deque_generic_success(cls, value, result):
    class Model(BaseModel):
        v: Deque[cls]

    assert Model(v=value).v == result


def test_deque_maxlen():
    class DequeTypedModel(BaseModel):
        field: Deque[int] = deque(maxlen=10)

    assert DequeTypedModel(field=deque(maxlen=25)).field.maxlen == 25
    assert DequeTypedModel().field.maxlen == 10

    class DequeUnTypedModel(BaseModel):
        field: deque = deque(maxlen=10)

    assert DequeUnTypedModel(field=deque(maxlen=25)).field.maxlen == 25
    assert DequeUnTypedModel().field.maxlen == 10

    class DequeNoDefaultModel(BaseModel):
        field: deque

    assert DequeNoDefaultModel(field=deque(maxlen=25)).field.maxlen == 25


@pytest.mark.parametrize(
    'cls,value,errors',
    (
        (int, [1, 'a', 3], [{'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]),
        (int, (1, 2, 'a'), [{'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]),
        (float, range(10), [{'loc': ('v',), 'msg': 'value is not a valid sequence', 'type': 'type_error.sequence'}]),
        (float, ('a', 2.2, 3.3), [{'loc': ('v', 0), 'msg': 'value is not a valid float', 'type': 'type_error.float'}]),
        (float, (1.1, 2.2, 'a'), [{'loc': ('v', 2), 'msg': 'value is not a valid float', 'type': 'type_error.float'}]),
        (
            Set[int],
            [{1, 2}, {2, 3}, {'d'}],
            [{'loc': ('v', 2, 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}],
        ),
        (
            Tuple[int, str],
            ((1, 'a'), ('a', 'a'), (3, 'c')),
            [{'loc': ('v', 1, 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}],
        ),
        (
            List[int],
            [{'a': 1, 'b': 2}, [1, 2], [2, 3]],
            [{'loc': ('v', 0), 'msg': 'value is not a valid list', 'type': 'type_error.list'}],
        ),
    ),
)
def test_deque_fails(cls, value, errors):
    class Model(BaseModel):
        v: Deque[cls]

    with pytest.raises(ValidationError) as exc_info:
        Model(v=value)
    assert exc_info.value.errors() == errors


def test_deque_model():
    class Model2(BaseModel):
        x: int

    class Model(BaseModel):
        v: Deque[Model2]

    seq = [Model2(x=1), Model2(x=2)]
    assert Model(v=seq).v == deque(seq)


def test_deque_json():
    class Model(BaseModel):
        v: Deque[int]

    assert Model(v=deque((1, 2, 3))).json() == '{"v": [1, 2, 3]}'


none_value_type_cases = None, type(None), NoneType, Literal[None]


@pytest.mark.parametrize('value_type', none_value_type_cases)
def test_none(value_type):
    class Model(BaseModel):
        my_none: value_type
        my_none_list: List[value_type]
        my_none_dict: Dict[str, value_type]
        my_json_none: Json[value_type]

    Model(
        my_none=None,
        my_none_list=[None] * 3,
        my_none_dict={'a': None, 'b': None},
        my_json_none='null',
    )

    assert Model.schema() == {
        'title': 'Model',
        'type': 'object',
        'properties': {
            'my_none': {'title': 'My None', 'type': 'null'},
            'my_none_list': {
                'title': 'My None List',
                'type': 'array',
                'items': {'type': 'null'},
            },
            'my_none_dict': {
                'title': 'My None Dict',
                'type': 'object',
                'additionalProperties': {'type': 'null'},
            },
            'my_json_none': {'title': 'My Json None', 'type': 'null'},
        },
        'required': ['my_none', 'my_none_list', 'my_none_dict', 'my_json_none'],
    }

    with pytest.raises(ValidationError) as exc_info:
        Model(
            my_none='qwe',
            my_none_list=[1, None, 'qwe'],
            my_none_dict={'a': 1, 'b': None},
            my_json_none='"a"',
        )
    assert exc_info.value.errors() == [
        {'loc': ('my_none',), 'msg': 'value is not None', 'type': 'type_error.not_none'},
        {'loc': ('my_none_list', 0), 'msg': 'value is not None', 'type': 'type_error.not_none'},
        {'loc': ('my_none_list', 2), 'msg': 'value is not None', 'type': 'type_error.not_none'},
        {'loc': ('my_none_dict', 'a'), 'msg': 'value is not None', 'type': 'type_error.not_none'},
        {'loc': ('my_json_none',), 'msg': 'value is not None', 'type': 'type_error.not_none'},
    ]


def test_default_union_types():
    class DefaultModel(BaseModel):
        v: Union[int, bool, str]

    assert DefaultModel(v=True).dict() == {'v': 1}
    assert DefaultModel(v=1).dict() == {'v': 1}
    assert DefaultModel(v='1').dict() == {'v': 1}

    assert DefaultModel.schema() == {
        'title': 'DefaultModel',
        'type': 'object',
        'properties': {'v': {'title': 'V', 'anyOf': [{'type': t} for t in ('integer', 'boolean', 'string')]}},
        'required': ['v'],
    }


def test_smart_union_types():
    class SmartModel(BaseModel):
        v: Union[int, bool, str]

        class Config:
            smart_union = True

    assert SmartModel(v=1).dict() == {'v': 1}
    assert SmartModel(v=True).dict() == {'v': True}
    assert SmartModel(v='1').dict() == {'v': '1'}

    assert SmartModel.schema() == {
        'title': 'SmartModel',
        'type': 'object',
        'properties': {'v': {'title': 'V', 'anyOf': [{'type': t} for t in ('integer', 'boolean', 'string')]}},
        'required': ['v'],
    }


def test_default_union_class():
    class A(BaseModel):
        x: str

    class B(BaseModel):
        x: str

    class Model(BaseModel):
        y: Union[A, B]

    assert isinstance(Model(y=A(x='a')).y, A)
    # `B` instance is coerced to `A`
    assert isinstance(Model(y=B(x='b')).y, A)


def test_smart_union_class():
    class A(BaseModel):
        x: str

    class B(BaseModel):
        x: str

    class Model(BaseModel):
        y: Union[A, B]

        class Config:
            smart_union = True

    assert isinstance(Model(y=A(x='a')).y, A)
    assert isinstance(Model(y=B(x='b')).y, B)


def test_default_union_subclass():
    class MyStr(str):
        ...

    class Model(BaseModel):
        x: Union[int, str]

    assert Model(x=MyStr('1')).x == 1


def test_smart_union_subclass():
    class MyStr(str):
        ...

    class Model(BaseModel):
        x: Union[int, str]

        class Config:
            smart_union = True

    assert Model(x=MyStr('1')).x == '1'


def test_default_union_compound_types():
    class Model(BaseModel):
        values: Union[Dict[str, str], List[str]]

    assert Model(values={'L': '1'}).dict() == {'values': {'L': '1'}}
    assert Model(values=['L1']).dict() == {'values': {'L': '1'}}  # dict(['L1']) == {'L': '1'}


def test_smart_union_compound_types():
    class Model(BaseModel):
        values: Union[Dict[str, str], List[str], Dict[str, List[str]]]

        class Config:
            smart_union = True

    assert Model(values={'L': '1'}).dict() == {'values': {'L': '1'}}
    assert Model(values=['L1']).dict() == {'values': ['L1']}
    assert Model(values=('L1',)).dict() == {'values': {'L': '1'}}  # expected coercion into first dict if not a list
    assert Model(values={'x': ['pika']}) == {'values': {'x': ['pika']}}
    assert Model(values={'x': ('pika',)}).dict() == {'values': {'x': ['pika']}}
    with pytest.raises(ValidationError) as e:
        Model(values={'x': {'a': 'b'}})
    assert e.value.errors() == [
        {'loc': ('values', 'x'), 'msg': 'str type expected', 'type': 'type_error.str'},
        {'loc': ('values',), 'msg': 'value is not a valid list', 'type': 'type_error.list'},
        {'loc': ('values', 'x'), 'msg': 'value is not a valid list', 'type': 'type_error.list'},
    ]


def test_smart_union_compound_types_edge_case():
    """For now, `smart_union` does not handle compound types well."""

    class Model(BaseModel, smart_union=True):
        x: Union[List[str], List[int]]

    # should consider [1, 2] valid and not coerce once `smart_union` is improved
    assert Model(x=[1, 2]).x == ['1', '2']
    # still coerce if needed
    assert Model(x=[1, '2']).x == ['1', '2']


def test_smart_union_typeddict():
    class Dict1(TypedDict):
        foo: str

    class Dict2(TypedDict):
        bar: str

    class M(BaseModel):
        d: Union[Dict2, Dict1]

        class Config:
            smart_union = True

    assert M(d=dict(foo='baz')).d == {'foo': 'baz'}
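# --- Editor's note (illustrative, not part of the original suite) ---------
# The contrast exercised above in one place: a plain Union tries its members
# left to right and coerces, while smart_union first looks for an exact
# type match before falling back to coercion.
def _smart_union_contrast_sketch():
    class Plain(BaseModel):
        v: Union[int, str]

    class Smart(BaseModel):
        v: Union[int, str]

        class Config:
            smart_union = True

    assert Plain(v='1').v == 1  # coerced by the first member
    assert Smart(v='1').v == '1'  # exact match wins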
@pytest.mark.parametrize(
    'value,result',
    (
        ('1996-01-22', date(1996, 1, 22)),
        (date(1996, 1, 22), date(1996, 1, 22)),
    ),
)
def test_past_date_validation_success(value, result):
    class Model(BaseModel):
        foo: PastDate

    assert Model(foo=value).foo == result


@pytest.mark.parametrize(
    'value',
    (
        date.today(),
        date.today() + timedelta(1),
        datetime.today(),
        datetime.today() + timedelta(1),
        '2064-06-01',
    ),
)
def test_past_date_validation_fails(value):
    class Model(BaseModel):
        foo: PastDate

    with pytest.raises(ValidationError) as exc_info:
        Model(foo=value)
    assert exc_info.value.errors() == [
        {
            'loc': ('foo',),
            'msg': 'date is not in the past',
            'type': 'value_error.date.not_in_the_past',
        }
    ]


@pytest.mark.parametrize(
    'value,result',
    (
        (date.today() + timedelta(1), date.today() + timedelta(1)),
        (datetime.today() + timedelta(1), date.today() + timedelta(1)),
        ('2064-06-01', date(2064, 6, 1)),
    ),
)
def test_future_date_validation_success(value, result):
    class Model(BaseModel):
        foo: FutureDate

    assert Model(foo=value).foo == result


@pytest.mark.parametrize(
    'value',
    (
        date.today(),
        date.today() - timedelta(1),
        datetime.today(),
        datetime.today() - timedelta(1),
        '1996-01-22',
    ),
)
def test_future_date_validation_fails(value):
    class Model(BaseModel):
        foo: FutureDate

    with pytest.raises(ValidationError) as exc_info:
        Model(foo=value)
    assert exc_info.value.errors() == [
        {
            'loc': ('foo',),
            'msg': 'date is not in the future',
            'type': 'value_error.date.not_in_the_future',
        }
    ]


def test_typing_extension_literal_field():
    from typing_extensions import Literal

    class Model(BaseModel):
        foo: Literal['foo']

    assert Model(foo='foo').foo == 'foo'


@pytest.mark.skipif(sys.version_info < (3, 8), reason='`typing.Literal` is available for python 3.8 and above.')
def test_typing_literal_field():
    from typing import Literal

    class Model(BaseModel):
        foo: Literal['foo']

    assert Model(foo='foo').foo == 'foo'


pydantic-1.10.14/tests/test_types_payment_card_number.py

from collections import namedtuple
from typing import Any

import pytest

from pydantic import BaseModel, ValidationError
from pydantic.errors import InvalidLengthForBrand, LuhnValidationError, NotDigitError
from pydantic.types import PaymentCardBrand, PaymentCardNumber

VALID_AMEX = '370000000000002'
VALID_MC = '5100000000000003'
VALID_VISA_13 = '4050000000001'
VALID_VISA_16 = '4050000000000001'
VALID_VISA_19 = '4050000000000000001'
VALID_OTHER = '2000000000000000008'
LUHN_INVALID = '4000000000000000'
LEN_INVALID = '40000000000000006'

# Mock PaymentCardNumber
PCN = namedtuple('PaymentCardNumber', ['card_number', 'brand'])
PCN.__len__ = lambda v: len(v.card_number)


class PaymentCard(BaseModel):
    card_number: PaymentCardNumber


def test_validate_digits():
    digits = '12345'
    assert PaymentCardNumber.validate_digits(digits) == digits

    with pytest.raises(NotDigitError):
        PaymentCardNumber.validate_digits('hello')


@pytest.mark.parametrize(
    'card_number, valid',
    [
        ('0', True),
        ('00', True),
        ('18', True),
        ('0000000000000000', True),
        ('4242424242424240', False),
        ('4242424242424241', False),
        ('4242424242424242', True),
        ('4242424242424243', False),
        ('4242424242424244', False),
        ('4242424242424245', False),
        ('4242424242424246', False),
        ('4242424242424247', False),
        ('4242424242424248', False),
        ('4242424242424249', False),
        ('42424242424242426', True),
        ('424242424242424267', True),
        ('4242424242424242675', True),
        ('5164581347216566', True),
        ('4345351087414150', True),
        ('343728738009846', True),
        ('5164581347216567', False),
        ('4345351087414151', False),
        ('343728738009847', False),
        ('000000018', True),
        ('99999999999999999999', True),
        ('99999999999999999999999999999999999999999999999999999999999999999997', True),
    ],
)
def test_validate_luhn_check_digit(card_number: str, valid: bool):
    if valid:
        assert PaymentCardNumber.validate_luhn_check_digit(card_number) == card_number
    else:
        with pytest.raises(LuhnValidationError):
            PaymentCardNumber.validate_luhn_check_digit(card_number)
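# --- Editor's sketch (illustrative, not pydantic's implementation) --------
# The check digit validated above follows the Luhn algorithm: from the
# right, double every second digit, subtract 9 from doubles above 9, and
# require the total to be a multiple of 10.
def _luhn_sketch(card_number: str) -> bool:
    total = 0
    for i, ch in enumerate(reversed(card_number)):
        d = int(ch)
        if i % 2 == 1:  # every second digit from the right
            d *= 2
            if d > 9:
                d -= 9
        total += d
    return total % 10 == 0


assert _luhn_sketch('4242424242424242') is True
assert _luhn_sketch('4242424242424241') is False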
test_length_for_brand(card_number: str, brand: PaymentCardBrand, valid: bool): pcn = PCN(card_number, brand) if valid: assert PaymentCardNumber.validate_length_for_brand(pcn) == pcn else: with pytest.raises(InvalidLengthForBrand): PaymentCardNumber.validate_length_for_brand(pcn) @pytest.mark.parametrize( 'card_number, brand', [ (VALID_AMEX, PaymentCardBrand.amex), (VALID_MC, PaymentCardBrand.mastercard), (VALID_VISA_16, PaymentCardBrand.visa), (VALID_OTHER, PaymentCardBrand.other), ], ) def test_get_brand(card_number: str, brand: PaymentCardBrand): assert PaymentCardNumber._get_brand(card_number) == brand def test_valid(): card = PaymentCard(card_number=VALID_VISA_16) assert str(card.card_number) == VALID_VISA_16 assert card.card_number.masked == '405000******0001' @pytest.mark.parametrize( 'card_number, error_message', [ (None, 'type_error.none.not_allowed'), ('1' * 11, 'value_error.any_str.min_length'), ('1' * 20, 'value_error.any_str.max_length'), ('h' * 16, 'value_error.payment_card_number.digits'), (LUHN_INVALID, 'value_error.payment_card_number.luhn_check'), (LEN_INVALID, 'value_error.payment_card_number.invalid_length_for_brand'), ], ) def test_error_types(card_number: Any, error_message: str): with pytest.raises(ValidationError, match=error_message) as exc_info: PaymentCard(card_number=card_number) assert exc_info.value.json().startswith('[') def test_payment_card_brand(): b = PaymentCardBrand.visa assert str(b) == 'Visa' assert b is PaymentCardBrand.visa assert b == PaymentCardBrand.visa assert b in {PaymentCardBrand.visa, PaymentCardBrand.mastercard} b = 'Visa' assert b is not PaymentCardBrand.visa assert b == PaymentCardBrand.visa assert b in {PaymentCardBrand.visa, PaymentCardBrand.mastercard} b = PaymentCardBrand.amex assert b is not PaymentCardBrand.visa assert b != PaymentCardBrand.visa assert b not in {PaymentCardBrand.visa, PaymentCardBrand.mastercard} pydantic-1.10.14/tests/test_typing.py000066400000000000000000000110701455251250200175560ustar00rootroot00000000000000import sys from collections import namedtuple from typing import Any, Callable as TypingCallable, Dict, ForwardRef, List, NamedTuple, NewType, Union # noqa: F401 import pytest from typing_extensions import Annotated # noqa: F401 from pydantic import Field # noqa: F401 from pydantic.typing import Literal, convert_generics, is_literal_type, is_namedtuple, is_none_type, is_typeddict try: from typing import TypedDict as typing_TypedDict except ImportError: typing_TypedDict = None try: from typing_extensions import TypedDict as typing_extensions_TypedDict except ImportError: typing_extensions_TypedDict = None try: from mypy_extensions import TypedDict as mypy_extensions_TypedDict except ImportError: mypy_extensions_TypedDict = None ALL_TYPEDDICT_KINDS = (typing_TypedDict, typing_extensions_TypedDict, mypy_extensions_TypedDict) def test_is_namedtuple(): class Employee(NamedTuple): name: str id: int = 3 assert is_namedtuple(namedtuple('Point', 'x y')) is True assert is_namedtuple(Employee) is True assert is_namedtuple(NamedTuple('Employee', [('name', str), ('id', int)])) is True class Other(tuple): name: str id: int assert is_namedtuple(Other) is False @pytest.mark.parametrize('TypedDict', (t for t in ALL_TYPEDDICT_KINDS if t is not None)) def test_is_typeddict_typing(TypedDict): class Employee(TypedDict): name: str id: int assert is_typeddict(Employee) is True assert is_typeddict(TypedDict('Employee', {'name': str, 'id': int})) is True class Other(dict): name: str id: int assert is_typeddict(Other) is False def 
test_is_none_type(): assert is_none_type(Literal[None]) is True assert is_none_type(None) is True assert is_none_type(type(None)) is True assert is_none_type(6) is False assert is_none_type({}) is False # WARNING: It's important to test `typing.Callable` not # `collections.abc.Callable` (even with python >= 3.9) as they behave # differently assert is_none_type(TypingCallable) is False class Hero: pass class Team: pass @pytest.mark.skipif(sys.version_info < (3, 9), reason='PEP585 generics only supported for python 3.9 and above.') @pytest.mark.parametrize( ['type_', 'expectations'], [ ('int', 'int'), ('Union[list["Hero"], int]', 'Union[list[ForwardRef("Hero")], int]'), ('list["Hero"]', 'list[ForwardRef("Hero")]'), ('dict["Hero", "Team"]', 'dict[ForwardRef("Hero"), ForwardRef("Team")]'), ('dict["Hero", list["Team"]]', 'dict[ForwardRef("Hero"), list[ForwardRef("Team")]]'), ('dict["Hero", List["Team"]]', 'dict[ForwardRef("Hero"), List[ForwardRef("Team")]]'), ('Dict["Hero", list["Team"]]', 'Dict[ForwardRef("Hero"), list[ForwardRef("Team")]]'), ( 'Annotated[list["Hero"], Field(min_length=2)]', 'Annotated[list[ForwardRef("Hero")], Field(min_length=2)]', ), ], ) def test_convert_generics(type_, expectations): assert str(convert_generics(eval(type_))) == str(eval(expectations)) @pytest.mark.skipif(sys.version_info < (3, 10), reason='NewType class was added in python 3.10.') def test_convert_generics_unsettable_args(): class User(NewType): __origin__ = type(list[str]) __args__ = (list['Hero'],) def __init__(self, name: str, tp: type) -> None: super().__init__(name, tp) def __setattr__(self, __name: str, __value: Any) -> None: if __name == '__args__': raise AttributeError # will be thrown during the generics conversion return super().__setattr__(__name, __value) # tests that convert_generics will not throw an exception even if __args__ isn't settable assert convert_generics(User('MyUser', str)).__args__ == (list['Hero'],) @pytest.mark.skipif(sys.version_info < (3, 10), reason='PEP604 unions only supported for python 3.10 and above.') def test_convert_generics_pep604(): assert ( convert_generics(dict['Hero', list['Team']] | int) == dict[ForwardRef('Hero'), list[ForwardRef('Team')]] | int ) def test_is_literal_with_typing_extension_literal(): from typing_extensions import Literal assert is_literal_type(Literal) is False assert is_literal_type(Literal['foo']) is True @pytest.mark.skipif(sys.version_info < (3, 8), reason='`typing.Literal` is available for python 3.8 and above.') def test_is_literal_with_typing_literal(): from typing import Literal assert is_literal_type(Literal) is False assert is_literal_type(Literal['foo']) is True pydantic-1.10.14/tests/test_utils.py000066400000000000000000000377771455251250200174320ustar00rootroot00000000000000import collections.abc import os import pickle import re import string import sys from copy import copy, deepcopy from typing import Callable, Dict, ForwardRef, List, NewType, Tuple, TypeVar, Union import pytest from typing_extensions import Annotated, Literal from pydantic import BaseModel, ConstrainedList, conlist from pydantic.color import Color from pydantic.dataclasses import dataclass from pydantic.fields import Undefined from pydantic.typing import ( all_literal_values, display_as_type, get_args, get_origin, is_new_type, new_type_supertype, resolve_annotations, ) from pydantic.utils import ( BUILTIN_COLLECTIONS, ClassAttribute, ValueItems, all_identical, deep_update, get_model, import_string, lenient_issubclass, path_type, smart_deepcopy, to_lower_camel, 
truncate, unique_list, ) from pydantic.version import version_info try: import devtools except ImportError: devtools = None def test_import_module(): assert import_string('os.path') == os.path def test_import_module_invalid(): with pytest.raises(ImportError) as exc_info: import_string('xx') assert exc_info.value.args[0] == '"xx" doesn\'t look like a module path' def test_import_no_attr(): with pytest.raises(ImportError) as exc_info: import_string('os.foobar') assert exc_info.value.args[0] == 'Module "os" does not define a "foobar" attribute' @pytest.mark.parametrize( 'value,expected', ((str, 'str'), ('string', 'str'), (Union[str, int], 'Union[str, int]'), (list, 'list')) ) def test_display_as_type(value, expected): assert display_as_type(value) == expected @pytest.mark.skipif(sys.version_info < (3, 9), reason='generic aliases are not available in python < 3.9') def test_display_as_type_generic_alias(): assert display_as_type(list[[Union[str, int]]]) == 'list[[Union[str, int]]]' def test_lenient_issubclass(): class A(str): pass assert lenient_issubclass(A, str) is True @pytest.mark.skipif(sys.version_info < (3, 9), reason='generic aliases are not available in python < 3.9') def test_lenient_issubclass_with_generic_aliases(): from collections.abc import Mapping # should not raise an error here: assert lenient_issubclass(list[str], Mapping) is False def test_lenient_issubclass_is_lenient(): assert lenient_issubclass('a', 'a') is False @pytest.mark.parametrize( 'input_value,output', [ (object, ""), (string.ascii_lowercase, "'abcdefghijklmnopq…'"), (list(range(20)), '[0, 1, 2, 3, 4, 5, …'), ], ) def test_truncate(input_value, output): with pytest.warns(DeprecationWarning, match='`truncate` is no-longer used by pydantic and is deprecated'): assert truncate(input_value, max_len=20) == output @pytest.mark.parametrize( 'input_value,output', [ ([], []), ([1, 1, 1, 2, 1, 2, 3, 2, 3, 1, 4, 2, 3, 1], [1, 2, 3, 4]), (['a', 'a', 'b', 'a', 'b', 'c', 'b', 'c', 'a'], ['a', 'b', 'c']), ], ) def test_unique_list(input_value, output): assert unique_list(input_value) == output assert unique_list(unique_list(input_value)) == unique_list(input_value) def test_value_items(): v = ['a', 'b', 'c'] vi = ValueItems(v, {0, -1}) assert vi.is_excluded(2) assert [v_ for i, v_ in enumerate(v) if not vi.is_excluded(i)] == ['b'] assert vi.is_included(2) assert [v_ for i, v_ in enumerate(v) if vi.is_included(i)] == ['a', 'c'] v2 = {'a': v, 'b': {'a': 1, 'b': (1, 2)}, 'c': 1} vi = ValueItems(v2, {'a': {0, -1}, 'b': {'a': ..., 'b': -1}}) assert not vi.is_excluded('a') assert vi.is_included('a') assert not vi.is_excluded('c') assert not vi.is_included('c') assert str(vi) == "{'a': {0, -1}, 'b': {'a': Ellipsis, 'b': -1}}" assert repr(vi) == "ValueItems({'a': {0, -1}, 'b': {'a': Ellipsis, 'b': -1}})" excluded = {k_: v_ for k_, v_ in v2.items() if not vi.is_excluded(k_)} assert excluded == {'a': v, 'b': {'a': 1, 'b': (1, 2)}, 'c': 1} included = {k_: v_ for k_, v_ in v2.items() if vi.is_included(k_)} assert included == {'a': v, 'b': {'a': 1, 'b': (1, 2)}} sub_v = included['a'] sub_vi = ValueItems(sub_v, vi.for_element('a')) assert repr(sub_vi) == 'ValueItems({0: Ellipsis, 2: Ellipsis})' assert sub_vi.is_excluded(2) assert [v_ for i, v_ in enumerate(sub_v) if not sub_vi.is_excluded(i)] == ['b'] assert sub_vi.is_included(2) assert [v_ for i, v_ in enumerate(sub_v) if sub_vi.is_included(i)] == ['a', 'c'] @pytest.mark.parametrize( 'base,override,intersect,expected', [ # Check in default (union) mode (..., ..., False, ...), (None, None, 
def test_value_items():
    v = ['a', 'b', 'c']
    vi = ValueItems(v, {0, -1})
    assert vi.is_excluded(2)
    assert [v_ for i, v_ in enumerate(v) if not vi.is_excluded(i)] == ['b']

    assert vi.is_included(2)
    assert [v_ for i, v_ in enumerate(v) if vi.is_included(i)] == ['a', 'c']

    v2 = {'a': v, 'b': {'a': 1, 'b': (1, 2)}, 'c': 1}
    vi = ValueItems(v2, {'a': {0, -1}, 'b': {'a': ..., 'b': -1}})

    assert not vi.is_excluded('a')
    assert vi.is_included('a')
    assert not vi.is_excluded('c')
    assert not vi.is_included('c')

    assert str(vi) == "{'a': {0, -1}, 'b': {'a': Ellipsis, 'b': -1}}"
    assert repr(vi) == "ValueItems({'a': {0, -1}, 'b': {'a': Ellipsis, 'b': -1}})"

    excluded = {k_: v_ for k_, v_ in v2.items() if not vi.is_excluded(k_)}
    assert excluded == {'a': v, 'b': {'a': 1, 'b': (1, 2)}, 'c': 1}

    included = {k_: v_ for k_, v_ in v2.items() if vi.is_included(k_)}
    assert included == {'a': v, 'b': {'a': 1, 'b': (1, 2)}}

    sub_v = included['a']
    sub_vi = ValueItems(sub_v, vi.for_element('a'))
    assert repr(sub_vi) == 'ValueItems({0: Ellipsis, 2: Ellipsis})'

    assert sub_vi.is_excluded(2)
    assert [v_ for i, v_ in enumerate(sub_v) if not sub_vi.is_excluded(i)] == ['b']

    assert sub_vi.is_included(2)
    assert [v_ for i, v_ in enumerate(sub_v) if sub_vi.is_included(i)] == ['a', 'c']


@pytest.mark.parametrize(
    'base,override,intersect,expected',
    [
        # Check in default (union) mode
        (..., ..., False, ...),
        (None, None, False, None),
        ({}, {}, False, {}),
        (..., None, False, ...),
        (None, ..., False, ...),
        (None, {}, False, {}),
        ({}, None, False, {}),
        (..., {}, False, {}),
        ({}, ..., False, ...),
        ({'a': None}, {'a': None}, False, {}),
        ({'a'}, ..., False, ...),
        ({'a'}, {}, False, {'a': ...}),
        ({'a'}, {'b'}, False, {'a': ..., 'b': ...}),
        ({'a': ...}, {'b': {'c'}}, False, {'a': ..., 'b': {'c': ...}}),
        ({'a': ...}, {'a': {'c'}}, False, {'a': {'c': ...}}),
        ({'a': {'c': ...}, 'b': {'d'}}, {'a': ...}, False, {'a': ..., 'b': {'d': ...}}),
        # Check in intersection mode
        (..., ..., True, ...),
        (None, None, True, None),
        ({}, {}, True, {}),
        (..., None, True, ...),
        (None, ..., True, ...),
        (None, {}, True, {}),
        ({}, None, True, {}),
        (..., {}, True, {}),
        ({}, ..., True, {}),
        ({'a': None}, {'a': None}, True, {}),
        ({'a'}, ..., True, {'a': ...}),
        ({'a'}, {}, True, {}),
        ({'a'}, {'b'}, True, {}),
        ({'a': ...}, {'b': {'c'}}, True, {}),
        ({'a': ...}, {'a': {'c'}}, True, {'a': {'c': ...}}),
        ({'a': {'c': ...}, 'b': {'d'}}, {'a': ...}, True, {'a': {'c': ...}}),
        # Check usage of `True` instead of `...`
        (..., True, False, True),
        (True, ..., False, ...),
        (True, None, False, True),
        ({'a': {'c': True}, 'b': {'d'}}, {'a': True}, False, {'a': True, 'b': {'d': ...}}),
    ],
)
def test_value_items_merge(base, override, intersect, expected):
    actual = ValueItems.merge(base, override, intersect=intersect)
    assert actual == expected


def test_value_items_error():
    with pytest.raises(TypeError) as e:
        ValueItems(1, (1, 2, 3))

    assert str(e.value) == "Unexpected type of exclude value <class 'tuple'>"
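
# A hedged extra case, not part of the original suite, mirroring the
# parametrized table above: in intersect mode only keys present in both
# specs survive the merge. The test name is hypothetical.
def test_value_items_merge_intersect_overlap():
    assert ValueItems.merge({'a', 'b'}, {'b', 'c'}, intersect=True) == {'b': ...}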
def test_is_new_type():
    new_type = NewType('new_type', str)
    new_new_type = NewType('new_new_type', new_type)
    assert is_new_type(new_type)
    assert is_new_type(new_new_type)
    assert not is_new_type(str)


def test_new_type_supertype():
    new_type = NewType('new_type', str)
    new_new_type = NewType('new_new_type', new_type)
    assert new_type_supertype(new_type) == str
    assert new_type_supertype(new_new_type) == str


def test_pretty():
    class MyTestModel(BaseModel):
        a = 1
        b = [1, 2, 3]

    m = MyTestModel()
    assert m.__repr_name__() == 'MyTestModel'
    assert str(m) == 'a=1 b=[1, 2, 3]'
    assert repr(m) == 'MyTestModel(a=1, b=[1, 2, 3])'
    assert list(m.__pretty__(lambda x: f'fmt: {x!r}')) == [
        'MyTestModel(',
        1,
        'a=',
        'fmt: 1',
        ',',
        0,
        'b=',
        'fmt: [1, 2, 3]',
        ',',
        0,
        -1,
        ')',
    ]


def test_pretty_color():
    c = Color('red')
    assert str(c) == 'red'
    assert repr(c) == "Color('red', rgb=(255, 0, 0))"
    assert list(c.__pretty__(lambda x: f'fmt: {x!r}')) == [
        'Color(',
        1,
        "fmt: 'red'",
        ',',
        0,
        'rgb=',
        'fmt: (255, 0, 0)',
        ',',
        0,
        -1,
        ')',
    ]


@pytest.mark.skipif(not devtools, reason='devtools not installed')
def test_devtools_output():
    class MyTestModel(BaseModel):
        a = 1
        b = [1, 2, 3]

    assert devtools.pformat(MyTestModel()) == 'MyTestModel(\n    a=1,\n    b=[1, 2, 3],\n)'


@pytest.mark.skipif(not devtools, reason='devtools not installed')
def test_devtools_output_validation_error():
    class Model(BaseModel):
        a: int

    with pytest.raises(ValueError) as exc_info:
        Model()

    assert devtools.pformat(exc_info.value) == (
        'ValidationError(\n'
        "    model='Model',\n"
        '    errors=[\n'
        '        {\n'
        "            'loc': ('a',),\n"
        "            'msg': 'field required',\n"
        "            'type': 'value_error.missing',\n"
        '        },\n'
        '    ],\n'
        ')'
    )


@pytest.mark.parametrize(
    'mapping, updating_mapping, expected_mapping, msg',
    [
        (
            {'key': {'inner_key': 0}},
            {'other_key': 1},
            {'key': {'inner_key': 0}, 'other_key': 1},
            'extra keys are inserted',
        ),
        (
            {'key': {'inner_key': 0}, 'other_key': 1},
            {'key': [1, 2, 3]},
            {'key': [1, 2, 3], 'other_key': 1},
            'values that can not be merged are updated',
        ),
        (
            {'key': {'inner_key': 0}},
            {'key': {'other_key': 1}},
            {'key': {'inner_key': 0, 'other_key': 1}},
            'values that have corresponding keys are merged',
        ),
        (
            {'key': {'inner_key': {'deep_key': 0}}},
            {'key': {'inner_key': {'other_deep_key': 1}}},
            {'key': {'inner_key': {'deep_key': 0, 'other_deep_key': 1}}},
            'deeply nested values that have corresponding keys are merged',
        ),
    ],
)
def test_deep_update(mapping, updating_mapping, expected_mapping, msg):
    assert deep_update(mapping, updating_mapping) == expected_mapping, msg


def test_deep_update_is_not_mutating():
    mapping = {'key': {'inner_key': {'deep_key': 1}}}
    updated_mapping = deep_update(mapping, {'key': {'inner_key': {'other_deep_key': 1}}})
    assert updated_mapping == {'key': {'inner_key': {'deep_key': 1, 'other_deep_key': 1}}}
    assert mapping == {'key': {'inner_key': {'deep_key': 1}}}
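
# A hedged extra sketch, not part of the original suite: it assumes
# `deep_update` accepts several updating mappings applied left to right
# (the variadic signature in pydantic.utils). The test name is hypothetical.
def test_deep_update_multiple_updating_mappings():
    assert deep_update({'a': 1}, {'b': 2}, {'a': 3}) == {'a': 3, 'b': 2}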
def test_undefined_repr():
    assert repr(Undefined) == 'PydanticUndefined'


def test_undefined_copy():
    assert copy(Undefined) is Undefined
    assert deepcopy(Undefined) is Undefined


def test_get_model():
    class A(BaseModel):
        a: str

    assert get_model(A) == A

    @dataclass
    class B:
        a: str

    assert get_model(B) == B.__pydantic_model__

    class C:
        pass

    with pytest.raises(TypeError):
        get_model(C)


def test_version_info():
    s = version_info()
    assert re.match(' *pydantic version: ', s)
    assert s.count('\n') == 5


def test_class_attribute():
    class Foo:
        attr = ClassAttribute('attr', 'foo')

    assert Foo.attr == 'foo'

    with pytest.raises(AttributeError, match="'attr' attribute of 'Foo' is class-only"):
        Foo().attr

    f = Foo()
    f.attr = 'not foo'
    assert f.attr == 'not foo'


def test_all_literal_values():
    L1 = Literal['1']
    assert all_literal_values(L1) == ('1',)

    L2 = Literal['2']
    L12 = Literal[L1, L2]
    assert sorted(all_literal_values(L12)) == sorted(('1', '2'))

    L312 = Literal['3', Literal[L1, L2]]
    assert sorted(all_literal_values(L312)) == sorted(('1', '2', '3'))


def test_path_type(tmp_path):
    assert path_type(tmp_path) == 'directory'
    file = tmp_path / 'foobar.txt'
    file.write_text('hello')
    assert path_type(file) == 'file'


def test_path_type_unknown(tmp_path):
    p = type(
        'FakePath',
        (),
        {
            'exists': lambda: True,
            'is_dir': lambda: False,
            'is_file': lambda: False,
            'is_mount': lambda: False,
            'is_symlink': lambda: False,
            'is_block_device': lambda: False,
            'is_char_device': lambda: False,
            'is_fifo': lambda: False,
            'is_socket': lambda: False,
        },
    )
    assert path_type(p) == 'unknown'


@pytest.mark.parametrize(
    'obj',
    (1, 1.0, '1', b'1', int, None, test_all_literal_values, len, test_all_literal_values.__code__, lambda: ..., ...),
)
def test_smart_deepcopy_immutable_non_sequence(obj, mocker):
    # make sure deepcopy is not used
    # (other option will be to use obj.copy(), but this will produce error as none of given objects have this method)
    mocker.patch('pydantic.utils.deepcopy', side_effect=RuntimeError)
    assert smart_deepcopy(obj) is deepcopy(obj) is obj


@pytest.mark.parametrize('empty_collection', (collection() for collection in BUILTIN_COLLECTIONS))
def test_smart_deepcopy_empty_collection(empty_collection, mocker):
    mocker.patch('pydantic.utils.deepcopy', side_effect=RuntimeError)  # make sure deepcopy is not used
    if not isinstance(empty_collection, (tuple, frozenset)):  # empty tuple or frozenset are always the same object
        assert smart_deepcopy(empty_collection) is not empty_collection


@pytest.mark.parametrize(
    'collection', (c.fromkeys((1,)) if issubclass(c, dict) else c((1,)) for c in BUILTIN_COLLECTIONS)
)
def test_smart_deepcopy_collection(collection, mocker):
    expected_value = object()
    mocker.patch('pydantic.utils.deepcopy', return_value=expected_value)
    assert smart_deepcopy(collection) is expected_value


@pytest.mark.parametrize('error', [TypeError, ValueError, RuntimeError])
def test_smart_deepcopy_error(error, mocker):
    class RaiseOnBooleanOperation(str):
        def __bool__(self):
            raise error('raised error')

    obj = RaiseOnBooleanOperation()
    expected_value = deepcopy(obj)
    assert smart_deepcopy(obj) == expected_value


T = TypeVar('T')


@pytest.mark.parametrize(
    'input_value,output_value',
    [
        (Annotated[int, 10] if Annotated else None, Annotated),
        (Callable[[], T][int], collections.abc.Callable),
        (Dict[str, int], dict),
        (List[str], list),
        (Union[int, str], Union),
        (int, None),
    ],
)
def test_get_origin(input_value, output_value):
    if input_value is None:
        pytest.skip('Skipping undefined hint for this python version')
    assert get_origin(input_value) is output_value


@pytest.mark.skipif(sys.version_info < (3, 8), reason='get_args is only consistent for python >= 3.8')
@pytest.mark.parametrize(
    'input_value,output_value',
    [
        (conlist(str), (str,)),
        (ConstrainedList, ()),
        (List[str], (str,)),
        (Dict[str, int], (str, int)),
        (int, ()),
        (Union[int, Union[T, int], str][int], (int, str)),
        (Union[int, Tuple[T, int]][str], (int, Tuple[str, int])),
        (Callable[[], T][int], ([], int)),
        (Annotated[int, 10] if Annotated else None, (int, 10)),
    ],
)
def test_get_args(input_value, output_value):
    if input_value is None:
        pytest.skip('Skipping undefined hint for this python version')
    assert get_args(input_value) == output_value


def test_resolve_annotations_no_module():
    # TODO: is there a better test for this, can this case really happen?
    fr = ForwardRef('Foo')
    assert resolve_annotations({'Foo': ForwardRef('Foo')}, None) == {'Foo': fr}


def test_all_identical():
    a, b = object(), object()
    c = [b]
    assert all_identical([a, b], [a, b]) is True
    assert all_identical([a, b], [a, b]) is True
    assert all_identical([a, b, b], [a, b, b]) is True
    assert all_identical([a, c, b], [a, c, b]) is True

    assert all_identical([], [a]) is False, 'Expected iterables with different lengths to evaluate to `False`'
    assert all_identical([a], []) is False, 'Expected iterables with different lengths to evaluate to `False`'
    assert (
        all_identical([a, [b], b], [a, [b], b]) is False
    ), 'New list objects are different objects and should therefore not be identical.'
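
# A hedged extra check, not part of the original suite: `all_identical`
# compares items with `is`, so equal but distinct objects are not identical.
# The test name is hypothetical.
def test_all_identical_equal_but_distinct():
    assert all_identical([[1]], [[1]]) is False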
def test_undefined_pickle():
    undefined2 = pickle.loads(pickle.dumps(Undefined))
    assert undefined2 is Undefined


def test_on_lower_camel_zero_length():
    assert to_lower_camel('') == ''


def test_on_lower_camel_one_length():
    assert to_lower_camel('a') == 'a'


def test_on_lower_camel_many_length():
    assert to_lower_camel('i_like_turtles') == 'iLikeTurtles'

pydantic-1.10.14/tests/test_validators.py

from collections import deque
from datetime import datetime
from enum import Enum
from functools import partial, partialmethod
from itertools import product
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

import pytest
from typing_extensions import Literal

from pydantic import BaseModel, ConfigError, Extra, Field, ValidationError, conlist, errors, validator
from pydantic.class_validators import make_generic_validator, root_validator


def test_simple():
    class Model(BaseModel):
        a: str

        @validator('a')
        def check_a(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    assert Model(a='this is foobar good').a == 'this is foobar good'

    with pytest.raises(ValidationError) as exc_info:
        Model(a='snap')
    assert exc_info.value.errors() == [{'loc': ('a',), 'msg': '"foobar" not found in a', 'type': 'value_error'}]


def test_int_validation():
    class Model(BaseModel):
        a: int

    with pytest.raises(ValidationError) as exc_info:
        Model(a='snap')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]
    assert Model(a=3).a == 3
    assert Model(a=True).a == 1
    assert Model(a=False).a == 0
    assert Model(a=4.5).a == 4


@pytest.mark.parametrize('value', [2.2250738585072011e308, float('nan'), float('inf')])
def test_int_overflow_validation(value):
    class Model(BaseModel):
        a: int

    with pytest.raises(ValidationError) as exc_info:
        Model(a=value)
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_frozenset_validation():
    class Model(BaseModel):
        a: frozenset

    with pytest.raises(ValidationError) as exc_info:
        Model(a='snap')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid frozenset', 'type': 'type_error.frozenset'}
    ]
    assert Model(a={1, 2, 3}).a == frozenset({1, 2, 3})
    assert Model(a=frozenset({1, 2, 3})).a == frozenset({1, 2, 3})
    assert Model(a=[4, 5]).a == frozenset({4, 5})
    assert Model(a=(6,)).a == frozenset({6})


def test_deque_validation():
    class Model(BaseModel):
        a: deque

    with pytest.raises(ValidationError) as exc_info:
        Model(a='snap')
    assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'value is not a valid deque', 'type': 'type_error.deque'}]
    assert Model(a={1, 2, 3}).a == deque([1, 2, 3])
    assert Model(a=deque({1, 2, 3})).a == deque([1, 2, 3])
    assert Model(a=[4, 5]).a == deque([4, 5])
    assert Model(a=(6,)).a == deque([6])


def test_validate_whole():
    class Model(BaseModel):
        a: List[int]

        @validator('a', pre=True)
        def check_a1(cls, v):
            v.append('123')
            return v

        @validator('a')
        def check_a2(cls, v):
            v.append(456)
            return v

    assert Model(a=[1, 2]).a == [1, 2, 123, 456]


def test_validate_kwargs():
    class Model(BaseModel):
        b: int
        a: List[int]

        @validator('a', each_item=True)
        def check_a1(cls, v, values, **kwargs):
            return v + values['b']

    assert Model(a=[1, 2], b=6).a == [7, 8]
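
# A minimal extra sketch, not part of the original suite: with
# `each_item=True` the validator runs once per list element rather than on
# the list as a whole. The test name is hypothetical.
def test_validate_each_item_doubles_elements():
    class Model(BaseModel):
        a: List[int]

        @validator('a', each_item=True)
        def double(cls, v):
            return v * 2

    assert Model(a=[1, 2, 3]).a == [2, 4, 6]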
def test_validate_pre_error():
    calls = []

    class Model(BaseModel):
        a: List[int]

        @validator('a', pre=True)
        def check_a1(cls, v):
            calls.append(f'check_a1 {v}')
            if 1 in v:
                raise ValueError('a1 broken')
            v[0] += 1
            return v

        @validator('a')
        def check_a2(cls, v):
            calls.append(f'check_a2 {v}')
            if 10 in v:
                raise ValueError('a2 broken')
            return v

    assert Model(a=[3, 8]).a == [4, 8]
    assert calls == ['check_a1 [3, 8]', 'check_a2 [4, 8]']

    calls = []
    with pytest.raises(ValidationError) as exc_info:
        Model(a=[1, 3])
    assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'a1 broken', 'type': 'value_error'}]
    assert calls == ['check_a1 [1, 3]']

    calls = []
    with pytest.raises(ValidationError) as exc_info:
        Model(a=[5, 10])
    assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'a2 broken', 'type': 'value_error'}]
    assert calls == ['check_a1 [5, 10]', 'check_a2 [6, 10]']


class ValidateAssignmentModel(BaseModel):
    a: int = 4
    b: str = ...
    c: int = 0

    @validator('b')
    def b_length(cls, v, values, **kwargs):
        if 'a' in values and len(v) < values['a']:
            raise ValueError('b too short')
        return v

    @validator('c')
    def double_c(cls, v):
        return v * 2

    class Config:
        validate_assignment = True
        extra = Extra.allow


def test_validating_assignment_ok():
    p = ValidateAssignmentModel(b='hello')
    assert p.b == 'hello'


def test_validating_assignment_fail():
    with pytest.raises(ValidationError):
        ValidateAssignmentModel(a=10, b='hello')

    p = ValidateAssignmentModel(b='hello')
    with pytest.raises(ValidationError):
        p.b = 'x'


def test_validating_assignment_value_change():
    p = ValidateAssignmentModel(b='hello', c=2)
    assert p.c == 4

    p = ValidateAssignmentModel(b='hello')
    assert p.c == 0
    p.c = 3
    assert p.c == 6


def test_validating_assignment_extra():
    p = ValidateAssignmentModel(b='hello', extra_field=1.23)
    assert p.extra_field == 1.23

    p = ValidateAssignmentModel(b='hello')
    p.extra_field = 1.23
    assert p.extra_field == 1.23
    p.extra_field = 'bye'
    assert p.extra_field == 'bye'


def test_validating_assignment_dict():
    with pytest.raises(ValidationError) as exc_info:
        ValidateAssignmentModel(a='x', b='xx')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_validating_assignment_values_dict():
    class ModelOne(BaseModel):
        a: int

    class ModelTwo(BaseModel):
        m: ModelOne
        b: int

        @validator('b')
        def validate_b(cls, b, values):
            if 'm' in values:
                return b + values['m'].a  # this fails if values['m'] is a dict
            else:
                return b

        class Config:
            validate_assignment = True

    model = ModelTwo(m=ModelOne(a=1), b=2)
    assert model.b == 3
    model.b = 3
    assert model.b == 4


def test_validate_multiple():
    # also test TypeError
    class Model(BaseModel):
        a: str
        b: str

        @validator('a', 'b')
        def check_a_and_b(cls, v, field, **kwargs):
            if len(v) < 4:
                raise TypeError(f'{field.alias} is too short')
            return v + 'x'

    assert Model(a='1234', b='5678').dict() == {'a': '1234x', 'b': '5678x'}

    with pytest.raises(ValidationError) as exc_info:
        Model(a='x', b='x')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'a is too short', 'type': 'type_error'},
        {'loc': ('b',), 'msg': 'b is too short', 'type': 'type_error'},
    ]


def test_classmethod():
    class Model(BaseModel):
        a: str

        @validator('a')
        def check_a(cls, v):
            assert cls is Model
            return v

    m = Model(a='this is foobar good')
    assert m.a == 'this is foobar good'
    m.check_a('x')
def test_duplicates():
    with pytest.raises(errors.ConfigError) as exc_info:

        class Model(BaseModel):
            a: str
            b: str

            @validator('a')
            def duplicate_name(cls, v):
                return v

            @validator('b')  # noqa
            def duplicate_name(cls, v):  # noqa
                return v

    assert str(exc_info.value) == (
        'duplicate validator function '
        '"tests.test_validators.test_duplicates.<locals>.Model.duplicate_name"; '
        'if this is intended, set `allow_reuse=True`'
    )


def test_use_bare():
    with pytest.raises(errors.ConfigError) as exc_info:

        class Model(BaseModel):
            a: str

            @validator
            def checker(cls, v):
                return v

    assert 'validators should be used with fields' in str(exc_info.value)


def test_use_no_fields():
    with pytest.raises(errors.ConfigError) as exc_info:

        class Model(BaseModel):
            a: str

            @validator()
            def checker(cls, v):
                return v

    assert 'validator with no fields specified' in str(exc_info.value)
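
# A hedged extra sketch, not part of the original suite: `allow_reuse=True`
# suppresses the duplicate-validator ConfigError demonstrated above. The test
# name is hypothetical.
def test_duplicate_name_allowed_with_reuse():
    class Model(BaseModel):
        a: str
        b: str

        @validator('a', allow_reuse=True)
        def duplicate_name(cls, v):
            return v

        @validator('b', allow_reuse=True)  # noqa
        def duplicate_name(cls, v):  # noqa
            return v

    assert Model(a='x', b='y').b == 'y'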
def test_validate_always():
    check_calls = 0

    class Model(BaseModel):
        a: str = None

        @validator('a', pre=True, always=True)
        def check_a(cls, v):
            nonlocal check_calls
            check_calls += 1
            return v or 'xxx'

    assert Model().a == 'xxx'
    assert check_calls == 1
    assert Model(a='y').a == 'y'
    assert check_calls == 2


def test_validate_always_on_inheritance():
    check_calls = 0

    class ParentModel(BaseModel):
        a: str = None

    class Model(ParentModel):
        @validator('a', pre=True, always=True)
        def check_a(cls, v):
            nonlocal check_calls
            check_calls += 1
            return v or 'xxx'

    assert Model().a == 'xxx'
    assert check_calls == 1
    assert Model(a='y').a == 'y'
    assert check_calls == 2


def test_validate_not_always():
    check_calls = 0

    class Model(BaseModel):
        a: str = None

        @validator('a', pre=True)
        def check_a(cls, v):
            nonlocal check_calls
            check_calls += 1
            return v or 'xxx'

    assert Model().a is None
    assert check_calls == 0
    assert Model(a='y').a == 'y'
    assert check_calls == 1


def test_wildcard_validators():
    calls = []

    class Model(BaseModel):
        a: str
        b: int

        @validator('a')
        def check_a(cls, v, field, **kwargs):
            calls.append(('check_a', v, field.name))
            return v

        @validator('*')
        def check_all(cls, v, field, **kwargs):
            calls.append(('check_all', v, field.name))
            return v

    assert Model(a='abc', b='123').dict() == dict(a='abc', b=123)
    assert calls == [('check_a', 'abc', 'a'), ('check_all', 'abc', 'a'), ('check_all', 123, 'b')]


def test_wildcard_validator_error():
    class Model(BaseModel):
        a: str
        b: str

        @validator('*')
        def check_all(cls, v, field, **kwargs):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    assert Model(a='foobar a', b='foobar b').b == 'foobar b'

    with pytest.raises(ValidationError) as exc_info:
        Model(a='snap')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': '"foobar" not found in a', 'type': 'value_error'},
        {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'},
    ]


def test_invalid_field():
    with pytest.raises(errors.ConfigError) as exc_info:

        class Model(BaseModel):
            a: str

            @validator('b')
            def check_b(cls, v):
                return v

    assert str(exc_info.value) == (
        "Validators defined with incorrect fields: check_b "  # noqa: Q000
        "(use check_fields=False if you're inheriting from the model and intended this)"
    )


def test_validate_child():
    class Parent(BaseModel):
        a: str

    class Child(Parent):
        @validator('a')
        def check_a(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    assert Parent(a='this is not a child').a == 'this is not a child'
    assert Child(a='this is foobar good').a == 'this is foobar good'
    with pytest.raises(ValidationError):
        Child(a='snap')


def test_validate_child_extra():
    class Parent(BaseModel):
        a: str

        @validator('a')
        def check_a_one(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    class Child(Parent):
        @validator('a')
        def check_a_two(cls, v):
            return v.upper()

    assert Parent(a='this is foobar good').a == 'this is foobar good'
    assert Child(a='this is foobar good').a == 'THIS IS FOOBAR GOOD'
    with pytest.raises(ValidationError):
        Child(a='snap')


def test_validate_child_all():
    class Parent(BaseModel):
        a: str

    class Child(Parent):
        @validator('*')
        def check_a(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    assert Parent(a='this is not a child').a == 'this is not a child'
    assert Child(a='this is foobar good').a == 'this is foobar good'
    with pytest.raises(ValidationError):
        Child(a='snap')


def test_validate_parent():
    class Parent(BaseModel):
        a: str

        @validator('a')
        def check_a(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    class Child(Parent):
        pass

    assert Parent(a='this is foobar good').a == 'this is foobar good'
    assert Child(a='this is foobar good').a == 'this is foobar good'
    with pytest.raises(ValidationError):
        Parent(a='snap')
    with pytest.raises(ValidationError):
        Child(a='snap')


def test_validate_parent_all():
    class Parent(BaseModel):
        a: str

        @validator('*')
        def check_a(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    class Child(Parent):
        pass

    assert Parent(a='this is foobar good').a == 'this is foobar good'
    assert Child(a='this is foobar good').a == 'this is foobar good'
    with pytest.raises(ValidationError):
        Parent(a='snap')
    with pytest.raises(ValidationError):
        Child(a='snap')


def test_inheritance_keep():
    class Parent(BaseModel):
        a: int

        @validator('a')
        def add_to_a(cls, v):
            return v + 1

    class Child(Parent):
        pass

    assert Child(a=0).a == 1


def test_inheritance_replace():
    class Parent(BaseModel):
        a: int

        @validator('a')
        def add_to_a(cls, v):
            return v + 1

    class Child(Parent):
        @validator('a')
        def add_to_a(cls, v):
            return v + 5

    assert Child(a=0).a == 5


def test_inheritance_new():
    class Parent(BaseModel):
        a: int

        @validator('a')
        def add_one_to_a(cls, v):
            return v + 1

    class Child(Parent):
        @validator('a')
        def add_five_to_a(cls, v):
            return v + 5

    assert Child(a=0).a == 6


def test_validation_each_item():
    class Model(BaseModel):
        foobar: Dict[int, int]

        @validator('foobar', each_item=True)
        def check_foobar(cls, v):
            return v + 1

    assert Model(foobar={1: 1}).foobar == {1: 2}


def test_validation_each_item_one_sublevel():
    class Model(BaseModel):
        foobar: List[Tuple[int, int]]

        @validator('foobar', each_item=True)
        def check_foobar(cls, v: Tuple[int, int]) -> Tuple[int, int]:
            v1, v2 = v
            assert v1 == v2
            return v

    assert Model(foobar=[(1, 1), (2, 2)]).foobar == [(1, 1), (2, 2)]


def test_key_validation():
    class Model(BaseModel):
        foobar: Dict[int, int]

        @validator('foobar')
        def check_foobar(cls, value):
            return {k + 1: v + 1 for k, v in value.items()}

    assert Model(foobar={1: 1}).foobar == {2: 2}


def test_validator_always_optional():
    check_calls = 0

    class Model(BaseModel):
        a: Optional[str] = None

        @validator('a', pre=True, always=True)
        def check_a(cls, v):
            nonlocal check_calls
            check_calls += 1
            return v or 'default value'

    assert Model(a='y').a == 'y'
    assert check_calls == 1
    assert Model().a == 'default value'
    assert check_calls == 2


def test_validator_always_pre():
    check_calls = 0

    class Model(BaseModel):
        a: str = None

        @validator('a', always=True, pre=True)
        def check_a(cls, v):
            nonlocal check_calls
            check_calls += 1
            return v or 'default value'

    assert Model(a='y').a == 'y'
    assert Model().a == 'default value'
    assert check_calls == 2


def test_validator_always_post():
    class Model(BaseModel):
        a: str = None

        @validator('a', always=True)
        def check_a(cls, v):
            return v or 'default value'

    assert Model(a='y').a == 'y'
    assert Model().a == 'default value'


def test_validator_always_post_optional():
    class Model(BaseModel):
        a: Optional[str] = None

        @validator('a', always=True, pre=True)
        def check_a(cls, v):
            return v or 'default value'

    assert Model(a='y').a == 'y'
    assert Model().a == 'default value'
def test_validator_bad_fields_throws_configerror():
    """
    Attempts to create a validator with fields set as a list of strings,
    rather than just multiple string args. Expects ConfigError to be raised.
    """
    with pytest.raises(ConfigError, match='validator fields should be passed as separate string args.'):

        class Model(BaseModel):
            a: str
            b: str

            @validator(['a', 'b'])
            def check_fields(cls, v):
                return v


def test_datetime_validator():
    check_calls = 0

    class Model(BaseModel):
        d: datetime = None

        @validator('d', pre=True, always=True)
        def check_d(cls, v):
            nonlocal check_calls
            check_calls += 1
            return v or datetime(2032, 1, 1)

    assert Model(d='2023-01-01T00:00:00').d == datetime(2023, 1, 1)
    assert check_calls == 1
    assert Model().d == datetime(2032, 1, 1)
    assert check_calls == 2
    assert Model(d=datetime(2023, 1, 1)).d == datetime(2023, 1, 1)
    assert check_calls == 3


def test_pre_called_once():
    check_calls = 0

    class Model(BaseModel):
        a: Tuple[int, int, int]

        @validator('a', pre=True)
        def check_a(cls, v):
            nonlocal check_calls
            check_calls += 1
            return v

    assert Model(a=['1', '2', '3']).a == (1, 2, 3)
    assert check_calls == 1


@pytest.mark.parametrize(
    'fields,result',
    [
        (['val'], '_v_'),
        (['foobar'], '_v_'),
        (['val', 'field'], '_v_,_field_'),
        (['val', 'config'], '_v_,_config_'),
        (['val', 'values'], '_v_,_values_'),
        (['val', 'field', 'config'], '_v_,_field_,_config_'),
        (['val', 'field', 'values'], '_v_,_field_,_values_'),
        (['val', 'config', 'values'], '_v_,_config_,_values_'),
        (['val', 'field', 'values', 'config'], '_v_,_field_,_values_,_config_'),
        (['cls', 'val'], '_cls_,_v_'),
        (['cls', 'foobar'], '_cls_,_v_'),
        (['cls', 'val', 'field'], '_cls_,_v_,_field_'),
        (['cls', 'val', 'config'], '_cls_,_v_,_config_'),
        (['cls', 'val', 'values'], '_cls_,_v_,_values_'),
        (['cls', 'val', 'field', 'config'], '_cls_,_v_,_field_,_config_'),
        (['cls', 'val', 'field', 'values'], '_cls_,_v_,_field_,_values_'),
        (['cls', 'val', 'config', 'values'], '_cls_,_v_,_config_,_values_'),
        (['cls', 'val', 'field', 'values', 'config'], '_cls_,_v_,_field_,_values_,_config_'),
    ],
)
def test_make_generic_validator(fields, result):
    exec(f"""def testing_function({', '.join(fields)}): return {' + "," + '.join(fields)}""")
    func = locals()['testing_function']
    validator = make_generic_validator(func)
    assert validator.__qualname__ == 'testing_function'
    assert validator.__name__ == 'testing_function'
    # args: cls, v, values, field, config
    assert validator('_cls_', '_v_', '_values_', '_field_', '_config_') == result


def test_make_generic_validator_kwargs():
    def test_validator(v, **kwargs):
        return ', '.join(f'{k}: {v}' for k, v in kwargs.items())

    validator = make_generic_validator(test_validator)
    assert validator.__name__ == 'test_validator'
    assert validator('_cls_', '_v_', '_vs_', '_f_', '_c_') == 'values: _vs_, field: _f_, config: _c_'


def test_make_generic_validator_invalid():
    def test_validator(v, foobar):
        return foobar

    with pytest.raises(ConfigError) as exc_info:
        make_generic_validator(test_validator)
    assert ': (v, foobar), should be: (value, values, config, field)' in str(exc_info.value)


def test_make_generic_validator_cls_kwargs():
    def test_validator(cls, v, **kwargs):
        return ', '.join(f'{k}: {v}' for k, v in kwargs.items())

    validator = make_generic_validator(test_validator)
    assert validator.__name__ == 'test_validator'
    assert validator('_cls_', '_v_', '_vs_', '_f_', '_c_') == 'values: _vs_, field: _f_, config: _c_'
def test_make_generic_validator_cls_invalid():
    def test_validator(cls, v, foobar):
        return foobar

    with pytest.raises(ConfigError) as exc_info:
        make_generic_validator(test_validator)
    assert ': (cls, v, foobar), should be: (cls, value, values, config, field)' in str(exc_info.value)


def test_make_generic_validator_self():
    def test_validator(self, v):
        return v

    with pytest.raises(ConfigError) as exc_info:
        make_generic_validator(test_validator)
    assert ': (self, v), "self" not permitted as first argument, should be: (cls, value' in str(exc_info.value)


def test_assert_raises_validation_error():
    class Model(BaseModel):
        a: str

        @validator('a')
        def check_a(cls, v):
            assert v == 'a', 'invalid a'
            return v

    Model(a='a')

    with pytest.raises(ValidationError) as exc_info:
        Model(a='snap')
    injected_by_pytest = "\nassert 'snap' == 'a'\n  - a\n  + snap"
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': f'invalid a{injected_by_pytest}', 'type': 'assertion_error'}
    ]


def test_whole():
    with pytest.warns(DeprecationWarning, match='The "whole" keyword argument is deprecated'):

        class Model(BaseModel):
            x: List[int]

            @validator('x', whole=True)
            def check_something(cls, v):
                return v


def test_root_validator():
    root_val_values = []

    class Model(BaseModel):
        a: int = 1
        b: str
        c: str

        @validator('b')
        def repeat_b(cls, v):
            return v * 2

        @root_validator
        def example_root_validator(cls, values):
            root_val_values.append(values)
            if 'snap' in values.get('b', ''):
                raise ValueError('foobar')
            return dict(values, b='changed')

        @root_validator
        def example_root_validator2(cls, values):
            root_val_values.append(values)
            if 'snap' in values.get('c', ''):
                raise ValueError('foobar2')
            return dict(values, c='changed')

    assert Model(a='123', b='bar', c='baz').dict() == {'a': 123, 'b': 'changed', 'c': 'changed'}

    with pytest.raises(ValidationError) as exc_info:
        Model(b='snap dragon', c='snap dragon2')
    assert exc_info.value.errors() == [
        {'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'},
        {'loc': ('__root__',), 'msg': 'foobar2', 'type': 'value_error'},
    ]

    with pytest.raises(ValidationError) as exc_info:
        Model(a='broken', b='bar', c='baz')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]

    assert root_val_values == [
        {'a': 123, 'b': 'barbar', 'c': 'baz'},
        {'a': 123, 'b': 'changed', 'c': 'baz'},
        {'a': 1, 'b': 'snap dragonsnap dragon', 'c': 'snap dragon2'},
        {'a': 1, 'b': 'snap dragonsnap dragon', 'c': 'snap dragon2'},
        {'b': 'barbar', 'c': 'baz'},
        {'b': 'changed', 'c': 'baz'},
    ]


def test_root_validator_pre():
    root_val_values = []

    class Model(BaseModel):
        a: int = 1
        b: str

        @validator('b')
        def repeat_b(cls, v):
            return v * 2

        @root_validator(pre=True)
        def root_validator(cls, values):
            root_val_values.append(values)
            if 'snap' in values.get('b', ''):
                raise ValueError('foobar')
            return {'a': 42, 'b': 'changed'}

    assert Model(a='123', b='bar').dict() == {'a': 42, 'b': 'changedchanged'}

    with pytest.raises(ValidationError) as exc_info:
        Model(b='snap dragon')
    assert root_val_values == [{'a': '123', 'b': 'bar'}, {'b': 'snap dragon'}]
    assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'}]


def test_root_validator_repeat():
    with pytest.raises(errors.ConfigError, match='duplicate validator function'):

        class Model(BaseModel):
            a: int = 1

            @root_validator
            def root_validator_repeated(cls, values):
                return values

            @root_validator  # noqa: F811
            def root_validator_repeated(cls, values):  # noqa: F811
                return values
def test_root_validator_repeat2():
    with pytest.raises(errors.ConfigError, match='duplicate validator function'):

        class Model(BaseModel):
            a: int = 1

            @validator('a')
            def repeat_validator(cls, v):
                return v

            @root_validator(pre=True)  # noqa: F811
            def repeat_validator(cls, values):  # noqa: F811
                return values


def test_root_validator_self():
    with pytest.raises(
        errors.ConfigError, match=r'Invalid signature for root validator root_validator: \(self, values\)'
    ):

        class Model(BaseModel):
            a: int = 1

            @root_validator
            def root_validator(self, values):
                return values


def test_root_validator_extra():
    with pytest.raises(errors.ConfigError) as exc_info:

        class Model(BaseModel):
            a: int = 1

            @root_validator
            def root_validator(cls, values, another):
                return values

    assert str(exc_info.value) == (
        'Invalid signature for root validator root_validator: (cls, values, another), should be: (cls, values).'
    )


def test_root_validator_types():
    root_val_values = None

    class Model(BaseModel):
        a: int = 1
        b: str

        @root_validator
        def root_validator(cls, values):
            nonlocal root_val_values
            root_val_values = cls, values
            return values

        class Config:
            extra = Extra.allow

    assert Model(b='bar', c='wobble').dict() == {'a': 1, 'b': 'bar', 'c': 'wobble'}

    assert root_val_values == (Model, {'a': 1, 'b': 'bar', 'c': 'wobble'})


def test_root_validator_inheritance():
    calls = []

    class Parent(BaseModel):
        pass

        @root_validator
        def root_validator_parent(cls, values):
            calls.append(f'parent validator: {values}')
            return {'extra1': 1, **values}

    class Child(Parent):
        a: int

        @root_validator
        def root_validator_child(cls, values):
            calls.append(f'child validator: {values}')
            return {'extra2': 2, **values}

    assert len(Child.__post_root_validators__) == 2
    assert len(Child.__pre_root_validators__) == 0
    assert Child(a=123).dict() == {'extra2': 2, 'extra1': 1, 'a': 123}
    assert calls == ["parent validator: {'a': 123}", "child validator: {'extra1': 1, 'a': 123}"]


def test_root_validator_returns_none_exception():
    class Model(BaseModel):
        a: int = 1

        @root_validator
        def root_validator_repeated(cls, values):
            return None

    with pytest.raises(TypeError, match='Model values must be a dict'):
        Model()


def reusable_validator(num):
    return num * 2


def test_reuse_global_validators():
    class Model(BaseModel):
        x: int
        y: int

        double_x = validator('x', allow_reuse=True)(reusable_validator)
        double_y = validator('y', allow_reuse=True)(reusable_validator)

    assert dict(Model(x=1, y=1)) == {'x': 2, 'y': 2}


def declare_with_reused_validators(include_root, allow_1, allow_2, allow_3):
    class Model(BaseModel):
        a: str
        b: str

        @validator('a', allow_reuse=allow_1)
        def duplicate_name(cls, v):
            return v

        @validator('b', allow_reuse=allow_2)  # noqa F811
        def duplicate_name(cls, v):  # noqa F811
            return v

        if include_root:

            @root_validator(allow_reuse=allow_3)  # noqa F811
            def duplicate_name(cls, values):  # noqa F811
                return values


@pytest.fixture
def reset_tracked_validators():
    from pydantic.class_validators import _FUNCS

    original_tracked_validators = set(_FUNCS)
    yield
    _FUNCS.clear()
    _FUNCS.update(original_tracked_validators)


@pytest.mark.parametrize('include_root,allow_1,allow_2,allow_3', product(*[[True, False]] * 4))
def test_allow_reuse(include_root, allow_1, allow_2, allow_3, reset_tracked_validators):
    duplication_count = int(not allow_1) + int(not allow_2) + int(include_root and not allow_3)
    if duplication_count > 1:
        with pytest.raises(ConfigError) as exc_info:
            declare_with_reused_validators(include_root, allow_1, allow_2, allow_3)
        assert str(exc_info.value).startswith('duplicate validator function')
    else:
        declare_with_reused_validators(include_root, allow_1, allow_2, allow_3)
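
# A hedged extra sketch, not part of the original suite: a pre root validator
# runs before field validation, so it observes the raw, uncoerced input
# (consistent with test_root_validator_pre above). The test name is hypothetical.
def test_root_validator_pre_sees_raw_input():
    seen = {}

    class Model(BaseModel):
        a: int

        @root_validator(pre=True)
        def capture(cls, values):
            seen.update(values)
            return values

    Model(a='123')
    assert seen == {'a': '123'}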
@pytest.mark.parametrize('validator_classmethod,root_validator_classmethod', product(*[[True, False]] * 2))
def test_root_validator_classmethod(validator_classmethod, root_validator_classmethod, reset_tracked_validators):
    root_val_values = []

    class Model(BaseModel):
        a: int = 1
        b: str

        def repeat_b(cls, v):
            return v * 2

        if validator_classmethod:
            repeat_b = classmethod(repeat_b)
        repeat_b = validator('b')(repeat_b)

        def example_root_validator(cls, values):
            root_val_values.append(values)
            if 'snap' in values.get('b', ''):
                raise ValueError('foobar')
            return dict(values, b='changed')

        if root_validator_classmethod:
            example_root_validator = classmethod(example_root_validator)
        example_root_validator = root_validator(example_root_validator)

    assert Model(a='123', b='bar').dict() == {'a': 123, 'b': 'changed'}

    with pytest.raises(ValidationError) as exc_info:
        Model(b='snap dragon')
    assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'}]

    with pytest.raises(ValidationError) as exc_info:
        Model(a='broken', b='bar')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]

    assert root_val_values == [{'a': 123, 'b': 'barbar'}, {'a': 1, 'b': 'snap dragonsnap dragon'}, {'b': 'barbar'}]


def test_root_validator_skip_on_failure():
    a_called = False

    class ModelA(BaseModel):
        a: int

        @root_validator
        def example_root_validator(cls, values):
            nonlocal a_called
            a_called = True

    with pytest.raises(ValidationError):
        ModelA(a='a')
    assert a_called

    b_called = False

    class ModelB(BaseModel):
        a: int

        @root_validator(skip_on_failure=True)
        def example_root_validator(cls, values):
            nonlocal b_called
            b_called = True

    with pytest.raises(ValidationError):
        ModelB(a='a')
    assert not b_called


def test_assignment_validator_cls():
    validator_calls = 0

    class Model(BaseModel):
        name: str

        class Config:
            validate_assignment = True

        @validator('name')
        def check_foo(cls, value):
            nonlocal validator_calls
            validator_calls += 1
            assert cls == Model
            return value

    m = Model(name='hello')
    m.name = 'goodbye'
    assert validator_calls == 2


def test_literal_validator():
    class Model(BaseModel):
        a: Literal['foo']

    Model(a='foo')

    with pytest.raises(ValidationError) as exc_info:
        Model(a='nope')
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': "unexpected value; permitted: 'foo'",
            'type': 'value_error.const',
            'ctx': {'given': 'nope', 'permitted': ('foo',)},
        }
    ]


def test_literal_validator_non_str_value():
    class Model(BaseModel):
        a: Literal['foo']

    Model(a='foo')

    with pytest.raises(ValidationError) as exc_info:
        Model(a={'bar': 'foo'})
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': "unexpected value; permitted: 'foo'",
            'type': 'value_error.const',
            'ctx': {'given': {'bar': 'foo'}, 'permitted': ('foo',)},
        }
    ]


def test_literal_validator_str_enum():
    class Bar(str, Enum):
        FIZ = 'fiz'
        FUZ = 'fuz'

    class Foo(BaseModel):
        bar: Bar
        barfiz: Literal[Bar.FIZ]
        fizfuz: Literal[Bar.FIZ, Bar.FUZ]

    my_foo = Foo.parse_obj({'bar': 'fiz', 'barfiz': 'fiz', 'fizfuz': 'fiz'})
    assert my_foo.bar is Bar.FIZ
    assert my_foo.barfiz is Bar.FIZ
    assert my_foo.fizfuz is Bar.FIZ

    my_foo = Foo.parse_obj({'bar': 'fiz', 'barfiz': 'fiz', 'fizfuz': 'fuz'})
    assert my_foo.bar is Bar.FIZ
    assert my_foo.barfiz is Bar.FIZ
    assert my_foo.fizfuz is Bar.FUZ
'msg': "unexpected value; permitted: 'foo', 'bar'", 'type': 'value_error.const', 'ctx': {'given': 'nope', 'permitted': ('foo', 'bar')}, } ] def test_union_literal_with_constraints(): class Model(BaseModel, validate_assignment=True): x: Union[Literal[42], Literal['pika']] = Field(allow_mutation=False) m = Model(x=42) with pytest.raises(TypeError): m.x += 1 def test_field_that_is_being_validated_is_excluded_from_validator_values(mocker): check_values = mocker.MagicMock() class Model(BaseModel): foo: str bar: str = Field(alias='pika') baz: str class Config: validate_assignment = True @validator('foo') def validate_foo(cls, v, values): check_values({**values}) return v @validator('bar') def validate_bar(cls, v, values): check_values({**values}) return v model = Model(foo='foo_value', pika='bar_value', baz='baz_value') check_values.reset_mock() assert list(dict(model).items()) == [('foo', 'foo_value'), ('bar', 'bar_value'), ('baz', 'baz_value')] model.foo = 'new_foo_value' check_values.assert_called_once_with({'bar': 'bar_value', 'baz': 'baz_value'}) check_values.reset_mock() model.bar = 'new_bar_value' check_values.assert_called_once_with({'foo': 'new_foo_value', 'baz': 'baz_value'}) # ensure field order is the same assert list(dict(model).items()) == [('foo', 'new_foo_value'), ('bar', 'new_bar_value'), ('baz', 'baz_value')] def test_exceptions_in_field_validators_restore_original_field_value(): class Model(BaseModel): foo: str class Config: validate_assignment = True @validator('foo') def validate_foo(cls, v): if v == 'raise_exception': raise RuntimeError('test error') return v model = Model(foo='foo') with pytest.raises(RuntimeError, match='test error'): model.foo = 'raise_exception' assert model.foo == 'foo' def test_overridden_root_validators(mocker): validate_stub = mocker.stub(name='validate') class A(BaseModel): x: str @root_validator(pre=True) def pre_root(cls, values): validate_stub('A', 'pre') return values @root_validator(pre=False) def post_root(cls, values): validate_stub('A', 'post') return values class B(A): @root_validator(pre=True) def pre_root(cls, values): validate_stub('B', 'pre') return values @root_validator(pre=False) def post_root(cls, values): validate_stub('B', 'post') return values A(x='pika') assert validate_stub.call_args_list == [mocker.call('A', 'pre'), mocker.call('A', 'post')] validate_stub.reset_mock() B(x='pika') assert validate_stub.call_args_list == [mocker.call('B', 'pre'), mocker.call('B', 'post')] def test_list_unique_items_with_optional(): class Model(BaseModel): foo: Optional[List[str]] = Field(None, unique_items=True) bar: conlist(str, unique_items=True) = Field(None) assert Model().dict() == {'foo': None, 'bar': None} assert Model(foo=None, bar=None).dict() == {'foo': None, 'bar': None} assert Model(foo=['k1'], bar=['k1']).dict() == {'foo': ['k1'], 'bar': ['k1']} with pytest.raises(ValidationError) as exc_info: Model(foo=['k1', 'k1'], bar=['k1', 'k1']) assert exc_info.value.errors() == [ {'loc': ('foo',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items'}, {'loc': ('bar',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items'}, ] @pytest.mark.parametrize( 'func,allow_reuse', [ pytest.param(partial, False, id='`partial` and check for reuse'), pytest.param(partial, True, id='`partial` and ignore reuse'), pytest.param(partialmethod, False, id='`partialmethod` and check for reuse'), pytest.param(partialmethod, True, id='`partialmethod` and ignore reuse'), ], ) def test_functool_as_validator( 
@pytest.mark.parametrize(
    'func,allow_reuse',
    [
        pytest.param(partial, False, id='`partial` and check for reuse'),
        pytest.param(partial, True, id='`partial` and ignore reuse'),
        pytest.param(partialmethod, False, id='`partialmethod` and check for reuse'),
        pytest.param(partialmethod, True, id='`partialmethod` and ignore reuse'),
    ],
)
def test_functool_as_validator(
    reset_tracked_validators,
    func: Callable,
    allow_reuse: bool,
):
    def custom_validator(
        cls,
        v: Any,
        allowed: str,
    ) -> Any:
        assert v == allowed, f'Only {allowed} allowed as value; given: {v}'
        return v

    validate = func(custom_validator, allowed='TEXT')

    class TestClass(BaseModel):
        name: str

        _custom_validate = validator('name', allow_reuse=allow_reuse)(validate)

pydantic-1.10.14/tests/test_validators_dataclass.py

from dataclasses import asdict, is_dataclass
from typing import List

import pytest

from pydantic import ValidationError, root_validator, validator
from pydantic.dataclasses import dataclass


def test_simple():
    @dataclass
    class MyDataclass:
        a: str

        @validator('a')
        def change_a(cls, v):
            return v + ' changed'

    assert MyDataclass(a='this is foobar good').a == 'this is foobar good changed'


def test_validate_pre():
    @dataclass
    class MyDataclass:
        a: List[int]

        @validator('a', pre=True)
        def check_a1(cls, v):
            v.append('123')
            return v

        @validator('a')
        def check_a2(cls, v):
            v.append(456)
            return v

    assert MyDataclass(a=[1, 2]).a == [1, 2, 123, 456]


def test_validate_multiple():
    # also test TypeError
    @dataclass
    class MyDataclass:
        a: str
        b: str

        @validator('a', 'b')
        def check_a_and_b(cls, v, field, **kwargs):
            if len(v) < 4:
                raise TypeError(f'{field.alias} is too short')
            return v + 'x'

    assert asdict(MyDataclass(a='1234', b='5678')) == {'a': '1234x', 'b': '5678x'}

    with pytest.raises(ValidationError) as exc_info:
        MyDataclass(a='x', b='x')
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'a is too short', 'type': 'type_error'},
        {'loc': ('b',), 'msg': 'b is too short', 'type': 'type_error'},
    ]


def test_classmethod():
    @dataclass
    class MyDataclass:
        a: str

        @validator('a')
        def check_a(cls, v):
            assert cls is MyDataclass and is_dataclass(MyDataclass)
            return v

    m = MyDataclass(a='this is foobar good')
    assert m.a == 'this is foobar good'
    m.check_a('x')


def test_validate_parent():
    @dataclass
    class Parent:
        a: str

        @validator('a')
        def change_a(cls, v):
            return v + ' changed'

    @dataclass
    class Child(Parent):
        pass

    assert Parent(a='this is foobar good').a == 'this is foobar good changed'
    assert Child(a='this is foobar good').a == 'this is foobar good changed'


def test_inheritance_replace():
    @dataclass
    class Parent:
        a: int

        @validator('a')
        def add_to_a(cls, v):
            return v + 1

    @dataclass
    class Child(Parent):
        @validator('a')
        def add_to_a(cls, v):
            return v + 5

    assert Child(a=0).a == 5


def test_root_validator():
    root_val_values = []

    @dataclass
    class MyDataclass:
        a: int
        b: str

        @validator('b')
        def repeat_b(cls, v):
            return v * 2

        @root_validator
        def root_validator(cls, values):
            root_val_values.append(values)
            if 'snap' in values.get('b', ''):
                raise ValueError('foobar')
            return dict(values, b='changed')

    assert asdict(MyDataclass(a='123', b='bar')) == {'a': 123, 'b': 'changed'}

    with pytest.raises(ValidationError) as exc_info:
        MyDataclass(a=1, b='snap dragon')
    assert root_val_values == [{'a': 123, 'b': 'barbar'}, {'a': 1, 'b': 'snap dragonsnap dragon'}]
    assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'}]

pydantic-1.10.14/tests/test_version.py

import pydantic


def test_version_attribute_is_present():
    assert hasattr(pydantic, '__version__')


def test_version_attribute_is_a_string():
    assert isinstance(pydantic.__version__, str)
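
# A hedged extra check, not part of the original suite: it assumes
# `pydantic.version.VERSION` is the canonical version string that
# `pydantic.__version__` re-exports. The test name is hypothetical.
def test_version_attribute_matches_version_module():
    from pydantic.version import VERSION

    assert pydantic.__version__ == str(VERSION)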