==> pax_global_header <==
comment=d771df52ae016527257db4d67a695bebf0ca69ad

==> pydantic-pydantic-core-d771df5/.cargo/config.toml <==
[build]
rustflags = []

# see https://pyo3.rs/main/building_and_distribution.html#macos
[target.x86_64-apple-darwin]
rustflags = [
    "-C", "link-arg=-undefined",
    "-C", "link-arg=dynamic_lookup",
]

[target.aarch64-apple-darwin]
rustflags = [
    "-C", "link-arg=-undefined",
    "-C", "link-arg=dynamic_lookup",
]

==> pydantic-pydantic-core-d771df5/.codecov.yml <==
codecov:
  require_ci_to_pass: false
coverage:
  precision: 2
  range: [90, 100]
  status:
    patch: false
    project: false
comment:
  layout: 'header, diff, flags, files, footer'

==> pydantic-pydantic-core-d771df5/.github/PULL_REQUEST_TEMPLATE.md <==
## Change Summary

## Related issue number

## Checklist

* [ ] Unit tests for the changes exist
* [ ] Documentation reflects the changes where applicable
* [ ] Pydantic tests pass with this `pydantic-core` (except for expected changes)
* [ ] My PR is ready to review, **please add a comment including the phrase "please review" to assign reviewers**

==> pydantic-pydantic-core-d771df5/.github/actions/build-pgo-wheel/action.yml <==
name: Build PGO wheel
description: Builds a PGO-optimized wheel
inputs:
  interpreter:
    description: 'Interpreter to build the wheel for'
    required: true
  rust-toolchain:
    description: 'Rust toolchain to use'
    required: true
outputs:
  wheel:
    description: 'Path to the built wheel'
    value: ${{ steps.find_wheel.outputs.path }}
runs:
  using: "composite"
  steps:
    - name: prepare self schema
      shell: bash
      # generate up front so that we don't have to do this inside the docker container
      run: uv run python generate_self_schema.py
    - name: prepare profiling directory
      shell: bash
      # making this ahead of the compile ensures that the local user can write to this
      # directory; the maturin action (on linux) runs in docker so would create as root
      run: mkdir -p ${{ github.workspace }}/profdata
    - name: build initial wheel
      uses: PyO3/maturin-action@v1
      with:
        manylinux: auto
        args: >
          --release
          --out pgo-wheel
          --interpreter ${{ inputs.interpreter }}
        rust-toolchain: ${{ inputs.rust-toolchain }}
        docker-options: -e CI
      env:
        RUSTFLAGS: '-Cprofile-generate=${{ github.workspace }}/profdata'
    - name: detect rust host
      run: echo RUST_HOST=$(rustc -Vv | grep host | cut -d ' ' -f 2) >> "$GITHUB_ENV"
      shell: bash
    - name: generate pgo data
      run: |
        uv sync --group testing
        uv pip install pydantic-core --no-index --no-deps --find-links pgo-wheel --force-reinstall
        uv run pytest tests/benchmarks
        RUST_HOST=$(rustc -Vv | grep host | cut -d ' ' -f 2)
        rustup run ${{ inputs.rust-toolchain }} bash -c 'echo LLVM_PROFDATA=$RUSTUP_HOME/toolchains/$RUSTUP_TOOLCHAIN/lib/rustlib/$RUST_HOST/bin/llvm-profdata >> "$GITHUB_ENV"'
      shell: bash
    - name: merge pgo data
      # pwsh handles paths on windows better, and works well enough on unix for this step
      run: ${{ env.LLVM_PROFDATA }} merge -o ${{ github.workspace }}/merged.profdata ${{ github.workspace }}/profdata
      shell: pwsh
    - name: build pgo-optimized wheel
      uses: PyO3/maturin-action@v1
      with:
        manylinux: auto
        args: >
          --release
          --out dist
          --interpreter ${{ inputs.interpreter }}
        rust-toolchain: ${{ inputs.rust-toolchain }}
        docker-options: -e CI
      env:
        RUSTFLAGS: '-Cprofile-use=${{ github.workspace }}/merged.profdata'
    - name: find built wheel
      id: find_wheel
      run: echo "path=$(ls dist/*.whl)" | tee -a "$GITHUB_OUTPUT"
      shell: bash

==> pydantic-pydantic-core-d771df5/.github/check_version.py <==
#!/usr/bin/env python3
"""
Check that the version in Cargo.toml matches the version from the `GITHUB_REF` environment variable.
"""
import os
import re
import sys
from pathlib import Path


def main() -> int:
    cargo_path = Path('Cargo.toml')
    if not cargo_path.is_file():
        print(f'✖ path "{cargo_path}" does not exist')
        return 1

    version_ref = os.getenv('GITHUB_REF')
    if version_ref:
        version = re.sub('^refs/tags/v*', '', version_ref.lower())
    else:
        print('✖ "GITHUB_REF" env variable not found')
        return 1

    # convert from python pre-release version to rust pre-release version,
    # e.g. '2.27.2b1' -> '2.27.2-beta1'
    version = version.replace('a', '-alpha').replace('b', '-beta')

    version_regex = re.compile(r"""^version ?= ?(["'])(.+)\1""", re.M)
    cargo_content = cargo_path.read_text()
    match = version_regex.search(cargo_content)
    if not match:
        print(f'✖ {version_regex!r} not found in {cargo_path}')
        return 1

    cargo_version = match.group(2)
    if cargo_version == version:
        print(f'✓ GITHUB_REF version matches {cargo_path} version "{cargo_version}"')
        return 0
    else:
        print(f'✖ GITHUB_REF version "{version}" does not match {cargo_path} version "{cargo_version}"')
        return 1


if __name__ == '__main__':
    sys.exit(main())

==> pydantic-pydantic-core-d771df5/.github/dependabot.yml <==
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "monthly"
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "monthly"
    groups:
      python-packages:
        patterns:
          - "*"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"

==> pydantic-pydantic-core-d771df5/.github/workflows/ci.yml <==
name: ci

on:
  push:
    branches:
      - main
    tags:
      - '**'
  pull_request: {}

env:
  COLUMNS: 150
  UV_PYTHON: 3.13

jobs:
  coverage:
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v4
      - name: install rust nightly
        uses: dtolnay/rust-toolchain@nightly
      - id: cache-rust
        name: cache rust
        uses: Swatinem/rust-cache@v2
      - run: cargo install rustfilt coverage-prepare
        if: steps.cache-rust.outputs.cache-hit != 'true'
      - run: rustup component add llvm-tools-preview
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install deps
        run: uv sync --group testing
      - run: rustc --version --verbose
      - run: make build-dev
        env:
          RUST_BACKTRACE: 1
          RUSTFLAGS: '-C instrument-coverage'
      - run: uv pip freeze
      - run: uv run coverage run -m pytest
      - run: ls -lha
      - run: uv run coverage xml
      - run: coverage-prepare lcov python/pydantic_core/*.so
      - uses: codecov/codecov-action@v4

  # See https://github.com/PyO3/pyo3/discussions/2781
  # tests intermittently segfault with pypy and cpython 3.7 when using `coverage run ...`, hence separate job
  test-python:
    name: test ${{ matrix.python-version }}
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - '3.8'
          - '3.9'
          - '3.10'
          - '3.11'
          - '3.12'
          - '3.13'
          - 'pypy3.9'
          - 'pypy3.10'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: install rust stable
        uses: dtolnay/rust-toolchain@stable
      - name: cache rust
        uses: Swatinem/rust-cache@v2
        with:
          key: test-v3
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install deps
        run: uv sync --group testing
      - run: uv pip install -e .
        env:
          RUST_BACKTRACE: 1
      - run: uv pip freeze
      - run: uv run pytest
        env:
          HYPOTHESIS_PROFILE: slow
    env:
      UV_PYTHON: ${{ matrix.python-version }}

  test-os:
    name: test on ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu, macos, windows]
    runs-on: ${{ matrix.os }}-latest
    steps:
      - uses: actions/checkout@v4
      - name: install rust stable
        uses: dtolnay/rust-toolchain@stable
      - name: cache rust
        uses: Swatinem/rust-cache@v2
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install deps
        run: uv sync --group testing
      - run: uv pip install -e .
        env:
          RUST_BACKTRACE: 1
      - run: uv pip freeze
      - run: uv run pytest
      - run: cargo test

  test-msrv:
    name: test MSRV
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install deps
        run: uv sync --group testing
      - name: resolve MSRV
        id: resolve-msrv
        run: echo MSRV=`uv run python -c 'import tomllib; print(tomllib.load(open("Cargo.toml", "rb"))["package"]["rust-version"])'` >> $GITHUB_OUTPUT
      - name: install rust MSRV
        uses: dtolnay/rust-toolchain@master
        with:
          toolchain: ${{ steps.resolve-msrv.outputs.MSRV }}
      - name: cache rust
        uses: Swatinem/rust-cache@v2
      - run: uv pip install -e .
        env:
          RUST_BACKTRACE: 1
      - run: uv pip freeze
      - run: uv run pytest
      - run: cargo test

  # test with a debug build as it picks up errors which optimised release builds do not
  test-debug:
    name: test-debug ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - '3.13'
          - 'pypy3.10'
    steps:
      - uses: actions/checkout@v4
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install rust stable
        uses: dtolnay/rust-toolchain@stable
      - name: cache rust
        uses: Swatinem/rust-cache@v2
      - name: install deps
        run: uv sync --group testing
      - run: make build-dev
      - run: uv pip freeze
      - run: uv run pytest
    env:
      UV_PYTHON: ${{ matrix.python-version }}

  test-pydantic-integration:
    runs-on: ubuntu-latest
    continue-on-error: true
    steps:
      - uses: actions/checkout@v4
        with:
          repository: pydantic/pydantic
          path: pydantic
      - uses: actions/checkout@v4
        with:
          path: pydantic-core
      - name: install rust stable
        uses: dtolnay/rust-toolchain@stable
      - name: cache rust
        uses: Swatinem/rust-cache@v2
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install deps
        run: |
          uv sync --extra timezone
          uv pip install maturin pip
          uv run bash -c 'cd ../pydantic-core && maturin develop'
        working-directory: pydantic
      - run: uv --version && uv pip list
        working-directory: pydantic
      # Run pytest with lax xfail because we often add tests to pydantic
      # which xfail on a pending release of pydantic-core
      - run: uv run pytest --override-ini=xfail_strict=False
        working-directory: pydantic
        env:
          PYDANTIC_PRIVATE_ALLOW_UNHANDLED_SCHEMA_TYPES: 1
    env:
      UV_PROJECT_ENVIRONMENT: ${{ github.workspace }}/.venv

  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: install rust stable
        uses: dtolnay/rust-toolchain@stable
        with:
          components: rustfmt, clippy
      - name: cache rust
        uses: Swatinem/rust-cache@v2
      # used to lint js code
      - uses: actions/setup-node@v4
        with:
          node-version: '18'
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install deps
        run: |
          uv sync --group linting
          make build-dev
          uv pip freeze
      - run: make lint
      - run: make pyright
      - run: npm install
      - run: npm run lint

  bench:
    name: rust benchmarks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: install rust nightly
        uses: dtolnay/rust-toolchain@nightly
      - name: cache rust
        uses: Swatinem/rust-cache@v2
      - uses: actions/setup-python@v5
        with:
          python-version: '3.13'
      - run: pip install typing_extensions
      - run: cargo bench

  build-wasm-emscripten:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: install rust nightly
        uses: dtolnay/rust-toolchain@nightly
        with:
          components: rust-src
          targets: wasm32-unknown-emscripten
      - name: cache rust
        uses: Swatinem/rust-cache@v2
      - uses: mymindstorm/setup-emsdk@v14
        with:
          # NOTE!: as per https://github.com/pydantic/pydantic-core/pull/149 this version needs to match the version
          # in node_modules/pyodide/repodata.json, to get the version, run:
          # `cat node_modules/pyodide/repodata.json | python -m json.tool | rg platform`
          version: '3.1.58'
          actions-cache-folder: emsdk-cache
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install deps
        run: uv sync --group wasm
      - name: build wheels
        run: make build-wasm
      - uses: actions/setup-node@v4
        with:
          node-version: '18'
      - run: npm install
      - run: npm run test
      - run: |
          ls -lh dist/
          ls -l dist/
      - uses: actions/upload-artifact@v4
        with:
          name: wasm_wheels
          path: dist
    env:
      UV_PYTHON: 3.12

  # https://github.com/marketplace/actions/alls-green#why used for branch protection checks
  check:
    if: always()
    needs: [coverage, test-python, test-os, test-debug, lint, bench, build-wasm-emscripten]
    runs-on: ubuntu-latest
    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
          allowed-failures: coverage

  build-sdist:
    name: build sdist
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.13'
      - uses: PyO3/maturin-action@v1
        with:
          command: sdist
          args: --out dist
          rust-toolchain: stable
      - uses: actions/upload-artifact@v4
        with:
          name: pypi_files_sdist
          path: dist

  build:
    name: build on ${{ matrix.os }} (${{ matrix.target }} - ${{ matrix.interpreter || 'all' }}${{ matrix.os == 'linux' && format(' - {0}', matrix.manylinux == 'auto' && 'manylinux' || matrix.manylinux) || '' }})
    # only run on push to main and on release
    if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'Full Build')
    strategy:
      fail-fast: false
      matrix:
        os: [linux, macos, windows]
        target: [x86_64, aarch64]
        manylinux: [auto]
        include:
          # manylinux for various platforms, plus x86_64 pypy
          - os: linux
            manylinux: auto
            target: i686
          - os: linux
            manylinux: auto
            target: aarch64
          - os: linux
            manylinux: auto
            target: armv7
            interpreter: 3.8 3.9 3.10 3.11 3.12 3.13
          - os: linux
            manylinux: auto
            target: ppc64le
            interpreter: 3.8 3.9 3.10 3.11 3.12 3.13
          - os: linux
            manylinux: auto
            target: s390x
            interpreter: 3.8 3.9 3.10 3.11 3.12 3.13
          - os: linux
            manylinux: auto
            target: x86_64
            interpreter: pypy3.9 pypy3.10

          # musllinux
          - os: linux
            manylinux: musllinux_1_1
            target: x86_64
          - os: linux
            manylinux: musllinux_1_1
            target: aarch64
          - os: linux
            manylinux: musllinux_1_1
            target: armv7

          # macos;
          # all versions x86_64
          # arm pypy and older pythons which can't be run on the arm hardware for PGO
          - os: macos
            target: x86_64
          - os: macos
            target: aarch64
            interpreter: 3.8 3.9 pypy3.9 pypy3.10

          # windows;
          # x86_64 pypy builds are not PGO optimized
          # i686 not supported by pypy
          # aarch64 only 3.11 and up, also not PGO optimized
          - os: windows
            target: x86_64
            interpreter: pypy3.9 pypy3.10
          - os: windows
            target: i686
            python-architecture: x86
            interpreter: 3.8 3.9 3.10 3.11 3.12 3.13
          - os: windows
            target: aarch64
            interpreter: 3.11 3.12 3.13

        exclude:
          # See above; disabled for now.
          - os: windows
            target: aarch64

    runs-on: ${{ (matrix.os == 'linux' && 'ubuntu') || matrix.os }}-latest
    steps:
      - uses: actions/checkout@v4
      - name: set up python
        uses: actions/setup-python@v5
        with:
          python-version: '3.13'
          architecture: ${{ matrix.python-architecture || 'x64' }}
      - run: pip install -U twine 'ruff==0.5.0' typing_extensions
      # generate self-schema now, so we don't have to do so inside docker in maturin build
      - run: python generate_self_schema.py
      - name: build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: ${{ matrix.manylinux }}
          args: --release --out dist --interpreter ${{ matrix.interpreter || '3.8 3.9 3.10 3.11 3.12 3.13 pypy3.9 pypy3.10' }}
          # Limit windows builds to 1.77 to keep Windows 7 support.
          # FIXME: Unpin when Python 3.8 support is dropped. (3.9 requires Windows 10)
          rust-toolchain: ${{ (matrix.os == 'windows' && '1.77') || 'stable' }}
          docker-options: -e CI
      - run: ${{ (matrix.os == 'windows' && 'dir') || 'ls -lh' }} dist/
      - run: twine check --strict dist/*
      - uses: actions/upload-artifact@v4
        with:
          name: pypi_files_${{ matrix.os }}_${{ matrix.target }}_${{ matrix.interpreter || 'all' }}_${{ matrix.manylinux }}
          path: dist

  build-pgo:
    name: build pgo-optimized on ${{ matrix.os }} / ${{ matrix.interpreter }}
    # only run on push to main and on release
    if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'Full Build')
    strategy:
      fail-fast: false
      matrix:
        os: [linux, windows, macos]
        interpreter: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
        include:
          # standard runners with override for macos arm
          - os: linux
            runs-on: ubuntu-latest
          - os: windows
            ls: dir
            runs-on: windows-latest
          - os: macos
            runs-on: macos-latest-xlarge
        exclude:
          # macos arm only supported from 3.10 and up
          - os: macos
            interpreter: '3.8'
          - os: macos
            interpreter: '3.9'
    runs-on: ${{ matrix.runs-on }}
    steps:
      - uses: actions/checkout@v4
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: install rust stable
        id: rust-toolchain
        uses: dtolnay/rust-toolchain@master
        with:
          components: llvm-tools
          # Limit windows builds to 1.77 to keep Windows 7 support.
          # FIXME: Unpin when Python 3.8 support is dropped. (3.9 requires Windows 10)
          toolchain: ${{ (matrix.os == 'windows' && '1.77') || 'stable' }}
      - name: Build PGO wheel
        id: pgo-wheel
        uses: ./.github/actions/build-pgo-wheel
        with:
          interpreter: ${{ env.UV_PYTHON }}
          rust-toolchain: ${{ steps.rust-toolchain.outputs.name }}
      - run: ${{ matrix.ls || 'ls -lh' }} dist/
      - uses: actions/upload-artifact@v4
        with:
          name: pypi_files_${{ matrix.os }}_${{ matrix.interpreter }}
          path: dist
    env:
      UV_PYTHON: ${{ matrix.interpreter }}

  inspect-pypi-assets:
    needs: [build, build-sdist, build-pgo]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: get dist artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: pypi_files_*
          merge-multiple: true
          path: dist
      - name: list dist files
        run: |
          ls -lh dist/
          ls -l dist/
          echo "`ls dist | wc -l` files"
      - name: extract and list sdist file
        run: |
          mkdir sdist-files
          tar -xvf dist/*.tar.gz -C sdist-files
          tree -a sdist-files
      - name: extract and list wheel file
        run: |
          ls dist/*cp310-manylinux*x86_64.whl | head -n 1
          python -m zipfile --list `ls dist/*cp310-manylinux*x86_64.whl | head -n 1`

  test-builds-arch:
    name: test build on ${{ matrix.target }}-${{ matrix.distro }}
    needs: [build]
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        target: [aarch64, armv7, s390x, ppc64le]
        distro: ['ubuntu22.04']
        include:
          - target: aarch64
            distro: alpine_latest
    steps:
      - uses: actions/checkout@v4
      - name: get dist artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: pypi_files_linux_*
          merge-multiple: true
          path: dist
      - uses: uraimo/run-on-arch-action@v2.8.1
        name: install & test
        with:
          arch: ${{ matrix.target }}
          distro: ${{ matrix.distro }}
          githubToken: ${{ github.token }}
          install: |
            set -x
            if command -v apt-get &> /dev/null; then
              echo "installing python & pip with apt-get..."
              apt-get update
              apt-get install -y --no-install-recommends python3 python3-pip python3-venv git curl
            else
              echo "installing python & pip with apk..."
              apk update
              apk add python3 py3-pip git curl
            fi
          run: |
            set -x
            curl -LsSf https://astral.sh/uv/install.sh | sh
            source $HOME/.local/bin/env
            uv sync --frozen --group testing --no-install-project
            uv pip install pydantic-core --no-index --no-deps --find-links dist --force-reinstall
            uv run --no-sync pytest --ignore=tests/test_docstrings.py
            uv run --no-sync python -c 'import pydantic_core._pydantic_core; print(pydantic_core._pydantic_core.__version__)'

  test-builds-os:
    name: test build on ${{ matrix.os }}
    needs: [build, build-pgo]
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu, macos, windows]
    runs-on: ${{ matrix.os }}-latest
    steps:
      - uses: actions/checkout@v4
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: get dist artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: pypi_files_*
          merge-multiple: true
          path: dist
      - run: uv sync --group testing
      - run: uv pip install pydantic-core --no-index --no-deps --find-links dist --force-reinstall
      - run: uv run pytest --ignore=tests/test_docstrings.py

  release:
    needs: [test-builds-arch, test-builds-os, build-sdist, check]
    if: success() && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: set up python
        uses: actions/setup-python@v5
        with:
          python-version: '3.13'
      - run: pip install -U twine
      - name: check package version
        run: python .github/check_version.py
      - name: get dist artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: pypi_files_*
          merge-multiple: true
          path: dist
      - run: twine check --strict dist/*
      - name: upload to pypi
        run: twine upload dist/*
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.pypi_token }}
      - name: get wasm dist artifacts
        uses: actions/download-artifact@v4
        with:
          name: wasm_wheels
          path: wasm
      - name: upload to github release
        uses: softprops/action-gh-release@v2
        with:
          files: |
            wasm/*.whl
          prerelease: ${{ contains(github.ref, 'alpha') || contains(github.ref, 'beta') }}

==> pydantic-pydantic-core-d771df5/.github/workflows/codspeed.yml <==
name: codspeed

on:
  push:
    branches:
      - main
  pull_request:
  # `workflow_dispatch` allows CodSpeed to trigger backtest
  # performance analysis in order to generate initial data.
  workflow_dispatch:

env:
  UV_FROZEN: true
  UV_PYTHON: 3.13

jobs:
  benchmarks:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v4
      # Using this action is still necessary for CodSpeed to work:
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.UV_PYTHON }}
      - name: install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
      - name: Install deps
        run: |
          uv sync --group testing
          uv pip uninstall pytest-speed
          uv pip install pytest-benchmark==4.0.0 pytest-codspeed
      - name: Install rust stable
        id: rust-toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          components: llvm-tools
      - name: Cache rust
        uses: Swatinem/rust-cache@v2
      - name: Build PGO wheel
        id: pgo-wheel
        uses: ./.github/actions/build-pgo-wheel
        with:
          interpreter: ${{ env.UV_PYTHON }}
          rust-toolchain: ${{ steps.rust-toolchain.outputs.name }}
        env:
          # make sure profiling information is present
          CARGO_PROFILE_RELEASE_DEBUG: "line-tables-only"
          CARGO_PROFILE_RELEASE_STRIP: false
      - name: Install PGO wheel
        run: uv pip install ${{ steps.pgo-wheel.outputs.wheel }} --force-reinstall
      - name: Run CodSpeed benchmarks
        uses: CodSpeedHQ/action@v3
        with:
          run: uv run --group=codspeed pytest tests/benchmarks/ --codspeed

==> pydantic-pydantic-core-d771df5/.gitignore <==
*.py[cod]
*.egg-info/
.coverage
.python-version
package-lock.json
test.py
.cache/
.hypothesis/
build/
dist/
docs/_build/
htmlcov/
node_modules/
/.benchmarks/
/.idea/
/.pytest_cache/
/.vscode/
/env*/
/env/
/flame/
/pytest-speed/
/sandbox/
/site/
/target/
/worktree/
/.editorconfig
/*.lcov
/*.profdata
/*.profraw
/foobar.py
/python/pydantic_core/*.so
/src/self_schema.py

# samply
/profile.json

==> pydantic-pydantic-core-d771df5/.mypy-stubtest-allowlist <==
# TODO: don't want to expose this staticmethod, requires https://github.com/PyO3/pyo3/issues/2384
pydantic_core._pydantic_core.PydanticUndefinedType.new
# See #1540 for discussion
pydantic_core._pydantic_core.from_json
pydantic_core._pydantic_core.SchemaValidator.validate_python
pydantic_core._pydantic_core.SchemaValidator.validate_json
pydantic_core._pydantic_core.SchemaValidator.validate_strings
# the `warnings` kwarg for SchemaSerializer functions has custom logic
pydantic_core._pydantic_core.SchemaSerializer.to_python
pydantic_core._pydantic_core.SchemaSerializer.to_json

==> pydantic-pydantic-core-d771df5/.pre-commit-config.yaml <==
fail_fast: true

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.0.1
    hooks:
      - id: check-yaml
      - id: check-toml
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: check-added-large-files
  - repo: local
    hooks:
      - id: lint-python
        name: Lint Python
        entry: make lint-python
        types: [python]
        language: system
        pass_filenames: false
      - id: typecheck-python
        name: Typecheck Python
        entry: make pyright
        types: [python]
        language: system
        pass_filenames: false
      - id: lint-rust
        name: Lint Rust
        entry: make lint-rust
        types: [rust]
        language: system
        pass_filenames: false

==> pydantic-pydantic-core-d771df5/.rustfmt.toml <==
max_width = 120

==> pydantic-pydantic-core-d771df5/Cargo.lock <==
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "ahash"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = ["cfg-if", "getrandom", "once_cell", "version_check", "zerocopy"]

[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
dependencies = ["memchr"]

[[package]]
name = "autocfg"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"

[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"

[[package]]
name = "bitvec"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
dependencies = ["funty", "radium", "tap", "wyz"]

[[package]]
name = "cc"
version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac367972e516d45567c7eafc73d24e1c193dcf200a8d94e9db7b3d38b349572d"

[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "displaydoc"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = ["proc-macro2", "quote", "syn"]

[[package]]
name = "enum_dispatch"
version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd"
dependencies = ["once_cell", "proc-macro2", "quote", "syn"]

[[package]]
name = "equivalent"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"

[[package]]
name = "form_urlencoded"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
dependencies = ["percent-encoding"]

[[package]]
name = "funty"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"

[[package]]
name = "getrandom"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
dependencies = ["cfg-if", "libc", "wasi"]

[[package]]
name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"

[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"

[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "icu_collections"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
dependencies = ["displaydoc", "yoke", "zerofrom", "zerovec"]

[[package]]
name = "icu_locid"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
dependencies = ["displaydoc", "litemap", "tinystr", "writeable", "zerovec"]

[[package]]
name = "icu_locid_transform"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e"
dependencies = ["displaydoc", "icu_locid", "icu_locid_transform_data", "icu_provider", "tinystr", "zerovec"]

[[package]]
name = "icu_locid_transform_data"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"

[[package]]
name = "icu_normalizer"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f"
dependencies = ["displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "utf16_iter", "utf8_iter", "write16", "zerovec"]

[[package]]
name = "icu_normalizer_data"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"

[[package]]
name = "icu_properties"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5"
dependencies = ["displaydoc", "icu_collections", "icu_locid_transform", "icu_properties_data", "icu_provider", "tinystr", "zerovec"]

[[package]]
name = "icu_properties_data"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"

[[package]]
name = "icu_provider"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9"
dependencies = ["displaydoc", "icu_locid", "icu_provider_macros", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", "zerovec"]

[[package]]
name = "icu_provider_macros"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
dependencies = ["proc-macro2", "quote", "syn"]

[[package]]
name = "idna"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
dependencies = ["unicode-bidi", "unicode-normalization"]

[[package]]
name = "idna"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd69211b9b519e98303c015e21a007e293db403b6c85b9b124e133d25e242cdd"
dependencies = ["icu_normalizer", "icu_properties", "smallvec", "utf8_iter"]

[[package]]
name = "indexmap"
version = "2.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
dependencies = ["equivalent", "hashbrown"]

[[package]]
name = "indoc"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5"

[[package]]
name = "itoa"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"

[[package]]
name = "jiter"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07f69a121b68af57bc10f151f3f67444a64d1d3a0eb48b042801ea917a38dd25"
dependencies = ["ahash", "bitvec", "lexical-parse-float", "num-bigint", "num-traits", "pyo3", "pyo3-build-config", "smallvec"]

[[package]]
name = "lexical-parse-float"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f"
dependencies = ["lexical-parse-integer", "lexical-util", "static_assertions"]

[[package]]
name = "lexical-parse-integer"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9"
dependencies = ["lexical-util", "static_assertions"]

[[package]]
name = "lexical-util"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc"
dependencies = ["static_assertions"]

[[package]]
name = "libc"
version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"

[[package]]
name = "litemap"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704"

[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"

[[package]]
name = "memoffset"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
dependencies = ["autocfg"]

[[package]]
name = "num-bigint"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = ["num-integer", "num-traits"]

[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = ["num-traits"]

[[package]]
name = "num-traits"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = ["autocfg"]

[[package]]
name = "once_cell"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"

[[package]]
name = "percent-encoding"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"

[[package]]
name = "portable-atomic"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"

[[package]]
name = "proc-macro2"
version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
dependencies = ["unicode-ident"]

[[package]]
name = "pydantic-core"
version = "2.27.2"
dependencies = ["ahash", "base64", "enum_dispatch", "hex", "idna 1.0.2", "jiter", "num-bigint", "pyo3", "pyo3-build-config", "python3-dll-a", "regex", "serde", "serde_json", "smallvec", "speedate", "strum", "strum_macros", "url", "uuid", "version_check"]

[[package]]
name = "pyo3"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f402062616ab18202ae8319da13fa4279883a2b8a9d9f83f20dbade813ce1884"
dependencies = ["cfg-if", "indoc", "libc", "memoffset", "num-bigint", "once_cell", "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", "unindent"]

[[package]]
name = "pyo3-build-config"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b14b5775b5ff446dd1056212d778012cbe8a0fbffd368029fd9e25b514479c38"
dependencies = ["once_cell", "python3-dll-a", "target-lexicon"]

[[package]]
name = "pyo3-ffi"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ab5bcf04a2cdcbb50c7d6105de943f543f9ed92af55818fd17b660390fc8636"
dependencies = ["libc", "pyo3-build-config"]

[[package]]
name = "pyo3-macros"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fd24d897903a9e6d80b968368a34e1525aeb719d568dba8b3d4bfa5dc67d453"
dependencies = ["proc-macro2", "pyo3-macros-backend", "quote", "syn"]

[[package]]
name = "pyo3-macros-backend"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36c011a03ba1e50152b4b394b479826cad97e7a21eb52df179cd91ac411cbfbe"
dependencies = ["heck", "proc-macro2", "pyo3-build-config", "quote", "syn"]

[[package]]
name = "python3-dll-a"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd0b78171a90d808b319acfad166c4790d9e9759bbc14ac8273fe133673dd41b"
dependencies = ["cc"]

[[package]]
name = "quote"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = ["proc-macro2"]

[[package]]
name = "radium"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"

[[package]]
name = "regex"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = ["aho-corasick", "memchr", "regex-automata", "regex-syntax"]

[[package]]
name = "regex-automata"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
dependencies = ["aho-corasick", "memchr", "regex-syntax"]

[[package]]
name = "regex-syntax"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"

[[package]]
name = "rustversion"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6"

[[package]]
name = "ryu"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"

[[package]]
name = "serde"
version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
dependencies = ["serde_derive"]

[[package]]
name = "serde_derive"
version = "1.0.214"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
dependencies = ["proc-macro2", "quote", "syn"]

[[package]]
name = "serde_json"
version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
dependencies = ["indexmap", "itoa", "memchr", "ryu", "serde"]

[[package]]
name = "smallvec"
version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"

[[package]]
name = "speedate"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a5e7adf4e07e7de39a64d77962ca14a09165e592d42d0c9f9acadb679f4f937"
dependencies = ["strum", "strum_macros"]

[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"

[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"

[[package]]
name = "strum"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
dependencies = ["strum_macros"]

[[package]]
name = "strum_macros"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
dependencies = ["heck", "proc-macro2", "quote", "rustversion", "syn"]

[[package]]
name = "syn"
version = "2.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83540f837a8afc019423a8edb95b52a8effe46957ee402287f4292fae35be021"
dependencies = ["proc-macro2", "quote", "unicode-ident"]

[[package]]
name = "synstructure"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = ["proc-macro2", "quote", "syn"]

[[package]]
name = "tap"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"

[[package]]
name = "target-lexicon"
version = "0.12.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f"

[[package]]
name = "tinystr"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
dependencies = ["displaydoc", "zerovec"]

[[package]]
name = "tinyvec"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82"
dependencies = ["tinyvec_macros"]

[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

[[package]]
name = "unicode-bidi"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"

[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

[[package]]
name = "unicode-normalization"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5"
dependencies = ["tinyvec"]

[[package]]
name = "unindent"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"

[[package]]
name = "url"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
dependencies = ["form_urlencoded", "idna 0.5.0", "percent-encoding"]

[[package]]
name = "utf16_iter"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246"

[[package]]
name = "utf8_iter"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"

[[package]]
name = "uuid"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a"

[[package]]
name = "version_check"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"

[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"

[[package]]
name = "write16"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936"

[[package]]
name = "writeable"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"

[[package]]
name = "wyz"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
dependencies = ["tap"]

[[package]]
name = "yoke"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5"
dependencies = ["serde", "stable_deref_trait", "yoke-derive", "zerofrom"]

[[package]]
name = "yoke-derive"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95"
dependencies = ["proc-macro2", "quote", "syn", "synstructure"]

[[package]]
name = "zerocopy"
version = "0.7.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087"
dependencies = ["zerocopy-derive"]

[[package]]
name = "zerocopy-derive"
version = "0.7.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b"
dependencies = ["proc-macro2", "quote", "syn"]

[[package]]
name = "zerofrom"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55"
dependencies = ["zerofrom-derive"]

[[package]]
name = "zerofrom-derive"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5"
dependencies = ["proc-macro2", "quote", "syn", "synstructure"]

[[package]]
name = "zerovec"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
dependencies = ["yoke", "zerofrom", "zerovec-derive"]

[[package]]
name = "zerovec-derive"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
dependencies = ["proc-macro2", "quote", "syn"]

==> pydantic-pydantic-core-d771df5/Cargo.toml <==
[package]
name = "pydantic-core"
version = "2.27.2"
edition = "2021"
license = "MIT"
homepage = "https://github.com/pydantic/pydantic-core"
repository = "https://github.com/pydantic/pydantic-core.git"
readme = "README.md"
include = [
    "/pyproject.toml",
    "/README.md",
    "/LICENSE",
    "/Makefile",
    "/build.rs",
    "/generate_self_schema.py",
    "/rust-toolchain",
    "/src",
    "!/src/self_schema.py",
    "/python/pydantic_core",
    "/tests",
    "/.cargo",
    "!__pycache__",
    "!tests/.hypothesis",
    "!tests/.pytest_cache",
    "!*.so",
]
rust-version = "1.75"

[dependencies]
# TODO it would be very nice to remove the "py-clone" feature as it can panic,
# but needs a bit of work to make sure it's not used in the codebase
pyo3 = { version = "0.22.6", features = ["generate-import-lib", "num-bigint", "py-clone"] }
regex = "1.11.1"
strum = { version = "0.26.3", features = ["derive"] }
strum_macros = "0.26.4"
serde_json = { version = "1.0.132", features = ["arbitrary_precision", "preserve_order"] }
enum_dispatch = "0.3.13"
serde = { version = "1.0.214", features = ["derive"] }
speedate = "0.15.0"
smallvec = "1.13.2"
ahash = "0.8.10"
url = "2.5.0"
# idna is already required by url, added here to be explicit
idna = "1.0.2"
base64 = "0.22.1"
num-bigint = "0.4.6"
python3-dll-a = "0.2.10"
uuid = "1.11.0"
jiter = { version = "0.7.1", features = ["python"] }
hex = "0.4.3"

[lib]
name = "_pydantic_core"
crate-type = ["cdylib", "rlib"]

[features]
# must be enabled when building with `cargo build`, maturin enables this automatically
extension-module = ["pyo3/extension-module"]

[profile.release]
lto = "fat"
codegen-units = 1
strip = true

[profile.bench]
debug = true
strip = false

# This is separate to benchmarks because `bench` ends up building testing
# harnesses into code, as it's a special cargo profile.
[profile.profiling]
inherits = "release"
debug = true
strip = false

[dev-dependencies]
pyo3 = { version = "0.22.6", features = ["auto-initialize"] }

[build-dependencies]
version_check = "0.9.5"
# used where logic has to be version/distribution specific, e.g. pypy
pyo3-build-config = { version = "0.22.6" }

[lints.clippy]
dbg_macro = "warn"
print_stdout = "warn"

# in general we lint against the pedantic group, but we will whitelist
# certain lints which we don't want to enforce (for now)
pedantic = { level = "warn", priority = -1 }
cast_possible_truncation = "allow"
cast_possible_wrap = "allow"
cast_precision_loss = "allow"
cast_sign_loss = "allow"
doc_markdown = "allow"
float_cmp = "allow"
fn_params_excessive_bools = "allow"
if_not_else = "allow"
manual_let_else = "allow"
match_bool = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
needless_pass_by_value = "allow"
similar_names = "allow"
single_match_else = "allow"
struct_excessive_bools = "allow"
too_many_lines = "allow"
unnecessary_wraps = "allow"
unused_self = "allow"
used_underscore_binding = "allow"

==> pydantic-pydantic-core-d771df5/LICENSE <==
The MIT License (MIT)

Copyright (c) 2022 Samuel Colvin

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

==> pydantic-pydantic-core-d771df5/Makefile <==
.DEFAULT_GOAL := all

sources = python/pydantic_core tests generate_self_schema.py wasm-preview/run_tests.py

# using pip install cargo (via maturin via pip) doesn't get the tty handle
# so doesn't render color without some help
export CARGO_TERM_COLOR=$(shell (test -t 0 && echo "always") || echo "auto")

# maturin develop only makes sense inside a virtual env, is otherwise
# more or less equivalent to pip install -e just a little nicer
USE_MATURIN = $(shell [ "$$VIRTUAL_ENV" != "" ] && (which maturin))

.PHONY: .uv  ## Check that uv is installed
.uv:
	@uv -V || echo 'Please install uv: https://docs.astral.sh/uv/getting-started/installation/'

.PHONY: .pre-commit  ## Check that pre-commit is installed
.pre-commit:
	@pre-commit -V || echo 'Please install pre-commit: https://pre-commit.com/'

.PHONY: install
install: .uv .pre-commit
	uv pip install -U wheel
	uv sync --frozen --group all
	uv pip install -v -e .
	pre-commit install

.PHONY: rebuild-lockfiles  ## Rebuild lockfiles from scratch, updating all dependencies
rebuild-lockfiles: .uv
	uv lock --upgrade

.PHONY: install-rust-coverage
install-rust-coverage:
	cargo install rustfilt coverage-prepare
	rustup component add llvm-tools-preview

.PHONY: install-pgo
install-pgo:
	rustup component add llvm-tools-preview

.PHONY: build-dev
build-dev:
	@rm -f python/pydantic_core/*.so
ifneq ($(USE_MATURIN),)
	uv run maturin develop
else
	uv pip install --force-reinstall -v -e . --config-settings=build-args='--profile dev'
endif

.PHONY: build-prod
build-prod:
	@rm -f python/pydantic_core/*.so
ifneq ($(USE_MATURIN),)
	uv run maturin develop --release
else
	uv pip install -v -e .
endif

.PHONY: build-profiling
build-profiling:
	@rm -f python/pydantic_core/*.so
ifneq ($(USE_MATURIN),)
	uv run maturin develop --profile profiling
else
	uv pip install --force-reinstall -v -e . --config-settings=build-args='--profile profiling'
endif

.PHONY: build-coverage
build-coverage:
	@rm -f python/pydantic_core/*.so
ifneq ($(USE_MATURIN),)
	RUSTFLAGS='-C instrument-coverage' uv run maturin develop --release
else
	RUSTFLAGS='-C instrument-coverage' uv pip install -v -e .
endif

.PHONY: build-pgo
build-pgo:
	@rm -f python/pydantic_core/*.so
	$(eval PROFDATA := $(shell mktemp -d))
ifneq ($(USE_MATURIN),)
	RUSTFLAGS='-Cprofile-generate=$(PROFDATA)' uv run maturin develop --release
else
	RUSTFLAGS='-Cprofile-generate=$(PROFDATA)' uv pip install --force-reinstall -v -e .
endif
	pytest tests/benchmarks
	$(eval LLVM_PROFDATA := $(shell rustup run stable bash -c 'echo $$RUSTUP_HOME/toolchains/$$RUSTUP_TOOLCHAIN/lib/rustlib/$$(rustc -Vv | grep host | cut -d " " -f 2)/bin/llvm-profdata'))
	$(LLVM_PROFDATA) merge -o $(PROFDATA)/merged.profdata $(PROFDATA)
ifneq ($(USE_MATURIN),)
	RUSTFLAGS='-Cprofile-use=$(PROFDATA)/merged.profdata' uv run maturin develop --release
else
	RUSTFLAGS='-Cprofile-use=$(PROFDATA)/merged.profdata' uv pip install --force-reinstall -v -e .
endif
	@rm -rf $(PROFDATA)

.PHONY: build-wasm
build-wasm:
	@echo 'This requires python 3.12, maturin and emsdk to be installed'
	uv run maturin build --release --target wasm32-unknown-emscripten --out dist -i 3.12
	ls -lh dist

.PHONY: format
format:
	uv run ruff check --fix $(sources)
	uv run ruff format $(sources)
	cargo fmt

.PHONY: lint-python
lint-python:
	uv run ruff check $(sources)
	uv run ruff format --check $(sources)
	uv run griffe dump -f -d google -LWARNING -o/dev/null python/pydantic_core

.PHONY: lint-rust
lint-rust:
	cargo fmt --version
	cargo fmt --all -- --check
	cargo clippy --version
	cargo clippy --tests -- -D warnings

.PHONY: lint
lint: lint-python lint-rust

.PHONY: pyright
pyright:
	uv run pyright

.PHONY: test
test:
	uv run pytest

.PHONY: testcov
testcov: build-coverage
	@rm -rf htmlcov
	@mkdir -p htmlcov
	coverage run -m pytest
	coverage report
	coverage html -d htmlcov/python
	coverage-prepare html python/pydantic_core/*.so

.PHONY: all
all: format build-dev lint test

.PHONY: clean
clean:
	rm -rf `find . -name __pycache__`
	rm -f `find . -type f -name '*.py[co]' `
	rm -f `find . -type f -name '*~' `
	rm -f `find . -type f -name '.*~' `
	rm -rf src/self_schema.py
	rm -rf .cache
	rm -rf htmlcov
	rm -rf .pytest_cache
	rm -rf *.egg-info
	rm -f .coverage
	rm -f .coverage.*
	rm -rf build
	rm -rf perf.data*
	rm -rf python/pydantic_core/*.so

==> pydantic-pydantic-core-d771df5/README.md <==
# pydantic-core

[![CI](https://github.com/pydantic/pydantic-core/workflows/ci/badge.svg?event=push)](https://github.com/pydantic/pydantic-core/actions?query=event%3Apush+branch%3Amain+workflow%3Aci)
[![Coverage](https://codecov.io/gh/pydantic/pydantic-core/branch/main/graph/badge.svg)](https://codecov.io/gh/pydantic/pydantic-core)
[![pypi](https://img.shields.io/pypi/v/pydantic-core.svg)](https://pypi.python.org/pypi/pydantic-core)
[![versions](https://img.shields.io/pypi/pyversions/pydantic-core.svg)](https://github.com/pydantic/pydantic-core)
[![license](https://img.shields.io/github/license/pydantic/pydantic-core.svg)](https://github.com/pydantic/pydantic-core/blob/main/LICENSE)

This package provides the core functionality for [pydantic](https://docs.pydantic.dev) validation and serialization.

Pydantic-core is currently around 17x faster than pydantic V1. See [`tests/benchmarks/`](./tests/benchmarks/) for details.

## Example of direct usage

_NOTE: You should not need to use pydantic-core directly; instead, use pydantic, which in turn uses pydantic-core._

```py
from pydantic_core import SchemaValidator, ValidationError

v = SchemaValidator(
    {
        'type': 'typed-dict',
        'fields': {
            'name': {
                'type': 'typed-dict-field',
                'schema': {
                    'type': 'str',
                },
            },
            'age': {
                'type': 'typed-dict-field',
                'schema': {
                    'type': 'int',
                    'ge': 18,
                },
            },
            'is_developer': {
                'type': 'typed-dict-field',
                'schema': {
                    'type': 'default',
                    'schema': {'type': 'bool'},
                    'default': True,
                },
            },
        },
    }
)

r1 = v.validate_python({'name': 'Samuel', 'age': 35})
assert r1 == {'name': 'Samuel', 'age': 35, 'is_developer': True}

# pydantic-core can also validate JSON directly
r2 = v.validate_json('{"name": "Samuel", "age": 35}')
assert r1 == r2

try:
    v.validate_python({'name': 'Samuel', 'age': 11})
except ValidationError as e:
    print(e)
    """
    1 validation error for model
    age
      Input should be greater than or equal to 18
      [type=greater_than_equal, context={ge: 18}, input_value=11, input_type=int]
    """
```

## Getting Started

You'll need rust stable [installed](https://rustup.rs/), or rust nightly if you want to generate accurate coverage.

With rust and python 3.8+ installed, compiling pydantic-core should be possible with roughly the following:

```bash
# clone this repo or your fork
git clone git@github.com:pydantic/pydantic-core.git
cd pydantic-core
# create a new virtual env
python3 -m venv env
source env/bin/activate
# install dependencies and install pydantic-core
make install
```

That should be it; the example shown above should now run.

You might find it useful to look at [`python/pydantic_core/_pydantic_core.pyi`](./python/pydantic_core/_pydantic_core.pyi) and [`python/pydantic_core/core_schema.py`](./python/pydantic_core/core_schema.py) for more information on the python API; beyond that, [`tests/`](./tests) provide a large number of examples of usage.
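The serialization half of the API follows the same pattern as `SchemaValidator` above: build a `SchemaSerializer` from a core schema, then call `to_python` or `to_json`. As a minimal sketch (an illustrative snippet using the simplest possible schema, not an excerpt from the pydantic docs):

```py
from pydantic_core import SchemaSerializer

s = SchemaSerializer({'type': 'list', 'items_schema': {'type': 'int'}})
# to_python produces plain python objects, to_json produces JSON bytes
assert s.to_python([1, 2, 3]) == [1, 2, 3]
assert s.to_json([1, 2, 3]) == b'[1,2,3]'
```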
If you want to contribute to pydantic-core, you'll want to use some other make commands:

* `make build-dev` to build the package during development
* `make build-prod` to perform an optimised build for benchmarking
* `make test` to run the tests
* `make testcov` to run the tests and generate a coverage report
* `make lint` to run the linter
* `make format` to format python and rust code
* `make` to run `format build-dev lint test`

## Profiling

It's possible to profile the code using the [`flamegraph` utility from `flamegraph-rs`](https://github.com/flamegraph-rs/flamegraph). (Tested on Linux.) You can install this with `cargo install flamegraph`.

Run `make build-profiling` to install a release build with debugging symbols included (needed for profiling).

Once that is built, you can profile pytest benchmarks with (e.g.):

```bash
flamegraph -- pytest tests/benchmarks/test_micro_benchmarks.py -k test_list_of_ints_core_py --benchmark-enable
```

The `flamegraph` command will produce an interactive SVG at `flamegraph.svg`.

## Releasing

1. Bump the package version locally. Do not just edit `Cargo.toml` on GitHub, you need both `Cargo.toml` and `Cargo.lock` to be updated.
2. Make a PR for the version bump and merge it.
3. Go to https://github.com/pydantic/pydantic-core/releases and click "Draft a new release".
4. In the "Choose a tag" dropdown enter the new tag `v<new-version>` and select "Create new tag on publish" when the option appears.
5. Enter the release title in the form "v<new-version> <date>".
6. Click the "Generate release notes" button.
7. Click "Publish release".
8. Go to https://github.com/pydantic/pydantic-core/actions and ensure that all builds for the release complete successfully.
9. Go to https://pypi.org/project/pydantic-core/ and ensure that the latest release is published.
10. Done 🎉

pydantic-pydantic-core-d771df5/benches/main.rs

#![feature(test)]

extern crate test;

use test::{black_box, Bencher};

use pyo3::prelude::*;
use pyo3::types::{PyDict, PyString};

use _pydantic_core::{validate_core_schema, SchemaValidator};

fn build_schema_validator_with_globals(py: Python, code: &str, globals: Option<&Bound<'_, PyDict>>) -> SchemaValidator {
    let mut schema = py.eval_bound(code, globals, None).unwrap().extract().unwrap();
    schema = validate_core_schema(&schema, None).unwrap().extract().unwrap();
    SchemaValidator::py_new(py, &schema, None).unwrap()
}

fn build_schema_validator(py: Python, code: &str) -> SchemaValidator {
    build_schema_validator_with_globals(py, code, None)
}

fn json<'a>(py: Python<'a>, code: &'a str) -> Bound<'a, PyAny> {
    black_box(PyString::new_bound(py, code).into_any())
}

#[bench]
fn ints_json(bench: &mut Bencher) {
    Python::with_gil(|py| {
        let validator = build_schema_validator(py, "{'type': 'int'}");

        let result = validator
            .validate_json(py, &json(py, "123"), None, None, None, false.into())
            .unwrap();
        let result_int: i64 = result.extract(py).unwrap();
        assert_eq!(result_int, 123);

        bench.iter(|| {
            black_box(
                validator
                    .validate_json(py, &json(py, "123"), None, None, None, false.into())
                    .unwrap(),
            )
        })
    })
}

#[bench]
fn ints_python(bench: &mut Bencher) {
    Python::with_gil(|py| {
        let validator = build_schema_validator(py, "{'type': 'int'}");

        let input = 123_i64.into_py(py).into_bound(py);
        let result = validator
            .validate_python(py, &input, None, None, None, None, false.into())
            .unwrap();
        let result_int: i64 =
result.extract(py).unwrap(); assert_eq!(result_int, 123); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn list_int_json(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'list', 'items_schema': {'type': 'int'}}"); let code = format!( "[{}]", (0..100).map(|x| x.to_string()).collect::>().join(",") ); bench.iter(|| { black_box( validator .validate_json(py, &json(py, &code), None, None, None, false.into()) .unwrap(), ) }) }) } fn list_int_input(py: Python<'_>) -> (SchemaValidator, PyObject) { let validator = build_schema_validator(py, "{'type': 'list', 'items_schema': {'type': 'int'}}"); let code = format!( "[{}]", (0..100).map(|x| x.to_string()).collect::>().join(",") ); let input = py.eval_bound(&code, None, None).unwrap(); (validator, input.to_object(py)) } #[bench] fn list_int_python(bench: &mut Bencher) { Python::with_gil(|py| { let (validator, input) = list_int_input(py); let input = black_box(input.bind(py)); bench.iter(|| { let v = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); black_box(v) }) }) } #[bench] fn list_int_python_isinstance(bench: &mut Bencher) { Python::with_gil(|py| { let (validator, input) = list_int_input(py); let input = black_box(input.bind(py)); let v = validator.isinstance_python(py, &input, None, None, None, None).unwrap(); assert!(v); bench.iter(|| { let v = validator.isinstance_python(py, &input, None, None, None, None).unwrap(); black_box(v) }) }) } #[bench] fn list_error_json(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'list', 'items_schema': {'type': 'int'}}"); let code = format!( "[{}]", (0..100) .map(|v| format!(r#""{}""#, as_str(v))) .collect::>() .join(", ") ); match validator.validate_json(py, &json(py, &code), None, None, None, false.into()) { Ok(_) => panic!("unexpectedly valid"), Err(e) => { let v = e.value_bound(py); // println!("error: {}", v.to_string()); assert_eq!(v.getattr("title").unwrap().to_string(), "list[int]"); let error_count: i64 = v.call_method0("error_count").unwrap().extract().unwrap(); assert_eq!(error_count, 100); } }; bench.iter( || match validator.validate_json(py, &json(py, &code), None, None, None, false.into()) { Ok(_) => panic!("unexpectedly valid"), Err(e) => black_box(e), }, ) }) } fn list_error_python_input(py: Python<'_>) -> (SchemaValidator, PyObject) { let validator = build_schema_validator(py, "{'type': 'list', 'items_schema': {'type': 'int'}}"); let code = format!( "[{}]", (0..100) .map(|v| format!(r#""{}""#, as_str(v))) .collect::>() .join(", ") ); let input = py.eval_bound(&code, None, None).unwrap().extract().unwrap(); match validator.validate_python(py, &input, None, None, None, None, false.into()) { Ok(_) => panic!("unexpectedly valid"), Err(e) => { let v = e.value_bound(py); // println!("error: {}", v.to_string()); assert_eq!(v.getattr("title").unwrap().to_string(), "list[int]"); let error_count: i64 = v.call_method0("error_count").unwrap().extract().unwrap(); assert_eq!(error_count, 100); } }; (validator, input.to_object(py)) } #[bench] fn list_error_python(bench: &mut Bencher) { Python::with_gil(|py| { let (validator, input) = list_error_python_input(py); let input = black_box(input.bind(py)); bench.iter(|| { let result = validator.validate_python(py, &input, None, None, None, None, false.into()); match result { Ok(_) => panic!("unexpectedly valid"), 
Err(e) => black_box(e), } }) }) } #[bench] fn list_error_python_isinstance(bench: &mut Bencher) { Python::with_gil(|py| { let (validator, input) = list_error_python_input(py); let input = black_box(input.bind(py)); let r = validator.isinstance_python(py, &input, None, None, None, None).unwrap(); assert!(!r); bench.iter(|| { black_box(validator.isinstance_python(py, &input, None, None, None, None).unwrap()); }) }) } #[bench] fn list_any_json(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'list'}"); let code = format!( "[{}]", (0..100).map(|x| x.to_string()).collect::>().join(",") ); bench.iter(|| { black_box( validator .validate_json(py, &json(py, &code), None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn list_any_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'list'}"); let code = format!( "[{}]", (0..100).map(|x| x.to_string()).collect::>().join(",") ); let input = py.eval_bound(&code, None, None).unwrap().to_object(py); let input = black_box(input.bind(py)); bench.iter(|| { let v = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); black_box(v) }) }) } fn as_char(i: u8) -> char { (i % 26 + 97) as char } fn as_str(i: u8) -> String { format!("{}{}", as_char(i / 26), as_char(i)) } #[bench] fn dict_json(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, "{'type': 'dict', 'keys_schema': {'type': 'str'}, 'values_schema': {'type': 'int'}}", ); let code = format!( "{{{}}}", (0..100_u8) .map(|i| format!(r#""{}": {i}"#, as_str(i))) .collect::>() .join(", ") ); bench.iter(|| { black_box( validator .validate_json(py, &json(py, &code), None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn dict_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, "{'type': 'dict', 'keys_schema': {'type': 'str'}, 'values_schema': {'type': 'int'}}", ); let code = format!( "{{{}}}", (0..100_u8) .map(|i| format!(r#""{}{}": {i}"#, as_char(i / 26), as_char(i))) .collect::>() .join(", ") ); let input = py.eval_bound(&code, None, None).unwrap().to_object(py); let input = black_box(input.bind(py)); bench.iter(|| { let v = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); black_box(v) }) }) } #[bench] fn dict_value_error(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, r"{ 'type': 'dict', 'keys_schema': {'type': 'str'}, 'values_schema': {'type': 'int', 'lt': 0}, }", ); let code = format!( "{{{}}}", (0..100_u8) .map(|i| format!(r#""{}": {i}"#, as_str(i))) .collect::>() .join(", ") ); let input = py.eval_bound(&code, None, None).unwrap().to_object(py).into_bound(py); match validator.validate_python(py, &input, None, None, None, None, false.into()) { Ok(_) => panic!("unexpectedly valid"), Err(e) => { let v = e.value_bound(py); // println!("error: {}", v.to_string()); assert_eq!(v.getattr("title").unwrap().to_string(), "dict[str,constrained-int]"); let error_count: i64 = v.call_method0("error_count").unwrap().extract().unwrap(); assert_eq!(error_count, 100); } }; let input = black_box(input); bench.iter(|| { let result = validator.validate_python(py, &input, None, None, None, None, false.into()); match result { Ok(_) => panic!("unexpectedly valid"), Err(e) => black_box(e), } }) }) } #[bench] fn typed_dict_json(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( 
py, r"{ 'type': 'typed-dict', 'extra_behavior': 'ignore', 'fields': { 'a': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'b': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'c': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'd': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'e': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'f': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'g': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'h': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'i': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'j': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, }, }", ); let code = r#"{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 0}"#.to_string(); bench.iter(|| { black_box( validator .validate_json(py, &json(py, &code), None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn typed_dict_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, r"{ 'type': 'typed-dict', 'extra_behavior': 'ignore', 'fields': { 'a': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'b': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'c': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'd': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'e': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'f': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'g': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'h': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'i': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, 'j': {'type': 'typed-dict-field', 'schema': {'type': 'int'}}, }, }", ); let code = r#"{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 0}"#.to_string(); let input = py.eval_bound(&code, None, None).unwrap().to_object(py); let input = black_box(input.bind(py)); bench.iter(|| { let v = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); black_box(v) }) }) } #[bench] fn typed_dict_deep_error(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, r"{ 'type': 'typed-dict', 'fields': { 'field_a': {'type': 'typed-dict-field', 'schema': {'type': 'str'}}, 'field_b': { 'type': 'typed-dict-field', 'schema': { 'type': 'typed-dict', 'fields': { 'field_c': {'type': 'typed-dict-field','schema': {'type': 'str'}}, 'field_d': { 'type': 'typed-dict-field', 'schema': { 'type': 'typed-dict', 'fields': {'field_e': {'type': 'typed-dict-field','schema': {'type': 'str'}}, 'field_f': {'type': 'typed-dict-field','schema': {'type': 'int'}}}, } }, }, } }, }, }", ); let code = "{'field_a': '1', 'field_b': {'field_c': '2', 'field_d': {'field_e': '4', 'field_f': 'xx'}}}"; let input = py.eval_bound(code, None, None).unwrap().to_object(py); let input = black_box(input.bind(py)); match validator.validate_python(py, &input, None, None, None, None, false.into()) { Ok(_) => panic!("unexpectedly valid"), Err(e) => { let v = e.value_bound(py); // println!("error: {}", v.to_string()); assert_eq!(v.getattr("title").unwrap().to_string(), "typed-dict"); let error_count: i64 = v.call_method0("error_count").unwrap().extract().unwrap(); assert_eq!(error_count, 1); } }; bench.iter(|| { let result = validator.validate_python(py, &input, None, None, None, None, false.into()); match result { Ok(_) => panic!("unexpectedly valid"), Err(e) => black_box(e), } }) }) } #[bench] fn 
complete_model(bench: &mut Bencher) { Python::with_gil(|py| { let sys_path = py.import_bound("sys").unwrap().getattr("path").unwrap(); sys_path.call_method1("append", ("./tests/benchmarks/",)).unwrap(); let complete_schema = py.import_bound("complete_schema").unwrap(); let mut schema = complete_schema.call_method0("schema").unwrap(); schema = validate_core_schema(&schema, None).unwrap().extract().unwrap(); let validator = SchemaValidator::py_new(py, &schema, None).unwrap(); let input = complete_schema.call_method0("input_data_lax").unwrap(); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ); }) }) } #[bench] fn nested_model_using_definitions(bench: &mut Bencher) { Python::with_gil(|py| { let sys_path = py.import_bound("sys").unwrap().getattr("path").unwrap(); sys_path.call_method1("append", ("./tests/benchmarks/",)).unwrap(); let complete_schema = py.import_bound("nested_schema").unwrap(); let mut schema = complete_schema.call_method0("schema_using_defs").unwrap(); schema = validate_core_schema(&schema, None).unwrap().extract().unwrap(); let validator = SchemaValidator::py_new(py, &schema, None).unwrap(); let input = complete_schema.call_method0("input_data_valid").unwrap(); let input = black_box(input); validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ); }) }) } #[bench] fn nested_model_inlined(bench: &mut Bencher) { Python::with_gil(|py| { let sys_path = py.import_bound("sys").unwrap().getattr("path").unwrap(); sys_path.call_method1("append", ("./tests/benchmarks/",)).unwrap(); let complete_schema = py.import_bound("nested_schema").unwrap(); let mut schema = complete_schema.call_method0("inlined_schema").unwrap(); schema = validate_core_schema(&schema, None).unwrap().extract().unwrap(); let validator = SchemaValidator::py_new(py, &schema, None).unwrap(); let input = complete_schema.call_method0("input_data_valid").unwrap(); let input = black_box(input); validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ); }) }) } #[bench] fn literal_ints_few_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'literal', 'expected': list(range(5))}"); let input = 4_i64.into_py(py); let input = input.bind(py); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_int: i64 = result.extract(py).unwrap(); assert_eq!(result_int, 4); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_strings_few_small_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'literal', 'expected': [f'{idx}' for idx in range(5)]}"); let input = py.eval_bound("'4'", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let input_str: String = input.extract().unwrap(); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_str: String = result.extract(py).unwrap(); assert_eq!(result_str, input_str); let input = black_box(input); bench.iter(|| { black_box( validator 
.validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_strings_few_large_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, "{'type': 'literal', 'expected': ['a' * 25 + f'{idx}' for idx in range(5)]}", ); let input = py.eval_bound("'a' * 25 + '4'", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let input_str: String = input.extract().unwrap(); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_str: String = result.extract(py).unwrap(); assert_eq!(result_str, input_str); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_enums_few_python(bench: &mut Bencher) { Python::with_gil(|py| { let globals = PyDict::new_bound(py); py.run_bound( r" from enum import Enum class Foo(Enum): v1 = object() v2 = object() v3 = object() v4 = object() ", Some(&globals), None, ) .unwrap(); let validator = build_schema_validator_with_globals( py, "{'type': 'literal', 'expected': [Foo.v1, Foo.v2, Foo.v3, Foo.v4]}", Some(&globals), ); let input = py.eval_bound("Foo.v4", Some(&globals), None).unwrap(); let input = input.to_object(py).into_bound(py); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); assert!(input.eq(result).unwrap()); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_ints_many_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'literal', 'expected': list(range(100))}"); let input = 99_i64.into_py(py).into_bound(py); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_int: i64 = result.extract(py).unwrap(); assert_eq!(result_int, 99); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_strings_many_small_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'literal', 'expected': [f'{idx}' for idx in range(100)]}"); let input = py.eval_bound("'99'", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let input_str: String = input.extract().unwrap(); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_str: String = result.extract(py).unwrap(); assert_eq!(result_str, input_str); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_strings_many_large_python(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, "{'type': 'literal', 'expected': ['a' * 25 + f'{idx}' for idx in range(100)]}", ); let input = py.eval_bound("'a' * 25 + '99'", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let input_str: String = input.extract().unwrap(); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_str: String = result.extract(py).unwrap(); assert_eq!(result_str, input_str); let input = black_box(input); bench.iter(|| { black_box( validator 
.validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_ints_many_json(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator(py, "{'type': 'literal', 'expected': list(range(100))}"); let input_json = py.eval_bound("'99'", None, None).unwrap(); let input_json = input_json.to_object(py).into_bound(py); let result = validator .validate_json(py, &input_json, None, None, None, false.into()) .unwrap(); let result_int: i64 = result.extract(py).unwrap(); assert_eq!(result_int, 99); let input_json = black_box(input_json); bench.iter(|| { black_box( validator .validate_json(py, &input_json, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_strings_many_large_json(bench: &mut Bencher) { Python::with_gil(|py| { let validator = build_schema_validator( py, "{'type': 'literal', 'expected': ['a' * 25 + f'{idx}' for idx in range(100)]}", ); let input = py.eval_bound("'a' * 25 + '99'", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let input_json = py.eval_bound("'\"' + 'a' * 25 + '99' + '\"'", None, None).unwrap(); let input_json = input_json.to_object(py).into_bound(py); let input_str: String = input.extract().unwrap(); let result = validator .validate_json(py, &input_json, None, None, None, false.into()) .unwrap(); let result_str: String = result.extract(py).unwrap(); assert_eq!(result_str, input_str); let input_json = black_box(input_json); bench.iter(|| { black_box( validator .validate_json(py, &input_json, None, None, None, false.into()) .unwrap(), ) }) }) } #[bench] fn literal_mixed_few_python(bench: &mut Bencher) { Python::with_gil(|py| { let globals = PyDict::new_bound(py); py.run_bound( r" from enum import Enum class Foo(Enum): v1 = object() v2 = object() v3 = object() v4 = object() ", Some(&globals), None, ) .unwrap(); let validator = build_schema_validator_with_globals( py, "{'type': 'literal', 'expected': [None, 'null', -1, Foo.v4]}", Some(&globals), ); // String { let input = py.eval_bound("'null'", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let input_str: String = input.extract().unwrap(); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_str: String = result.extract(py).unwrap(); assert_eq!(result_str, input_str); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) } // Int { let input = py.eval_bound("-1", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let input_int: i64 = input.extract().unwrap(); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); let result_int: i64 = result.extract(py).unwrap(); assert_eq!(result_int, input_int); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) } // None { let input = py.eval_bound("None", None, None).unwrap(); let input = input.to_object(py).into_bound(py); let result = validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(); assert!(input.eq(result).unwrap()); let input = black_box(input); bench.iter(|| { black_box( validator .validate_python(py, &input, None, None, None, None, false.into()) .unwrap(), ) }) } // Enum { let input = py.eval_bound("Foo.v4", Some(&globals), None).unwrap(); let input = input.to_object(py).into_bound(py); let 
result = validator
                .validate_python(py, &input, None, None, None, None, false.into())
                .unwrap();
            assert!(input.eq(result).unwrap());

            let input = black_box(input);
            bench.iter(|| {
                black_box(
                    validator
                        .validate_python(py, &input, None, None, None, None, false.into())
                        .unwrap(),
                )
            })
        }
    })
}

pydantic-pydantic-core-d771df5/build.rs

use std::env;
use std::path::Path;
use std::process::Command;
use std::str::from_utf8;

fn generate_self_schema() {
    println!("cargo:rerun-if-changed=python/pydantic_core/core_schema.py");
    println!("cargo:rerun-if-changed=generate_self_schema.py");

    if Path::new("./src/self_schema.py").exists() && option_env!("CI") == Some("true") {
        // self_schema.py already exists and CI indicates we're running on a github actions build,
        // don't bother generating again
        return;
    }

    let output = Command::new(
        env::var("PYTHON")
            .ok()
            .or_else(|| pyo3_build_config::get().executable.clone())
            .unwrap_or_else(|| "python3".to_owned()),
    )
    .arg("generate_self_schema.py")
    .output()
    .expect("failed to execute process");

    if !output.status.success() {
        let stdout = from_utf8(&output.stdout).unwrap();
        let stderr = from_utf8(&output.stderr).unwrap();
        eprint!("{stdout}{stderr}");
        panic!("generate_self_schema.py failed with {}", output.status);
    }
}

fn main() {
    pyo3_build_config::use_pyo3_cfgs();
    if let Some(true) = version_check::supports_feature("coverage_attribute") {
        println!("cargo:rustc-cfg=has_coverage_attribute");
    }
    println!("cargo:rustc-check-cfg=cfg(has_coverage_attribute)");

    if std::env::var("RUSTFLAGS")
        .unwrap_or_default()
        .contains("-Cprofile-use=")
    {
        println!("cargo:rustc-cfg=specified_profile_use");
    }
    println!("cargo:rustc-check-cfg=cfg(specified_profile_use)");

    generate_self_schema();
    println!("cargo:rustc-env=PROFILE={}", std::env::var("PROFILE").unwrap());
}

pydantic-pydantic-core-d771df5/generate_self_schema.py

"""
This script generates the schema for the schema - e.g.
a definition of what inputs can be provided to `SchemaValidator()`.

The schema is generated from `python/pydantic_core/core_schema.py`.
""" from __future__ import annotations as _annotations import decimal import importlib.util import re import sys from collections.abc import Callable from datetime import date, datetime, time, timedelta from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, ForwardRef, List, Pattern, Set, Type, Union from typing_extensions import TypedDict, get_args, get_origin, is_typeddict TypingUnionType = Type[Union[str, int]] try: from types import UnionType as TypesUnionType UnionType = Union[TypingUnionType, TypesUnionType] except ImportError: TypesUnionType = TypingUnionType UnionType = TypingUnionType THIS_DIR = Path(__file__).parent SAVE_PATH = THIS_DIR / 'src' / 'self_schema.py' if TYPE_CHECKING: from pydantic_core import core_schema else: # can't import core_schema.py directly as pydantic-core might not be installed core_schema_spec = importlib.util.spec_from_file_location( '_typing', str(THIS_DIR / 'python' / 'pydantic_core' / 'core_schema.py') ) core_schema = importlib.util.module_from_spec(core_schema_spec) core_schema_spec.loader.exec_module(core_schema) # the validator for referencing schema (Schema is used recursively, so has to use a reference) schema_ref_validator = {'type': 'definition-ref', 'schema_ref': 'root-schema'} def get_schema(obj: Any, definitions: dict[str, core_schema.CoreSchema]) -> core_schema.CoreSchema: # noqa: C901 if isinstance(obj, str): return {'type': obj} elif obj in (datetime, timedelta, date, time, bool, int, float, str, decimal.Decimal, complex): return {'type': obj.__name__.lower()} elif is_typeddict(obj): return type_dict_schema(obj, definitions) elif obj == Any or obj == type: return {'type': 'any'} if isinstance(obj, type) and issubclass(obj, core_schema.Protocol): return {'type': 'callable'} origin = get_origin(obj) assert origin is not None, f'origin cannot be None, obj={obj}, you probably need to fix generate_self_schema.py' if origin is Union or origin is TypesUnionType: return union_schema(obj, definitions) elif obj is Callable or origin is Callable: return {'type': 'callable'} elif origin is core_schema.Literal: expected = all_literal_values(obj) assert expected, f'literal "expected" cannot be empty, obj={obj}' return {'type': 'literal', 'expected': expected} elif issubclass(origin, List): return {'type': 'list', 'items_schema': get_schema(obj.__args__[0], definitions)} elif issubclass(origin, Set): return {'type': 'set', 'items_schema': get_schema(obj.__args__[0], definitions)} elif issubclass(origin, Dict): return { 'type': 'dict', 'keys_schema': get_schema(obj.__args__[0], definitions), 'values_schema': get_schema(obj.__args__[1], definitions), } elif issubclass(origin, Type): # can't really use 'is-instance' since this is used for the class_ parameter of 'is-instance' validators return {'type': 'any'} elif origin in (Pattern, re.Pattern): # can't really use 'is-instance' easily with Pattern, so we use `any` as a placeholder for now return {'type': 'any'} else: # debug(obj) raise TypeError(f'Unknown type: {obj!r}') def tagged_union(std_union_schema: Dict[str, Any], discriminator_key: str, ref: str | None = None) -> Dict[str, Any]: """ Build a tagged union schema from a standard union schema. 
""" tagged_choices = {} for choice in std_union_schema['choices']: literal = choice['fields'][discriminator_key]['schema']['expected'] assert isinstance(literal, list), 'literal expected must be a list' assert all(isinstance(arg, str) for arg in literal), 'literal expected must be a list of strings' first, *rest = literal tagged_choices[first] = choice for arg in rest: tagged_choices[arg] = choice s = {'type': 'tagged-union', 'discriminator': discriminator_key, 'choices': tagged_choices} if ref is not None: s['ref'] = ref return s defined_ser_schema = False def type_dict_schema( # noqa: C901 typed_dict: type[TypedDict], definitions: dict[str, core_schema.CoreSchema] ) -> dict[str, Any]: global defined_ser_schema required_keys = getattr(typed_dict, '__required_keys__', set()) fields = {} for field_name, field_type in typed_dict.__annotations__.items(): required = field_name in required_keys schema = None fr_arg = None if type(field_type) == ForwardRef: fr_arg = field_type.__forward_arg__ fr_arg, matched = re.subn(r'Required\[(.+)]', r'\1', fr_arg) if matched: required = True if 'CoreSchema' == fr_arg or re.search('[^a-zA-Z]CoreSchema', fr_arg): if fr_arg == 'CoreSchema': schema = schema_ref_validator elif fr_arg == 'List[CoreSchema]': schema = {'type': 'list', 'items_schema': schema_ref_validator} elif fr_arg == 'Dict[str, CoreSchema]': schema = {'type': 'dict', 'keys_schema': {'type': 'str'}, 'values_schema': schema_ref_validator} elif fr_arg == 'Dict[Hashable, CoreSchema]': schema = {'type': 'dict', 'keys_schema': {'type': 'any'}, 'values_schema': schema_ref_validator} elif fr_arg == 'List[Union[CoreSchema, Tuple[CoreSchema, str]]]': schema = { 'type': 'list', 'items_schema': { 'type': 'union', 'choices': [ schema_ref_validator, {'type': 'tuple', 'items_schema': [schema_ref_validator, {'type': 'str'}]}, ], }, } else: raise ValueError(f'Unknown Schema forward ref: {fr_arg}') else: field_type = eval_forward_ref(field_type) if schema is None: if get_origin(field_type) == core_schema.Required: required = True field_type = field_type.__args__[0] schema = get_schema(field_type, definitions) if fr_arg == 'SerSchema': if defined_ser_schema: schema = {'type': 'definition-ref', 'schema_ref': 'ser-schema'} else: defined_ser_schema = True definitions['ser-schema'] = tagged_union(schema, 'type', 'ser-schema') schema = {'type': 'definition-ref', 'schema_ref': 'ser-schema'} elif fr_arg.endswith('SerSchema'): schema = tagged_union(schema, 'type') # now_utc_offset is an int that must be in the range -24 hours to +24 hours, we manually add a constraint here if field_name == 'now_utc_offset': schema.update(gt=-86_400, lt=86_400) fields[field_name] = {'schema': schema, 'required': required} return {'type': 'typed-dict', 'fields': fields, 'extra_behavior': 'forbid'} def union_schema(union_type: UnionType, definitions) -> core_schema.UnionSchema | core_schema.DefinitionReferenceSchema: return {'type': 'union', 'choices': [get_schema(arg, definitions) for arg in union_type.__args__]} def all_literal_values(type_: type[core_schema.Literal]) -> list[any]: if get_origin(type_) is core_schema.Literal: values = get_args(type_) return [x for value in values for x in all_literal_values(value)] else: return [type_] def eval_forward_ref(type_: Any) -> Any: if sys.version_info < (3, 9): return type_._evaluate(core_schema.__dict__, None) elif sys.version_info < (3, 12, 4): return type_._evaluate(core_schema.__dict__, None, recursive_guard=set()) else: return type_._evaluate(core_schema.__dict__, None, type_params=set(), 
recursive_guard=set())


def main() -> None:
    schema_union = core_schema.CoreSchema
    assert get_origin(schema_union) is Union, 'expected core_schema.CoreSchema to be a Union'

    definitions: dict[str, core_schema.CoreSchema] = {}
    choices = {}
    for s in schema_union.__args__:
        type_ = s.__annotations__['type']
        m = re.search(r"Literal\['(.+?)']", type_.__forward_arg__)
        assert m, f'Unknown schema type: {type_}'
        key = m.group(1)
        value = get_schema(s, definitions)
        choices[key] = value

    schema = core_schema.definitions_schema(
        schema=core_schema.definition_reference_schema(schema_ref='root-schema'),
        definitions=[
            core_schema.tagged_union_schema(choices, discriminator='type', ref='root-schema'),
            *definitions.values(),
        ],
    )
    python_code = (
        f'# this file is auto-generated by generate_self_schema.py, DO NOT edit manually\nself_schema = {schema}\n'
    )
    try:
        from black import Mode, TargetVersion, format_file_contents
    except ImportError:
        pass
    else:
        mode = Mode(
            line_length=120,
            string_normalization=False,
            magic_trailing_comma=False,
            target_versions={TargetVersion.PY37, TargetVersion.PY38, TargetVersion.PY39, TargetVersion.PY310},
        )
        python_code = format_file_contents(python_code, fast=False, mode=mode)
    SAVE_PATH.write_text(python_code)
    print(f'Self schema definition written to {SAVE_PATH}')


if __name__ == '__main__':
    main()

pydantic-pydantic-core-d771df5/package.json

{
  "name": "pydantic-core",
  "version": "1.0.0",
  "description": "for running wasm tests.",
  "author": "Samuel Colvin",
  "license": "MIT",
  "homepage": "https://github.com/pydantic/pydantic-core#readme",
  "main": "tests/emscripten_runner.js",
  "dependencies": {
    "prettier": "^2.7.1",
    "pyodide": "^0.26.3"
  },
  "scripts": {
    "test": "node tests/emscripten_runner.js",
    "format": "prettier --write 'tests/emscripten_runner.js' 'wasm-preview/*.{html,js}'",
    "lint": "prettier --check 'tests/emscripten_runner.js' 'wasm-preview/*.{html,js}'"
  },
  "prettier": {
    "singleQuote": true,
    "trailingComma": "all",
    "tabWidth": 2,
    "printWidth": 119,
    "bracketSpacing": false,
    "arrowParens": "avoid"
  }
}

pydantic-pydantic-core-d771df5/pyproject.toml

[build-system]
requires = [
    'maturin>=1,<2',
    'typing-extensions >=4.6.0,!=4.7.0'
]
build-backend = 'maturin'

[project]
name = 'pydantic_core'
description = "Core functionality for Pydantic validation and serialization"
requires-python = '>=3.8'
authors = [
    {name = 'Samuel Colvin', email = 's@muelcolvin.com'}
]
classifiers = [
    'Development Status :: 3 - Alpha',
    'Programming Language :: Python',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3 :: Only',
    'Programming Language :: Python :: 3.8',
    'Programming Language :: Python :: 3.9',
    'Programming Language :: Python :: 3.10',
    'Programming Language :: Python :: 3.11',
    'Programming Language :: Python :: 3.12',
    'Programming Language :: Python :: 3.13',
    'Programming Language :: Rust',
    'Framework :: Pydantic',
    'Intended Audience :: Developers',
    'Intended Audience :: Information Technology',
    'License :: OSI Approved :: MIT License',
    'Operating System :: POSIX :: Linux',
    'Operating System :: Microsoft :: Windows',
    'Operating System :: MacOS',
    'Typing :: Typed',
]
dependencies = [
    'typing-extensions >=4.6.0,!=4.7.0'
]
dynamic = [
    'description',
    'license',
    'readme',
    'version'
]

[project.urls]
Homepage = 'https://github.com/pydantic/pydantic-core'
Funding = 'https://github.com/sponsors/samuelcolvin'
Source = 'https://github.com/pydantic/pydantic-core'

[dependency-groups]
testing = [
    'backports.zoneinfo; python_version < "3.9"',
    'coverage',
    'dirty-equals',
    'inline-snapshot',
    'hypothesis',
    # pandas doesn't offer prebuilt wheels for all versions and platforms we test in CI e.g. aarch64 musllinux
    'pandas; python_version >= "3.9" and python_version < "3.13" and implementation_name == "cpython" and platform_machine == "x86_64"',
    'pytest',
    # pytest-examples currently depends on aiohttp via black; we don't want to build it on platforms like aarch64 musllinux in CI
    'pytest-examples; implementation_name == "cpython" and platform_machine == "x86_64"',
    'pytest-speed',
    'pytest-mock',
    'pytest-pretty',
    'pytest-timeout',
    'python-dateutil',
    # numpy doesn't offer prebuilt wheels for all versions and platforms we test in CI e.g. aarch64 musllinux
    'numpy; python_version >= "3.9" and python_version < "3.13" and implementation_name == "cpython" and platform_machine == "x86_64"',
    'exceptiongroup; python_version < "3.11"',
    'tzdata',
    'typing_extensions',
]
linting = [
    'griffe',
    'pyright',
    'ruff',
    'mypy',
]
wasm = [
    'typing_extensions',
    'maturin>=1,<2',
    'ruff',
]
codspeed = [
    # codspeed is only run on CI, with latest version of CPython
    'pytest-codspeed; python_version == "3.13" and implementation_name == "cpython"',
]
all = [
    { include-group = 'testing' },
    { include-group = 'linting' },
    { include-group = 'wasm' },
]

[tool.maturin]
python-source = "python"
module-name = "pydantic_core._pydantic_core"
bindings = 'pyo3'
features = ["pyo3/extension-module"]

[tool.ruff]
line-length = 120

[tool.ruff.lint]
extend-select = ['Q', 'RUF100', 'C90', 'I']
extend-ignore = [
    'E721', # using type() instead of isinstance() - we use this in tests
]
flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'}
mccabe = { max-complexity = 13 }
isort = { known-first-party = ['pydantic_core', 'tests'] }

[tool.ruff.format]
quote-style = 'single'

[tool.pytest.ini_options]
testpaths = 'tests'
log_format = '%(name)s %(levelname)s: %(message)s'
filterwarnings = [
    'error',
    # Python 3.9 and below allowed truncation of float to integers in some
    # cases, by not making this an error we can test for this behaviour
    'ignore:(.+)Implicit conversion to integers using __int__ is deprecated',
]
timeout = 30
xfail_strict = true
# min, max, mean, stddev, median, iqr, outliers, ops, rounds, iterations
addopts = [
    '--benchmark-columns', 'min,mean,stddev,outliers,rounds,iterations',
    '--benchmark-group-by', 'group',
    '--benchmark-warmup', 'on',
    '--benchmark-disable', # this is enabled by `make benchmark` when you actually want to run benchmarks
]

[tool.coverage.run]
source = ['pydantic_core']
branch = true

[tool.coverage.report]
precision = 2
exclude_lines = [
    'pragma: no cover',
    'raise NotImplementedError',
    'if TYPE_CHECKING:',
    '@overload',
]

# configuring https://github.com/pydantic/hooky
[tool.hooky]
reviewers = ['sydney-runkle', 'davidhewitt']
require_change_file = false

[tool.pyright]
include = ['python/pydantic_core', 'tests/test_typing.py']
reportUnnecessaryTypeIgnoreComment = true

[tool.inline-snapshot.shortcuts]
fix = ["create", "fix"]

[tool.uv]
# this ensures that `uv run` doesn't actually build the package; a `make`
# command is needed to build
package = false
pydantic-pydantic-core-d771df5/python/000077500000000000000000000000001473051353300200345ustar00rootroot00000000000000pydantic-pydantic-core-d771df5/python/pydantic_core/000077500000000000000000000000001473051353300226575ustar00rootroot00000000000000pydantic-pydantic-core-d771df5/python/pydantic_core/__init__.py000066400000000000000000000101461473051353300247720ustar00rootroot00000000000000from __future__ import annotations import sys as _sys from typing import Any as _Any from ._pydantic_core import ( ArgsKwargs, MultiHostUrl, PydanticCustomError, PydanticKnownError, PydanticOmit, PydanticSerializationError, PydanticSerializationUnexpectedValue, PydanticUndefined, PydanticUndefinedType, PydanticUseDefault, SchemaError, SchemaSerializer, SchemaValidator, Some, TzInfo, Url, ValidationError, __version__, from_json, to_json, to_jsonable_python, validate_core_schema, ) from .core_schema import CoreConfig, CoreSchema, CoreSchemaType, ErrorType if _sys.version_info < (3, 11): from typing_extensions import NotRequired as _NotRequired else: from typing import NotRequired as _NotRequired if _sys.version_info < (3, 12): from typing_extensions import TypedDict as _TypedDict else: from typing import TypedDict as _TypedDict __all__ = [ '__version__', 'CoreConfig', 'CoreSchema', 'CoreSchemaType', 'SchemaValidator', 'SchemaSerializer', 'Some', 'Url', 'MultiHostUrl', 'ArgsKwargs', 'PydanticUndefined', 'PydanticUndefinedType', 'SchemaError', 'ErrorDetails', 'InitErrorDetails', 'ValidationError', 'PydanticCustomError', 'PydanticKnownError', 'PydanticOmit', 'PydanticUseDefault', 'PydanticSerializationError', 'PydanticSerializationUnexpectedValue', 'TzInfo', 'to_json', 'from_json', 'to_jsonable_python', 'validate_core_schema', ] class ErrorDetails(_TypedDict): type: str """ The type of error that occurred, this is an identifier designed for programmatic use that will change rarely or never. `type` is unique for each error message, and can hence be used as an identifier to build custom error messages. """ loc: tuple[int | str, ...] """Tuple of strings and ints identifying where in the schema the error occurred.""" msg: str """A human readable error message.""" input: _Any """The input data at this `loc` that caused the error.""" ctx: _NotRequired[dict[str, _Any]] """ Values which are required to render the error message, and could hence be useful in rendering custom error messages. Also useful for passing custom error data forward. """ class InitErrorDetails(_TypedDict): type: str | PydanticCustomError """The type of error that occurred, this should a "slug" identifier that changes rarely or never.""" loc: _NotRequired[tuple[int | str, ...]] """Tuple of strings and ints identifying where in the schema the error occurred.""" input: _Any """The input data at this `loc` that caused the error.""" ctx: _NotRequired[dict[str, _Any]] """ Values which are required to render the error message, and could hence be useful in rendering custom error messages. Also useful for passing custom error data forward. """ class ErrorTypeInfo(_TypedDict): """ Gives information about errors. 
""" type: ErrorType """The type of error that occurred, this should a "slug" identifier that changes rarely or never.""" message_template_python: str """String template to render a human readable error message from using context, when the input is Python.""" example_message_python: str """Example of a human readable error message, when the input is Python.""" message_template_json: _NotRequired[str] """String template to render a human readable error message from using context, when the input is JSON data.""" example_message_json: _NotRequired[str] """Example of a human readable error message, when the input is JSON data.""" example_context: dict[str, _Any] | None """Example of context values.""" class MultiHostHost(_TypedDict): """ A host part of a multi-host URL. """ username: str | None """The username part of this host, or `None`.""" password: str | None """The password part of this host, or `None`.""" host: str | None """The host part of this host, or `None`.""" port: int | None """The port part of this host, or `None`.""" pydantic-pydantic-core-d771df5/python/pydantic_core/_pydantic_core.pyi000066400000000000000000001207321473051353300263710ustar00rootroot00000000000000import datetime from collections.abc import Mapping from typing import Any, Callable, Generic, Literal, TypeVar, final from _typeshed import SupportsAllComparisons from typing_extensions import LiteralString, Self, TypeAlias from pydantic_core import ErrorDetails, ErrorTypeInfo, InitErrorDetails, MultiHostHost from pydantic_core.core_schema import CoreConfig, CoreSchema, ErrorType __all__ = [ '__version__', 'build_profile', 'build_info', '_recursion_limit', 'ArgsKwargs', 'SchemaValidator', 'SchemaSerializer', 'Url', 'MultiHostUrl', 'SchemaError', 'ValidationError', 'PydanticCustomError', 'PydanticKnownError', 'PydanticOmit', 'PydanticUseDefault', 'PydanticSerializationError', 'PydanticSerializationUnexpectedValue', 'PydanticUndefined', 'PydanticUndefinedType', 'Some', 'to_json', 'from_json', 'to_jsonable_python', 'list_all_errors', 'TzInfo', 'validate_core_schema', ] __version__: str build_profile: str build_info: str _recursion_limit: int _T = TypeVar('_T', default=Any, covariant=True) _StringInput: TypeAlias = 'dict[str, _StringInput]' @final class Some(Generic[_T]): """ Similar to Rust's [`Option::Some`](https://doc.rust-lang.org/std/option/enum.Option.html) type, this identifies a value as being present, and provides a way to access it. Generally used in a union with `None` to different between "some value which could be None" and no value. """ __match_args__ = ('value',) @property def value(self) -> _T: """ Returns the value wrapped by `Some`. """ @classmethod def __class_getitem__(cls, item: Any, /) -> type[Self]: ... @final class SchemaValidator: """ `SchemaValidator` is the Python wrapper for `pydantic-core`'s Rust validation logic, internally it owns one `CombinedValidator` which may in turn own more `CombinedValidator`s which make up the full schema validator. """ # note: pyo3 currently supports __new__, but not __init__, though we include __init__ stubs # and docstrings here (and in the following classes) for documentation purposes def __init__(self, schema: CoreSchema, config: CoreConfig | None = None) -> None: """Initializes the `SchemaValidator`. Arguments: schema: The `CoreSchema` to use for validation. config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to configure validation. """ def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: ... 
@property def title(self) -> str: """ The title of the schema, as used in the heading of [`ValidationError.__str__()`][pydantic_core.ValidationError]. """ def validate_python( self, input: Any, *, strict: bool | None = None, from_attributes: bool | None = None, context: Any | None = None, self_instance: Any | None = None, allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False, ) -> Any: """ Validate a Python object against the schema and return the validated object. Arguments: input: The Python object to validate. strict: Whether to validate the object in strict mode. If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. from_attributes: Whether to validate objects as inputs to models by extracting attributes. If `None`, the value of [`CoreConfig.from_attributes`][pydantic_core.core_schema.CoreConfig] is used. context: The context to use for validation, this is passed to functional validators as [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. self_instance: An instance of a model set attributes on from validation, this is used when running validation from the `__init__` method of a model. allow_partial: Whether to allow partial validation; if `True` errors in the last element of sequences and mappings are ignored. `'trailing-strings'` means any final unfinished JSON string is included in the result. Raises: ValidationError: If validation fails. Exception: Other error types maybe raised if internal errors occur. Returns: The validated object. """ def isinstance_python( self, input: Any, *, strict: bool | None = None, from_attributes: bool | None = None, context: Any | None = None, self_instance: Any | None = None, ) -> bool: """ Similar to [`validate_python()`][pydantic_core.SchemaValidator.validate_python] but returns a boolean. Arguments match `validate_python()`. This method will not raise `ValidationError`s but will raise internal errors. Returns: `True` if validation succeeds, `False` if validation fails. """ def validate_json( self, input: str | bytes | bytearray, *, strict: bool | None = None, context: Any | None = None, self_instance: Any | None = None, allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False, ) -> Any: """ Validate JSON data directly against the schema and return the validated Python object. This method should be significantly faster than `validate_python(json.loads(json_data))` as it avoids the need to create intermediate Python objects It also handles constructing the correct Python type even in strict mode, where `validate_python(json.loads(json_data))` would fail validation. Arguments: input: The JSON data to validate. strict: Whether to validate the object in strict mode. If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. context: The context to use for validation, this is passed to functional validators as [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. self_instance: An instance of a model set attributes on from validation. allow_partial: Whether to allow partial validation; if `True` incomplete JSON will be parsed successfully and errors in the last element of sequences and mappings are ignored. `'trailing-strings'` means any final unfinished JSON string is included in the result. Raises: ValidationError: If validation fails or if the JSON data is invalid. Exception: Other error types maybe raised if internal errors occur. Returns: The validated Python object. 
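        Example (a minimal sketch, assuming a plain `{'type': 'int'}` schema):

            v = SchemaValidator({'type': 'int'})
            assert v.validate_json('123') == 123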
""" def validate_strings( self, input: _StringInput, *, strict: bool | None = None, context: Any | None = None, allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False, ) -> Any: """ Validate a string against the schema and return the validated Python object. This is similar to `validate_json` but applies to scenarios where the input will be a string but not JSON data, e.g. URL fragments, query parameters, etc. Arguments: input: The input as a string, or bytes/bytearray if `strict=False`. strict: Whether to validate the object in strict mode. If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. context: The context to use for validation, this is passed to functional validators as [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. allow_partial: Whether to allow partial validation; if `True` errors in the last element of sequences and mappings are ignored. `'trailing-strings'` means any final unfinished JSON string is included in the result. Raises: ValidationError: If validation fails or if the JSON data is invalid. Exception: Other error types maybe raised if internal errors occur. Returns: The validated Python object. """ def validate_assignment( self, obj: Any, field_name: str, field_value: Any, *, strict: bool | None = None, from_attributes: bool | None = None, context: Any | None = None, ) -> dict[str, Any] | tuple[dict[str, Any], dict[str, Any] | None, set[str]]: """ Validate an assignment to a field on a model. Arguments: obj: The model instance being assigned to. field_name: The name of the field to validate assignment for. field_value: The value to assign to the field. strict: Whether to validate the object in strict mode. If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. from_attributes: Whether to validate objects as inputs to models by extracting attributes. If `None`, the value of [`CoreConfig.from_attributes`][pydantic_core.core_schema.CoreConfig] is used. context: The context to use for validation, this is passed to functional validators as [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. Raises: ValidationError: If validation fails. Exception: Other error types maybe raised if internal errors occur. Returns: Either the model dict or a tuple of `(model_data, model_extra, fields_set)` """ def get_default_value(self, *, strict: bool | None = None, context: Any = None) -> Some | None: """ Get the default value for the schema, including running default value validation. Arguments: strict: Whether to validate the default value in strict mode. If `None`, the value of [`CoreConfig.strict`][pydantic_core.core_schema.CoreConfig] is used. context: The context to use for validation, this is passed to functional validators as [`info.context`][pydantic_core.core_schema.ValidationInfo.context]. Raises: ValidationError: If validation fails. Exception: Other error types maybe raised if internal errors occur. Returns: `None` if the schema has no default value, otherwise a [`Some`][pydantic_core.Some] containing the default. """ # In reality, `bool` should be replaced by `Literal[True]` but mypy fails to correctly apply bidirectional type inference # (e.g. when using `{'a': {'b': True}}`). 
_IncEx: TypeAlias = set[int] | set[str] | Mapping[int, _IncEx | bool] | Mapping[str, _IncEx | bool] @final class SchemaSerializer: """ `SchemaSerializer` is the Python wrapper for `pydantic-core`'s Rust serialization logic, internally it owns one `CombinedSerializer` which may in turn own more `CombinedSerializer`s which make up the full schema serializer. """ def __init__(self, schema: CoreSchema, config: CoreConfig | None = None) -> None: """Initializes the `SchemaSerializer`. Arguments: schema: The `CoreSchema` to use for serialization. config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to to configure serialization. """ def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: ... def to_python( self, value: Any, *, mode: str | None = None, include: _IncEx | None = None, exclude: _IncEx | None = None, by_alias: bool = True, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, warnings: bool | Literal['none', 'warn', 'error'] = True, fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, context: Any | None = None, ) -> Any: """ Serialize/marshal a Python object to a Python object including transforming and filtering data. Arguments: value: The Python object to serialize. mode: The serialization mode to use, either `'python'` or `'json'`, defaults to `'python'`. In JSON mode, all values are converted to JSON compatible types, e.g. `None`, `int`, `float`, `str`, `list`, `dict`. include: A set of fields to include, if `None` all fields are included. exclude: A set of fields to exclude, if `None` no fields are excluded. by_alias: Whether to use the alias names of fields. exclude_unset: Whether to exclude fields that are not set, e.g. are not included in `__pydantic_fields_set__`. exclude_defaults: Whether to exclude fields that are equal to their default value. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. warnings: How to handle invalid fields. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. fallback: A function to call when an unknown value is encountered, if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. context: The context to use for serialization, this is passed to functional serializers as [`info.context`][pydantic_core.core_schema.SerializationInfo.context]. Raises: PydanticSerializationError: If serialization fails and no `fallback` function is provided. Returns: The serialized Python object. """ def to_json( self, value: Any, *, indent: int | None = None, include: _IncEx | None = None, exclude: _IncEx | None = None, by_alias: bool = True, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, warnings: bool | Literal['none', 'warn', 'error'] = True, fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, context: Any | None = None, ) -> bytes: """ Serialize a Python object to JSON including transforming and filtering data. Arguments: value: The Python object to serialize. indent: If `None`, the JSON will be compact, otherwise it will be pretty-printed with the indent provided. 
include: A set of fields to include, if `None` all fields are included. exclude: A set of fields to exclude, if `None` no fields are excluded. by_alias: Whether to use the alias names of fields. exclude_unset: Whether to exclude fields that are not set, e.g. are not included in `__pydantic_fields_set__`. exclude_defaults: Whether to exclude fields that are equal to their default value. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. warnings: How to handle invalid fields. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. fallback: A function to call when an unknown value is encountered, if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. context: The context to use for serialization, this is passed to functional serializers as [`info.context`][pydantic_core.core_schema.SerializationInfo.context]. Raises: PydanticSerializationError: If serialization fails and no `fallback` function is provided. Returns: JSON bytes. """ def to_json( value: Any, *, indent: int | None = None, include: _IncEx | None = None, exclude: _IncEx | None = None, by_alias: bool = True, exclude_none: bool = False, round_trip: bool = False, timedelta_mode: Literal['iso8601', 'float'] = 'iso8601', bytes_mode: Literal['utf8', 'base64', 'hex'] = 'utf8', inf_nan_mode: Literal['null', 'constants', 'strings'] = 'constants', serialize_unknown: bool = False, fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, context: Any | None = None, ) -> bytes: """ Serialize a Python object to JSON including transforming and filtering data. This is effectively a standalone version of [`SchemaSerializer.to_json`][pydantic_core.SchemaSerializer.to_json]. Arguments: value: The Python object to serialize. indent: If `None`, the JSON will be compact, otherwise it will be pretty-printed with the indent provided. include: A set of fields to include, if `None` all fields are included. exclude: A set of fields to exclude, if `None` no fields are excluded. by_alias: Whether to use the alias names of fields. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`. bytes_mode: How to serialize `bytes` objects, either `'utf8'`, `'base64'`, or `'hex'`. inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'`, `'constants'`, or `'strings'`. serialize_unknown: Attempt to serialize unknown types, `str(value)` will be used, if that fails `"<Unserializable {value_type} object>"` will be used. fallback: A function to call when an unknown value is encountered, if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. context: The context to use for serialization, this is passed to functional serializers as [`info.context`][pydantic_core.core_schema.SerializationInfo.context]. Raises: PydanticSerializationError: If serialization fails and no `fallback` function is provided. Returns: JSON bytes.
""" def from_json( data: str | bytes | bytearray, *, allow_inf_nan: bool = True, cache_strings: bool | Literal['all', 'keys', 'none'] = True, allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False, ) -> Any: """ Deserialize JSON data to a Python object. This is effectively a faster version of `json.loads()`, with some extra functionality. Arguments: data: The JSON data to deserialize. allow_inf_nan: Whether to allow `Infinity`, `-Infinity` and `NaN` values as `json.loads()` does by default. cache_strings: Whether to cache strings to avoid constructing new Python objects, this should have a significant impact on performance while increasing memory usage slightly, `all/True` means cache all strings, `keys` means cache only dict keys, `none/False` means no caching. allow_partial: Whether to allow partial deserialization, if `True` JSON data is returned if the end of the input is reached before the full object is deserialized, e.g. `["aa", "bb", "c` would return `['aa', 'bb']`. `'trailing-strings'` means any final unfinished JSON string is included in the result. Raises: ValueError: If deserialization fails. Returns: The deserialized Python object. """ def to_jsonable_python( value: Any, *, include: _IncEx | None = None, exclude: _IncEx | None = None, by_alias: bool = True, exclude_none: bool = False, round_trip: bool = False, timedelta_mode: Literal['iso8601', 'float'] = 'iso8601', bytes_mode: Literal['utf8', 'base64', 'hex'] = 'utf8', inf_nan_mode: Literal['null', 'constants', 'strings'] = 'constants', serialize_unknown: bool = False, fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, context: Any | None = None, ) -> Any: """ Serialize/marshal a Python object to a JSON-serializable Python object including transforming and filtering data. This is effectively a standalone version of [`SchemaSerializer.to_python(mode='json')`][pydantic_core.SchemaSerializer.to_python]. Args: value: The Python object to serialize. include: A set of fields to include, if `None` all fields are included. exclude: A set of fields to exclude, if `None` no fields are excluded. by_alias: Whether to use the alias names of fields. exclude_none: Whether to exclude fields that have a value of `None`. round_trip: Whether to enable serialization and validation round-trip support. timedelta_mode: How to serialize `timedelta` objects, either `'iso8601'` or `'float'`. bytes_mode: How to serialize `bytes` objects, either `'utf8'`, `'base64'`, or `'hex'`. inf_nan_mode: How to serialize `Infinity`, `-Infinity` and `NaN` values, either `'null'`, `'constants'`, or `'strings'`. serialize_unknown: Attempt to serialize unknown types, `str(value)` will be used, if that fails `""` will be used. fallback: A function to call when an unknown value is encountered, if `None` a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised. serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. context: The context to use for serialization, this is passed to functional serializers as [`info.context`][pydantic_core.core_schema.SerializationInfo.context]. Raises: PydanticSerializationError: If serialization fails and no `fallback` function is provided. Returns: The serialized Python object. """ class Url(SupportsAllComparisons): """ A URL type, internal logic uses the [url rust crate](https://docs.rs/url/latest/url/) originally developed by Mozilla. """ def __init__(self, url: str) -> None: ... def __new__(cls, url: str) -> Self: ... 
@property def scheme(self) -> str: ... @property def username(self) -> str | None: ... @property def password(self) -> str | None: ... @property def host(self) -> str | None: ... def unicode_host(self) -> str | None: ... @property def port(self) -> int | None: ... @property def path(self) -> str | None: ... @property def query(self) -> str | None: ... def query_params(self) -> list[tuple[str, str]]: ... @property def fragment(self) -> str | None: ... def unicode_string(self) -> str: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __deepcopy__(self, memo: dict) -> Self: ... @classmethod def build( cls, *, scheme: str, username: str | None = None, password: str | None = None, host: str, port: int | None = None, path: str | None = None, query: str | None = None, fragment: str | None = None, ) -> Self: ... class MultiHostUrl(SupportsAllComparisons): """ A URL type with support for multiple hosts, as used by some databases for DSNs, e.g. `https://foo.com,bar.com/path`. Internal URL logic uses the [url rust crate](https://docs.rs/url/latest/url/) originally developed by Mozilla. """ def __init__(self, url: str) -> None: ... def __new__(cls, url: str) -> Self: ... @property def scheme(self) -> str: ... @property def path(self) -> str | None: ... @property def query(self) -> str | None: ... def query_params(self) -> list[tuple[str, str]]: ... @property def fragment(self) -> str | None: ... def hosts(self) -> list[MultiHostHost]: ... def unicode_string(self) -> str: ... def __repr__(self) -> str: ... def __str__(self) -> str: ... def __deepcopy__(self, memo: dict) -> Self: ... @classmethod def build( cls, *, scheme: str, hosts: list[MultiHostHost] | None = None, username: str | None = None, password: str | None = None, host: str | None = None, port: int | None = None, path: str | None = None, query: str | None = None, fragment: str | None = None, ) -> Self: ... @final class SchemaError(Exception): """ Information about errors that occur while building a [`SchemaValidator`][pydantic_core.SchemaValidator] or [`SchemaSerializer`][pydantic_core.SchemaSerializer]. """ def error_count(self) -> int: """ Returns: The number of errors in the schema. """ def errors(self) -> list[ErrorDetails]: """ Returns: A list of [`ErrorDetails`][pydantic_core.ErrorDetails] for each error in the schema. """ class ValidationError(ValueError): """ `ValidationError` is the exception raised by `pydantic-core` when validation fails; it contains a list of errors which detail why validation failed. """ @classmethod def from_exception_data( cls, title: str, line_errors: list[InitErrorDetails], input_type: Literal['python', 'json'] = 'python', hide_input: bool = False, ) -> Self: """ Python constructor for a Validation Error. The API for constructing validation errors will probably change in the future, hence the static method rather than `__init__`. Arguments: title: The title of the error, as used in the heading of `str(validation_error)` line_errors: A list of [`InitErrorDetails`][pydantic_core.InitErrorDetails] which contain information about errors that occurred during validation. input_type: Whether the error is for a Python object or JSON. hide_input: Whether to hide the input value in the error message. """ @property def title(self) -> str: """ The title of the error, as used in the heading of `str(validation_error)`. """ def error_count(self) -> int: """ Returns: The number of errors in the validation error.
""" def errors( self, *, include_url: bool = True, include_context: bool = True, include_input: bool = True ) -> list[ErrorDetails]: """ Details about each error in the validation error. Args: include_url: Whether to include a URL to documentation on the error each error. include_context: Whether to include the context of each error. include_input: Whether to include the input value of each error. Returns: A list of [`ErrorDetails`][pydantic_core.ErrorDetails] for each error in the validation error. """ def json( self, *, indent: int | None = None, include_url: bool = True, include_context: bool = True, include_input: bool = True, ) -> str: """ Same as [`errors()`][pydantic_core.ValidationError.errors] but returns a JSON string. Args: indent: The number of spaces to indent the JSON by, or `None` for no indentation - compact JSON. include_url: Whether to include a URL to documentation on the error each error. include_context: Whether to include the context of each error. include_input: Whether to include the input value of each error. Returns: a JSON string. """ def __repr__(self) -> str: """ A string representation of the validation error. Whether or not documentation URLs are included in the repr is controlled by the environment variable `PYDANTIC_ERRORS_INCLUDE_URL` being set to `1` or `true`; by default, URLs are shown. Due to implementation details, this environment variable can only be set once, before the first validation error is created. """ class PydanticCustomError(ValueError): """A custom exception providing flexible error handling for Pydantic validators. You can raise this error in custom validators when you'd like flexibility in regards to the error type, message, and context. Example: ```py from pydantic_core import PydanticCustomError def custom_validator(v) -> None: if v <= 10: raise PydanticCustomError('custom_value_error', 'Value must be greater than {value}', {'value': 10, 'extra_context': 'extra_data'}) return v ``` """ def __init__( self, error_type: LiteralString, message_template: LiteralString, context: dict[str, Any] | None = None ) -> None: """Initializes the `PydanticCustomError`. Arguments: error_type: The error type. message_template: The message template. context: The data to inject into the message template. """ def __new__( cls, error_type: LiteralString, message_template: LiteralString, context: dict[str, Any] | None = None ) -> Self: ... @property def context(self) -> dict[str, Any] | None: """Values which are required to render the error message, and could hence be useful in passing error data forward.""" @property def type(self) -> str: """The error type associated with the error. For consistency with Pydantic, this is typically a snake_case string.""" @property def message_template(self) -> str: """The message template associated with the error. This is a string that can be formatted with context variables in `{curly_braces}`.""" def message(self) -> str: """The formatted message associated with the error. This presents as the message template with context variables appropriately injected.""" @final class PydanticKnownError(ValueError): """A helper class for raising exceptions that mimic Pydantic's built-in exceptions, with more flexibility in regards to context. Unlike [`PydanticCustomError`][pydantic_core.PydanticCustomError], the `error_type` argument must be a known `ErrorType`. 
Example: ```py from pydantic_core import PydanticKnownError def custom_validator(v): if v <= 10: raise PydanticKnownError(error_type='greater_than', context={'gt': 10}) return v ``` """ def __init__(self, error_type: ErrorType, context: dict[str, Any] | None = None) -> None: """Initializes the `PydanticKnownError`. Arguments: error_type: The error type. context: The data to inject into the message template. """ def __new__(cls, error_type: ErrorType, context: dict[str, Any] | None = None) -> Self: ... @property def context(self) -> dict[str, Any] | None: """Values which are required to render the error message, and could hence be useful in passing error data forward.""" @property def type(self) -> ErrorType: """The type of the error.""" @property def message_template(self) -> str: """The message template associated with the provided error type. This is a string that can be formatted with context variables in `{curly_braces}`.""" def message(self) -> str: """The formatted message associated with the error. This presents as the message template with context variables appropriately injected.""" @final class PydanticOmit(Exception): """An exception to signal that a field should be omitted from a generated result. This could span from omitting a field from a JSON Schema to omitting a field from a serialized result. Upcoming: more robust support for using PydanticOmit in custom serializers is still in development. Right now, this is primarily used in the JSON Schema generation process. Example: ```py from typing import Callable from pydantic_core import PydanticOmit from pydantic import BaseModel from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue class MyGenerateJsonSchema(GenerateJsonSchema): def handle_invalid_for_json_schema(self, schema, error_info) -> JsonSchemaValue: raise PydanticOmit class Predicate(BaseModel): name: str = 'no-op' func: Callable = lambda x: x instance_example = Predicate() validation_schema = instance_example.model_json_schema(schema_generator=MyGenerateJsonSchema, mode='validation') print(validation_schema) ''' {'properties': {'name': {'default': 'no-op', 'title': 'Name', 'type': 'string'}}, 'title': 'Predicate', 'type': 'object'} ''' ``` For a more in-depth example and explanation, see the [customizing JSON schema](../concepts/json_schema.md#customizing-the-json-schema-generation-process) docs. """ def __new__(cls) -> Self: ... @final class PydanticUseDefault(Exception): """An exception to signal that standard validation either failed or should be skipped, and the default value should be used instead. This exception can be raised in custom validation functions to redirect the flow of validation. Example: ```py from pydantic_core import PydanticUseDefault from datetime import datetime from pydantic import BaseModel, field_validator class Event(BaseModel): name: str = 'meeting' time: datetime @field_validator('name', mode='plain') def name_must_be_present(cls, v) -> str: if not v or not isinstance(v, str): raise PydanticUseDefault() return v event1 = Event(name='party', time=datetime(2024, 1, 1, 12, 0, 0)) print(repr(event1)) # > Event(name='party', time=datetime.datetime(2024, 1, 1, 12, 0)) event2 = Event(time=datetime(2024, 1, 1, 12, 0, 0)) print(repr(event2)) # > Event(name='meeting', time=datetime.datetime(2024, 1, 1, 12, 0)) ``` For an additional example, see the [validating partial json data](../concepts/json.md#partial-json-parsing) section of the Pydantic documentation. """ def __new__(cls) -> Self: ...
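# A usage sketch for the `ValidationError` API defined earlier in this file,
# assuming a bare int schema (error output trimmed for brevity):
#
#     from pydantic_core import SchemaValidator, ValidationError, core_schema
#
#     v = SchemaValidator(core_schema.int_schema())
#     try:
#         v.validate_python('not an int')
#     except ValidationError as err:
#         assert err.error_count() == 1
#         print(err.errors(include_url=False))
#         #> [{'type': 'int_parsing', 'loc': (), 'msg': 'Input should be a valid
#         #   integer, unable to parse string as an integer', 'input': 'not an int'}]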
@final class PydanticSerializationError(ValueError): """An error raised when an issue occurs during serialization. In custom serializers, this error can be used to indicate that serialization has failed. """ def __init__(self, message: str) -> None: """Initializes the `PydanticSerializationError`. Arguments: message: The message associated with the error. """ def __new__(cls, message: str) -> Self: ... @final class PydanticSerializationUnexpectedValue(ValueError): """An error raised when an unexpected value is encountered during serialization. This error is often caught and coerced into a warning, as `pydantic-core` generally makes a best attempt at serializing values, in contrast with validation where errors are eagerly raised. Example: ```py from pydantic import BaseModel, field_serializer from pydantic_core import PydanticSerializationUnexpectedValue class BasicPoint(BaseModel): x: int y: int @field_serializer('*') def serialize(self, v): if not isinstance(v, int): raise PydanticSerializationUnexpectedValue(f'Expected type `int`, got {type(v)} with value {v}') return v point = BasicPoint(x=1, y=2) # some sort of mutation point.x = 'a' print(point.model_dump()) ''' UserWarning: Pydantic serializer warnings: PydanticSerializationUnexpectedValue(Expected type `int`, got <class 'str'> with value a) return self.__pydantic_serializer__.to_python( {'x': 'a', 'y': 2} ''' ``` This is often used internally in `pydantic-core` when unexpected types are encountered during serialization, but it can also be used by users in custom serializers, as seen above. """ def __init__(self, message: str) -> None: """Initializes the `PydanticSerializationUnexpectedValue`. Arguments: message: The message associated with the unexpected value. """ def __new__(cls, message: str | None = None) -> Self: ... @final class ArgsKwargs: """A construct used to store arguments and keyword arguments for a function call. This data structure is generally used to store information for core schemas associated with functions (like in an arguments schema). It is also currently used for some validation against dataclasses. Example: ```py from pydantic.dataclasses import dataclass from pydantic import model_validator @dataclass class Model: a: int b: int @model_validator(mode="before") @classmethod def no_op_validator(cls, values): print(values) return values Model(1, b=2) #> ArgsKwargs((1,), {"b": 2}) Model(1, 2) #> ArgsKwargs((1, 2), {}) Model(a=1, b=2) #> ArgsKwargs((), {"a": 1, "b": 2}) ``` """ def __init__(self, args: tuple[Any, ...], kwargs: dict[str, Any] | None = None) -> None: """Initializes the `ArgsKwargs`. Arguments: args: The arguments (inherently ordered) for a function call. kwargs: The keyword arguments for a function call. """ def __new__(cls, args: tuple[Any, ...], kwargs: dict[str, Any] | None = None) -> Self: ... @property def args(self) -> tuple[Any, ...]: """The arguments (inherently ordered) for a function call.""" @property def kwargs(self) -> dict[str, Any] | None: """The keyword arguments for a function call.""" @final class PydanticUndefinedType: """A type used as a sentinel for undefined values.""" def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any) -> Self: ... PydanticUndefined: PydanticUndefinedType def list_all_errors() -> list[ErrorTypeInfo]: """ Get information about all built-in errors. Returns: A list of `ErrorTypeInfo` typed dicts.
""" @final class TzInfo(datetime.tzinfo): """An `pydantic-core` implementation of the abstract [`datetime.tzinfo`] class.""" # Docstrings for attributes sourced from the abstract base class, [`datetime.tzinfo`](https://docs.python.org/3/library/datetime.html#datetime.tzinfo). def tzname(self, dt: datetime.datetime | None) -> str | None: """Return the time zone name corresponding to the [`datetime`][datetime.datetime] object _dt_, as a string. For more info, see [`tzinfo.tzname`][datetime.tzinfo.tzname]. """ def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: """Return offset of local time from UTC, as a [`timedelta`][datetime.timedelta] object that is positive east of UTC. If local time is west of UTC, this should be negative. More info can be found at [`tzinfo.utcoffset`][datetime.tzinfo.utcoffset]. """ def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: """Return the daylight saving time (DST) adjustment, as a [`timedelta`][datetime.timedelta] object or `None` if DST information isn’t known. More info can be found at[`tzinfo.dst`][datetime.tzinfo.dst].""" def fromutc(self, dt: datetime.datetime) -> datetime.datetime: """Adjust the date and time data associated datetime object _dt_, returning an equivalent datetime in self’s local time. More info can be found at [`tzinfo.fromutc`][datetime.tzinfo.fromutc].""" def __deepcopy__(self, _memo: dict[Any, Any]) -> TzInfo: ... def validate_core_schema(schema: CoreSchema, *, strict: bool | None = None) -> CoreSchema: """Validate a core schema. This currently uses lax mode for validation (i.e. will coerce strings to dates and such) but may use strict mode in the future. We may also remove this function altogether, do not rely on it being present if you are using pydantic-core directly. """ pydantic-pydantic-core-d771df5/python/pydantic_core/core_schema.py000066400000000000000000004344371473051353300255200ustar00rootroot00000000000000""" This module contains definitions to build schemas which `pydantic_core` can validate and serialize. """ from __future__ import annotations as _annotations import sys import warnings from collections.abc import Mapping from datetime import date, datetime, time, timedelta from decimal import Decimal from typing import TYPE_CHECKING, Any, Callable, Dict, Hashable, List, Pattern, Set, Tuple, Type, Union from typing_extensions import deprecated if sys.version_info < (3, 12): from typing_extensions import TypedDict else: from typing import TypedDict if sys.version_info < (3, 11): from typing_extensions import Protocol, Required, TypeAlias else: from typing import Protocol, Required, TypeAlias if sys.version_info < (3, 9): from typing_extensions import Literal else: from typing import Literal if TYPE_CHECKING: from pydantic_core import PydanticUndefined else: # The initial build of pydantic_core requires PydanticUndefined to generate # the core schema; so we need to conditionally skip it. mypy doesn't like # this at all, hence the TYPE_CHECKING branch above. try: from pydantic_core import PydanticUndefined except ImportError: PydanticUndefined = object() ExtraBehavior = Literal['allow', 'forbid', 'ignore'] class CoreConfig(TypedDict, total=False): """ Base class for schema configuration options. Attributes: title: The name of the configuration. strict: Whether the configuration should strictly adhere to specified rules. extra_fields_behavior: The behavior for handling extra fields. typed_dict_total: Whether the TypedDict should be considered total. Default is `True`. 
from_attributes: Whether to use attributes for models, dataclasses, and tagged union keys. loc_by_alias: Whether to use the used alias (or first alias for "field required" errors) instead of `field_names` to construct error `loc`s. Default is `True`. revalidate_instances: Whether instances of models and dataclasses should re-validate. Default is 'never'. validate_default: Whether to validate default values during validation. Default is `False`. populate_by_name: Whether an aliased field may be populated by its name as given by the model attribute, as well as the alias. (Replaces 'allow_population_by_field_name' in Pydantic v1.) Default is `False`. str_max_length: The maximum length for string fields. str_min_length: The minimum length for string fields. str_strip_whitespace: Whether to strip whitespace from string fields. str_to_lower: Whether to convert string fields to lowercase. str_to_upper: Whether to convert string fields to uppercase. allow_inf_nan: Whether to allow infinity and NaN values for float fields. Default is `True`. ser_json_timedelta: The serialization option for `timedelta` values. Default is 'iso8601'. ser_json_bytes: The serialization option for `bytes` values. Default is 'utf8'. ser_json_inf_nan: The serialization option for infinity and NaN values in float fields. Default is 'null'. val_json_bytes: The validation option for `bytes` values, complementing ser_json_bytes. Default is 'utf8'. hide_input_in_errors: Whether to hide input data from `ValidationError` representation. validation_error_cause: Whether to add user Python exceptions to the `__cause__` of a `ValidationError`. Requires the `exceptiongroup` backport before Python 3.11. coerce_numbers_to_str: Whether to enable coercion of any `Number` type to `str` (not applicable in `strict` mode). regex_engine: The regex engine to use for regex pattern validation. Default is 'rust-regex'. See `StringSchema`. cache_strings: Whether to cache strings. Default is `True`; `True` or `'all'` is required to cache strings during general validation, since validators don't know if they're in a key or a value.
""" title: str strict: bool # settings related to typed dicts, model fields, dataclass fields extra_fields_behavior: ExtraBehavior typed_dict_total: bool # default: True # used for models, dataclasses, and tagged union keys from_attributes: bool # whether to use the used alias (or first alias for "field required" errors) instead of field_names # to construct error `loc`s, default True loc_by_alias: bool # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never' revalidate_instances: Literal['always', 'never', 'subclass-instances'] # whether to validate default values during validation, default False validate_default: bool # used on typed-dicts and arguments populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1 # fields related to string fields only str_max_length: int str_min_length: int str_strip_whitespace: bool str_to_lower: bool str_to_upper: bool # fields related to float fields only allow_inf_nan: bool # default: True # the config options are used to customise serialization to JSON ser_json_timedelta: Literal['iso8601', 'float'] # default: 'iso8601' ser_json_bytes: Literal['utf8', 'base64', 'hex'] # default: 'utf8' ser_json_inf_nan: Literal['null', 'constants', 'strings'] # default: 'null' val_json_bytes: Literal['utf8', 'base64', 'hex'] # default: 'utf8' # used to hide input data from ValidationError repr hide_input_in_errors: bool validation_error_cause: bool # default: False coerce_numbers_to_str: bool # default: False regex_engine: Literal['rust-regex', 'python-re'] # default: 'rust-regex' cache_strings: Union[bool, Literal['all', 'keys', 'none']] # default: 'True' IncExCall: TypeAlias = 'set[int | str] | dict[int | str, IncExCall] | None' class SerializationInfo(Protocol): @property def include(self) -> IncExCall: ... @property def exclude(self) -> IncExCall: ... @property def context(self) -> Any | None: """Current serialization context.""" @property def mode(self) -> str: ... @property def by_alias(self) -> bool: ... @property def exclude_unset(self) -> bool: ... @property def exclude_defaults(self) -> bool: ... @property def exclude_none(self) -> bool: ... @property def serialize_as_any(self) -> bool: ... def round_trip(self) -> bool: ... def mode_is_json(self) -> bool: ... def __str__(self) -> str: ... def __repr__(self) -> str: ... class FieldSerializationInfo(SerializationInfo, Protocol): @property def field_name(self) -> str: ... class ValidationInfo(Protocol): """ Argument passed to validation functions. """ @property def context(self) -> Any | None: """Current validation context.""" ... @property def config(self) -> CoreConfig | None: """The CoreConfig that applies to this validation.""" ... @property def mode(self) -> Literal['python', 'json']: """The type of input data we are currently validating""" ... @property def data(self) -> Dict[str, Any]: """The data being validated for this model.""" ... @property def field_name(self) -> str | None: """ The name of the current field being validated if this validator is attached to a model field. """ ... 
ExpectedSerializationTypes = Literal[ 'none', 'int', 'bool', 'float', 'str', 'bytes', 'bytearray', 'list', 'tuple', 'set', 'frozenset', 'generator', 'dict', 'datetime', 'date', 'time', 'timedelta', 'url', 'multi-host-url', 'json', 'uuid', 'any', ] class SimpleSerSchema(TypedDict, total=False): type: Required[ExpectedSerializationTypes] def simple_ser_schema(type: ExpectedSerializationTypes) -> SimpleSerSchema: """ Returns a schema for serialization with a custom type. Args: type: The type to use for serialization """ return SimpleSerSchema(type=type) # (input_value: Any, /) -> Any GeneralPlainNoInfoSerializerFunction = Callable[[Any], Any] # (input_value: Any, info: SerializationInfo, /) -> Any GeneralPlainInfoSerializerFunction = Callable[[Any, SerializationInfo], Any] # (model: Any, input_value: Any, /) -> Any FieldPlainNoInfoSerializerFunction = Callable[[Any, Any], Any] # (model: Any, input_value: Any, info: FieldSerializationInfo, /) -> Any FieldPlainInfoSerializerFunction = Callable[[Any, Any, FieldSerializationInfo], Any] SerializerFunction = Union[ GeneralPlainNoInfoSerializerFunction, GeneralPlainInfoSerializerFunction, FieldPlainNoInfoSerializerFunction, FieldPlainInfoSerializerFunction, ] WhenUsed = Literal['always', 'unless-none', 'json', 'json-unless-none'] """ Values have the following meanings: * `'always'` means always use * `'unless-none'` means use unless the value is `None` * `'json'` means use when serializing to JSON * `'json-unless-none'` means use when serializing to JSON and the value is not `None` """ class PlainSerializerFunctionSerSchema(TypedDict, total=False): type: Required[Literal['function-plain']] function: Required[SerializerFunction] is_field_serializer: bool # default False info_arg: bool # default False return_schema: CoreSchema # if omitted, AnySchema is used when_used: WhenUsed # default: 'always' def plain_serializer_function_ser_schema( function: SerializerFunction, *, is_field_serializer: bool | None = None, info_arg: bool | None = None, return_schema: CoreSchema | None = None, when_used: WhenUsed = 'always', ) -> PlainSerializerFunctionSerSchema: """ Returns a schema for serialization with a function, can be either a "general" or "field" function. Args: function: The function to use for serialization is_field_serializer: Whether the serializer is for a field, e.g. takes `model` as the first argument, and `info` includes `field_name` info_arg: Whether the function takes an `info` argument return_schema: Schema to use for serializing return value when_used: When the function should be called """ if when_used == 'always': # just to avoid extra elements in schema, and to use the actual default defined in rust when_used = None # type: ignore return _dict_not_none( type='function-plain', function=function, is_field_serializer=is_field_serializer, info_arg=info_arg, return_schema=return_schema, when_used=when_used, ) class SerializerFunctionWrapHandler(Protocol): # pragma: no cover def __call__(self, input_value: Any, index_key: int | str | None = None, /) -> Any: ...
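# A sketch tying the pieces above together: attach a plain serializer function
# to an int schema so that JSON serialization goes through `str` (the schema
# and values here are illustrative):
#
#     from pydantic_core import SchemaSerializer, core_schema
#
#     s = SchemaSerializer(
#         core_schema.int_schema(
#             serialization=core_schema.plain_serializer_function_ser_schema(
#                 str, when_used='json'
#             )
#         )
#     )
#     assert s.to_python(42) == 42     # python mode: function not used
#     assert s.to_json(42) == b'"42"'  # json mode: serialized via str()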
# (input_value: Any, serializer: SerializerFunctionWrapHandler, /) -> Any GeneralWrapNoInfoSerializerFunction = Callable[[Any, SerializerFunctionWrapHandler], Any] # (input_value: Any, serializer: SerializerFunctionWrapHandler, info: SerializationInfo, /) -> Any GeneralWrapInfoSerializerFunction = Callable[[Any, SerializerFunctionWrapHandler, SerializationInfo], Any] # (model: Any, input_value: Any, serializer: SerializerFunctionWrapHandler, /) -> Any FieldWrapNoInfoSerializerFunction = Callable[[Any, Any, SerializerFunctionWrapHandler], Any] # (model: Any, input_value: Any, serializer: SerializerFunctionWrapHandler, info: FieldSerializationInfo, /) -> Any FieldWrapInfoSerializerFunction = Callable[[Any, Any, SerializerFunctionWrapHandler, FieldSerializationInfo], Any] WrapSerializerFunction = Union[ GeneralWrapNoInfoSerializerFunction, GeneralWrapInfoSerializerFunction, FieldWrapNoInfoSerializerFunction, FieldWrapInfoSerializerFunction, ] class WrapSerializerFunctionSerSchema(TypedDict, total=False): type: Required[Literal['function-wrap']] function: Required[WrapSerializerFunction] is_field_serializer: bool # default False info_arg: bool # default False schema: CoreSchema # if omitted, the schema on which this serializer is defined is used return_schema: CoreSchema # if omitted, AnySchema is used when_used: WhenUsed # default: 'always' def wrap_serializer_function_ser_schema( function: WrapSerializerFunction, *, is_field_serializer: bool | None = None, info_arg: bool | None = None, schema: CoreSchema | None = None, return_schema: CoreSchema | None = None, when_used: WhenUsed = 'always', ) -> WrapSerializerFunctionSerSchema: """ Returns a schema for serialization with a wrap function, can be either a "general" or "field" function. Args: function: The function to use for serialization is_field_serializer: Whether the serializer is for a field, e.g. takes `model` as the first argument, and `info` includes `field_name` info_arg: Whether the function takes an `info` argument schema: The schema to use for the inner serialization return_schema: Schema to use for serializing return value when_used: When the function should be called """ if when_used == 'always': # just to avoid extra elements in schema, and to use the actual default defined in rust when_used = None # type: ignore return _dict_not_none( type='function-wrap', function=function, is_field_serializer=is_field_serializer, info_arg=info_arg, schema=schema, return_schema=return_schema, when_used=when_used, ) class FormatSerSchema(TypedDict, total=False): type: Required[Literal['format']] formatting_string: Required[str] when_used: WhenUsed # default: 'json-unless-none' def format_ser_schema(formatting_string: str, *, when_used: WhenUsed = 'json-unless-none') -> FormatSerSchema: """ Returns a schema for serialization using python's `format` method. 
Args: formatting_string: String defining the format to use when_used: Same meaning as for [plain_serializer_function_ser_schema], but with a different default """ if when_used == 'json-unless-none': # just to avoid extra elements in schema, and to use the actual default defined in rust when_used = None # type: ignore return _dict_not_none(type='format', formatting_string=formatting_string, when_used=when_used) class ToStringSerSchema(TypedDict, total=False): type: Required[Literal['to-string']] when_used: WhenUsed # default: 'json-unless-none' def to_string_ser_schema(*, when_used: WhenUsed = 'json-unless-none') -> ToStringSerSchema: """ Returns a schema for serialization using python's `str()` / `__str__` method. Args: when_used: Same meaning as for [plain_serializer_function_ser_schema], but with a different default """ s = dict(type='to-string') if when_used != 'json-unless-none': # just to avoid extra elements in schema, and to use the actual default defined in rust s['when_used'] = when_used return s # type: ignore class ModelSerSchema(TypedDict, total=False): type: Required[Literal['model']] cls: Required[Type[Any]] schema: Required[CoreSchema] def model_ser_schema(cls: Type[Any], schema: CoreSchema) -> ModelSerSchema: """ Returns a schema for serialization using a model. Args: cls: The expected class type, used to generate warnings if the wrong type is passed schema: Internal schema to use to serialize the model dict """ return ModelSerSchema(type='model', cls=cls, schema=schema) SerSchema = Union[ SimpleSerSchema, PlainSerializerFunctionSerSchema, WrapSerializerFunctionSerSchema, FormatSerSchema, ToStringSerSchema, ModelSerSchema, ] class InvalidSchema(TypedDict, total=False): type: Required[Literal['invalid']] ref: str metadata: Dict[str, Any] # note, we never plan to use this, but include it for type checking purposes to match # all other CoreSchema union members serialization: SerSchema def invalid_schema(ref: str | None = None, metadata: Dict[str, Any] | None = None) -> InvalidSchema: """ Returns an invalid schema, used to indicate that a schema is invalid. Args: ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core """ return _dict_not_none(type='invalid', ref=ref, metadata=metadata) class ComputedField(TypedDict, total=False): type: Required[Literal['computed-field']] property_name: Required[str] return_schema: Required[CoreSchema] alias: str metadata: Dict[str, Any] def computed_field( property_name: str, return_schema: CoreSchema, *, alias: str | None = None, metadata: Dict[str, Any] | None = None ) -> ComputedField: """ ComputedFields are properties of a model or dataclass that are included in serialization.
Args: property_name: The name of the property on the model or dataclass return_schema: The schema used for the type returned by the computed field alias: The name to use in the serialized output metadata: Any other information you want to include with the schema, not used by pydantic-core """ return _dict_not_none( type='computed-field', property_name=property_name, return_schema=return_schema, alias=alias, metadata=metadata ) class AnySchema(TypedDict, total=False): type: Required[Literal['any']] ref: str metadata: Dict[str, Any] serialization: SerSchema def any_schema( *, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None ) -> AnySchema: """ Returns a schema that matches any value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.any_schema() v = SchemaValidator(schema) assert v.validate_python(1) == 1 ``` Args: ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none(type='any', ref=ref, metadata=metadata, serialization=serialization) class NoneSchema(TypedDict, total=False): type: Required[Literal['none']] ref: str metadata: Dict[str, Any] serialization: SerSchema def none_schema( *, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None ) -> NoneSchema: """ Returns a schema that matches a None value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.none_schema() v = SchemaValidator(schema) assert v.validate_python(None) is None ``` Args: ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none(type='none', ref=ref, metadata=metadata, serialization=serialization) class BoolSchema(TypedDict, total=False): type: Required[Literal['bool']] strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def bool_schema( strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> BoolSchema: """ Returns a schema that matches a bool value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.bool_schema() v = SchemaValidator(schema) assert v.validate_python('True') is True ``` Args: strict: Whether the value should be a bool or a value that can be converted to a bool ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none(type='bool', strict=strict, ref=ref, metadata=metadata, serialization=serialization) class IntSchema(TypedDict, total=False): type: Required[Literal['int']] multiple_of: int le: int ge: int lt: int gt: int strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def int_schema( *, multiple_of: int | None = None, le: int | None = None, ge: int | None = None, lt: int | None = None, gt: int | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> IntSchema: """ Returns a schema that matches 
an int value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.int_schema(multiple_of=2, le=6, ge=2) v = SchemaValidator(schema) assert v.validate_python('4') == 4 ``` Args: multiple_of: The value must be a multiple of this number le: The value must be less than or equal to this number ge: The value must be greater than or equal to this number lt: The value must be strictly less than this number gt: The value must be strictly greater than this number strict: Whether the value should be an int or a value that can be converted to an int ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='int', multiple_of=multiple_of, le=le, ge=ge, lt=lt, gt=gt, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class FloatSchema(TypedDict, total=False): type: Required[Literal['float']] allow_inf_nan: bool # whether 'NaN', '+inf', '-inf' should be forbidden. default: True multiple_of: float le: float ge: float lt: float gt: float strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def float_schema( *, allow_inf_nan: bool | None = None, multiple_of: float | None = None, le: float | None = None, ge: float | None = None, lt: float | None = None, gt: float | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> FloatSchema: """ Returns a schema that matches a float value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.float_schema(le=0.8, ge=0.2) v = SchemaValidator(schema) assert v.validate_python('0.5') == 0.5 ``` Args: allow_inf_nan: Whether to allow inf and nan values multiple_of: The value must be a multiple of this number le: The value must be less than or equal to this number ge: The value must be greater than or equal to this number lt: The value must be strictly less than this number gt: The value must be strictly greater than this number strict: Whether the value should be a float or a value that can be converted to a float ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='float', allow_inf_nan=allow_inf_nan, multiple_of=multiple_of, le=le, ge=ge, lt=lt, gt=gt, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class DecimalSchema(TypedDict, total=False): type: Required[Literal['decimal']] allow_inf_nan: bool # whether 'NaN', '+inf', '-inf' should be forbidden.
default: False multiple_of: Decimal le: Decimal ge: Decimal lt: Decimal gt: Decimal max_digits: int decimal_places: int strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def decimal_schema( *, allow_inf_nan: bool | None = None, multiple_of: Decimal | None = None, le: Decimal | None = None, ge: Decimal | None = None, lt: Decimal | None = None, gt: Decimal | None = None, max_digits: int | None = None, decimal_places: int | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> DecimalSchema: """ Returns a schema that matches a decimal value, e.g.: ```py from decimal import Decimal from pydantic_core import SchemaValidator, core_schema schema = core_schema.decimal_schema(le=0.8, ge=0.2) v = SchemaValidator(schema) assert v.validate_python('0.5') == Decimal('0.5') ``` Args: allow_inf_nan: Whether to allow inf and nan values multiple_of: The value must be a multiple of this number le: The value must be less than or equal to this number ge: The value must be greater than or equal to this number lt: The value must be strictly less than this number gt: The value must be strictly greater than this number max_digits: The maximum number of decimal digits allowed decimal_places: The maximum number of decimal places allowed strict: Whether the value should be a `Decimal` or a value that can be converted to a `Decimal` ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='decimal', gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class ComplexSchema(TypedDict, total=False): type: Required[Literal['complex']] strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def complex_schema( *, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> ComplexSchema: """ Returns a schema that matches a complex value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.complex_schema() v = SchemaValidator(schema) assert v.validate_python('1+2j') == complex(1, 2) assert v.validate_python(complex(1, 2)) == complex(1, 2) ``` Args: strict: Whether the value should be a complex object instance or a value that can be converted to a complex object ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='complex', strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class StringSchema(TypedDict, total=False): type: Required[Literal['str']] pattern: Union[str, Pattern[str]] max_length: int min_length: int strip_whitespace: bool to_lower: bool to_upper: bool regex_engine: Literal['rust-regex', 'python-re'] # default: 'rust-regex' strict: bool coerce_numbers_to_str: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def str_schema( *, pattern: str | Pattern[str] | None = None, max_length: int | None = None, min_length: int | None = None, strip_whitespace: bool | None = None, to_lower: bool |
None = None, to_upper: bool | None = None, regex_engine: Literal['rust-regex', 'python-re'] | None = None, strict: bool | None = None, coerce_numbers_to_str: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> StringSchema: """ Returns a schema that matches a string value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.str_schema(max_length=10, min_length=2) v = SchemaValidator(schema) assert v.validate_python('hello') == 'hello' ``` Args: pattern: A regex pattern that the value must match max_length: The value must be at most this length min_length: The value must be at least this length strip_whitespace: Whether to strip whitespace from the value to_lower: Whether to convert the value to lowercase to_upper: Whether to convert the value to uppercase regex_engine: The regex engine to use for pattern validation. Default is 'rust-regex'. - `rust-regex` uses the [`regex`](https://docs.rs/regex) Rust crate, which is non-backtracking and therefore more DDoS resistant, but does not support all regex features. - `python-re` uses the [`re`](https://docs.python.org/3/library/re.html) module, which supports all regex features, but may be slower. strict: Whether the value should be a string or a value that can be converted to a string coerce_numbers_to_str: Whether to enable coercion of any `Number` type to `str` (not applicable in `strict` mode). ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='str', pattern=pattern, max_length=max_length, min_length=min_length, strip_whitespace=strip_whitespace, to_lower=to_lower, to_upper=to_upper, regex_engine=regex_engine, strict=strict, coerce_numbers_to_str=coerce_numbers_to_str, ref=ref, metadata=metadata, serialization=serialization, ) class BytesSchema(TypedDict, total=False): type: Required[Literal['bytes']] max_length: int min_length: int strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def bytes_schema( *, max_length: int | None = None, min_length: int | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> BytesSchema: """ Returns a schema that matches a bytes value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.bytes_schema(max_length=10, min_length=2) v = SchemaValidator(schema) assert v.validate_python(b'hello') == b'hello' ``` Args: max_length: The value must be at most this length min_length: The value must be at least this length strict: Whether the value should be a bytes or a value that can be converted to a bytes ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='bytes', max_length=max_length, min_length=min_length, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class DateSchema(TypedDict, total=False): type: Required[Literal['date']] strict: bool le: date ge: date lt: date gt: date now_op: Literal['past', 'future'] # defaults to current local utc offset from `time.localtime().tm_gmtoff` # value is restricted to -86_400 <
offset < 86_400 by bounds in generate_self_schema.py now_utc_offset: int ref: str metadata: Dict[str, Any] serialization: SerSchema def date_schema( *, strict: bool | None = None, le: date | None = None, ge: date | None = None, lt: date | None = None, gt: date | None = None, now_op: Literal['past', 'future'] | None = None, now_utc_offset: int | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> DateSchema: """ Returns a schema that matches a date value, e.g.: ```py from datetime import date from pydantic_core import SchemaValidator, core_schema schema = core_schema.date_schema(le=date(2020, 1, 1), ge=date(2019, 1, 1)) v = SchemaValidator(schema) assert v.validate_python(date(2019, 6, 1)) == date(2019, 6, 1) ``` Args: strict: Whether the value should be a date or a value that can be converted to a date le: The value must be less than or equal to this date ge: The value must be greater than or equal to this date lt: The value must be strictly less than this date gt: The value must be strictly greater than this date now_op: The value must be in the past or future relative to the current date now_utc_offset: The value must be in the past or future relative to the current date with this utc offset ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='date', strict=strict, le=le, ge=ge, lt=lt, gt=gt, now_op=now_op, now_utc_offset=now_utc_offset, ref=ref, metadata=metadata, serialization=serialization, ) class TimeSchema(TypedDict, total=False): type: Required[Literal['time']] strict: bool le: time ge: time lt: time gt: time tz_constraint: Union[Literal['aware', 'naive'], int] microseconds_precision: Literal['truncate', 'error'] ref: str metadata: Dict[str, Any] serialization: SerSchema def time_schema( *, strict: bool | None = None, le: time | None = None, ge: time | None = None, lt: time | None = None, gt: time | None = None, tz_constraint: Literal['aware', 'naive'] | int | None = None, microseconds_precision: Literal['truncate', 'error'] = 'truncate', ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> TimeSchema: """ Returns a schema that matches a time value, e.g.: ```py from datetime import time from pydantic_core import SchemaValidator, core_schema schema = core_schema.time_schema(le=time(12, 0, 0), ge=time(6, 0, 0)) v = SchemaValidator(schema) assert v.validate_python(time(9, 0, 0)) == time(9, 0, 0) ``` Args: strict: Whether the value should be a time or a value that can be converted to a time le: The value must be less than or equal to this time ge: The value must be greater than or equal to this time lt: The value must be strictly less than this time gt: The value must be strictly greater than this time tz_constraint: The value must be timezone aware or naive, or an int to indicate required tz offset microseconds_precision: The behavior when seconds have more than 6 digits or microseconds is too large ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='time', strict=strict, le=le, ge=ge, lt=lt, gt=gt, tz_constraint=tz_constraint, 
microseconds_precision=microseconds_precision, ref=ref, metadata=metadata, serialization=serialization, ) class DatetimeSchema(TypedDict, total=False): type: Required[Literal['datetime']] strict: bool le: datetime ge: datetime lt: datetime gt: datetime now_op: Literal['past', 'future'] tz_constraint: Union[Literal['aware', 'naive'], int] # defaults to current local utc offset from `time.localtime().tm_gmtoff` # value is restricted to -86_400 < offset < 86_400 by bounds in generate_self_schema.py now_utc_offset: int microseconds_precision: Literal['truncate', 'error'] # default: 'truncate' ref: str metadata: Dict[str, Any] serialization: SerSchema def datetime_schema( *, strict: bool | None = None, le: datetime | None = None, ge: datetime | None = None, lt: datetime | None = None, gt: datetime | None = None, now_op: Literal['past', 'future'] | None = None, tz_constraint: Literal['aware', 'naive'] | int | None = None, now_utc_offset: int | None = None, microseconds_precision: Literal['truncate', 'error'] = 'truncate', ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> DatetimeSchema: """ Returns a schema that matches a datetime value, e.g.: ```py from datetime import datetime from pydantic_core import SchemaValidator, core_schema schema = core_schema.datetime_schema() v = SchemaValidator(schema) now = datetime.now() assert v.validate_python(str(now)) == now ``` Args: strict: Whether the value should be a datetime or a value that can be converted to a datetime le: The value must be less than or equal to this datetime ge: The value must be greater than or equal to this datetime lt: The value must be strictly less than this datetime gt: The value must be strictly greater than this datetime now_op: The value must be in the past or future relative to the current datetime tz_constraint: The value must be timezone aware or naive, or an int to indicate required tz offset TODO: use of a tzinfo where offset changes based on the datetime is not yet supported now_utc_offset: The value must be in the past or future relative to the current datetime with this utc offset microseconds_precision: The behavior when seconds have more than 6 digits or microseconds is too large ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='datetime', strict=strict, le=le, ge=ge, lt=lt, gt=gt, now_op=now_op, tz_constraint=tz_constraint, now_utc_offset=now_utc_offset, microseconds_precision=microseconds_precision, ref=ref, metadata=metadata, serialization=serialization, ) class TimedeltaSchema(TypedDict, total=False): type: Required[Literal['timedelta']] strict: bool le: timedelta ge: timedelta lt: timedelta gt: timedelta microseconds_precision: Literal['truncate', 'error'] ref: str metadata: Dict[str, Any] serialization: SerSchema def timedelta_schema( *, strict: bool | None = None, le: timedelta | None = None, ge: timedelta | None = None, lt: timedelta | None = None, gt: timedelta | None = None, microseconds_precision: Literal['truncate', 'error'] = 'truncate', ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> TimedeltaSchema: """ Returns a schema that matches a timedelta value, e.g.: ```py from datetime import timedelta from pydantic_core import SchemaValidator, core_schema schema = 
core_schema.timedelta_schema(le=timedelta(days=1), ge=timedelta(days=0)) v = SchemaValidator(schema) assert v.validate_python(timedelta(hours=12)) == timedelta(hours=12) ``` Args: strict: Whether the value should be a timedelta or a value that can be converted to a timedelta le: The value must be less than or equal to this timedelta ge: The value must be greater than or equal to this timedelta lt: The value must be strictly less than this timedelta gt: The value must be strictly greater than this timedelta microseconds_precision: The behavior when seconds have more than 6 digits or microseconds is too large ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='timedelta', strict=strict, le=le, ge=ge, lt=lt, gt=gt, microseconds_precision=microseconds_precision, ref=ref, metadata=metadata, serialization=serialization, ) class LiteralSchema(TypedDict, total=False): type: Required[Literal['literal']] expected: Required[List[Any]] ref: str metadata: Dict[str, Any] serialization: SerSchema def literal_schema( expected: list[Any], *, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> LiteralSchema: """ Returns a schema that matches a literal value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.literal_schema(['hello', 'world']) v = SchemaValidator(schema) assert v.validate_python('hello') == 'hello' ``` Args: expected: The value must be one of these values ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none(type='literal', expected=expected, ref=ref, metadata=metadata, serialization=serialization) class EnumSchema(TypedDict, total=False): type: Required[Literal['enum']] cls: Required[Any] members: Required[List[Any]] sub_type: Literal['str', 'int', 'float'] missing: Callable[[Any], Any] strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def enum_schema( cls: Any, members: list[Any], *, sub_type: Literal['str', 'int', 'float'] | None = None, missing: Callable[[Any], Any] | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> EnumSchema: """ Returns a schema that matches an enum value, e.g.: ```py from enum import Enum from pydantic_core import SchemaValidator, core_schema class Color(Enum): RED = 1 GREEN = 2 BLUE = 3 schema = core_schema.enum_schema(Color, list(Color.__members__.values())) v = SchemaValidator(schema) assert v.validate_python(2) is Color.GREEN ``` Args: cls: The enum class members: The members of the enum, generally `list(MyEnum.__members__.values())` sub_type: The type of the enum, one of 'str', 'int' or 'float', or None for plain enums missing: A function to use when the value is not found in the enum, from `_missing_` strict: Whether to use strict mode, defaults to False ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema
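The `missing` callback mirrors `Enum._missing_`: it is called with the failing input and may return a fallback member. A minimal sketch (the unconditional fallback to `Color.RED` is purely illustrative):

```py
from enum import Enum

from pydantic_core import SchemaValidator, core_schema

class Color(Enum):
    RED = 1
    GREEN = 2

schema = core_schema.enum_schema(
    Color, list(Color.__members__.values()), missing=lambda v: Color.RED
)
v = SchemaValidator(schema)
assert v.validate_python(3) is Color.RED
```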
""" return _dict_not_none( type='enum', cls=cls, members=members, sub_type=sub_type, missing=missing, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) # must match input/parse_json.rs::JsonType::try_from JsonType = Literal['null', 'bool', 'int', 'float', 'str', 'list', 'dict'] class IsInstanceSchema(TypedDict, total=False): type: Required[Literal['is-instance']] cls: Required[Any] cls_repr: str ref: str metadata: Dict[str, Any] serialization: SerSchema def is_instance_schema( cls: Any, *, cls_repr: str | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> IsInstanceSchema: """ Returns a schema that checks if a value is an instance of a class, equivalent to python's `isinstance` function, e.g.: ```py from pydantic_core import SchemaValidator, core_schema class A: pass schema = core_schema.is_instance_schema(cls=A) v = SchemaValidator(schema) v.validate_python(A()) ``` Args: cls: The value must be an instance of this class cls_repr: If provided this string is used in the validator name instead of `repr(cls)` ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='is-instance', cls=cls, cls_repr=cls_repr, ref=ref, metadata=metadata, serialization=serialization ) class IsSubclassSchema(TypedDict, total=False): type: Required[Literal['is-subclass']] cls: Required[Type[Any]] cls_repr: str ref: str metadata: Dict[str, Any] serialization: SerSchema def is_subclass_schema( cls: Type[Any], *, cls_repr: str | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> IsSubclassSchema: """ Returns a schema that checks if a value is a subclass of a class, equivalent to python's `issubclass` function, e.g.: ```py from pydantic_core import SchemaValidator, core_schema class A: pass class B(A): pass schema = core_schema.is_subclass_schema(cls=A) v = SchemaValidator(schema) v.validate_python(B) ``` Args: cls: The value must be a subclass of this class cls_repr: If provided this string is used in the validator name instead of `repr(cls)` ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='is-subclass', cls=cls, cls_repr=cls_repr, ref=ref, metadata=metadata, serialization=serialization ) class CallableSchema(TypedDict, total=False): type: Required[Literal['callable']] ref: str metadata: Dict[str, Any] serialization: SerSchema def callable_schema( *, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None ) -> CallableSchema: """ Returns a schema that checks if a value is callable, equivalent to python's `callable` function, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.callable_schema() v = SchemaValidator(schema) v.validate_python(min) ``` Args: ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none(type='callable', ref=ref, metadata=metadata, serialization=serialization) class UuidSchema(TypedDict, total=False): type: Required[Literal['uuid']] version: Literal[1, 3, 4, 5] strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def uuid_schema( *, version: Literal[1, 3, 4, 5] | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> UuidSchema:
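"""
Returns a schema that matches a UUID value; a sketch of typical usage:

```py
import uuid

from pydantic_core import SchemaValidator, core_schema

schema = core_schema.uuid_schema(version=4)
v = SchemaValidator(schema)
assert v.validate_python(uuid.uuid4()).version == 4
```

Args:
    version: The value must be a UUID of this version (1, 3, 4 or 5)
    strict: Whether the value must already be a `uuid.UUID` instance (lax mode also accepts UUID strings)
    ref: optional unique identifier of the schema, used to reference the schema in other places
    metadata: Any other information you want to include with the schema, not used by pydantic-core
    serialization: Custom serialization schema
"""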
return _dict_not_none( type='uuid', version=version, strict=strict, ref=ref, metadata=metadata, serialization=serialization ) class IncExSeqSerSchema(TypedDict, total=False): type: Required[Literal['include-exclude-sequence']] include: Set[int] exclude: Set[int] def filter_seq_schema(*, include: Set[int] | None = None, exclude: Set[int] | None = None) -> IncExSeqSerSchema: return _dict_not_none(type='include-exclude-sequence', include=include, exclude=exclude) IncExSeqOrElseSerSchema = Union[IncExSeqSerSchema, SerSchema] class ListSchema(TypedDict, total=False): type: Required[Literal['list']] items_schema: CoreSchema min_length: int max_length: int fail_fast: bool strict: bool ref: str metadata: Dict[str, Any] serialization: IncExSeqOrElseSerSchema def list_schema( items_schema: CoreSchema | None = None, *, min_length: int | None = None, max_length: int | None = None, fail_fast: bool | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: IncExSeqOrElseSerSchema | None = None, ) -> ListSchema: """ Returns a schema that matches a list value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.list_schema(core_schema.int_schema(), min_length=0, max_length=10) v = SchemaValidator(schema) assert v.validate_python(['4']) == [4] ``` Args: items_schema: The value must be a list of items that match this schema min_length: The value must be a list with at least this many items max_length: The value must be a list with at most this many items fail_fast: Stop validation on the first error strict: Whether the value must already be a list (no lax coercion from other collection types) ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema
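With `fail_fast=True` item validation stops at the first error; a minimal sketch:

```py
from pydantic_core import SchemaValidator, ValidationError, core_schema

schema = core_schema.list_schema(core_schema.int_schema(), fail_fast=True)
v = SchemaValidator(schema)
try:
    v.validate_python(['not-int', 'also-not-int'])
except ValidationError as exc:
    assert len(exc.errors()) == 1  # only the first failure is reported
```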
""" return _dict_not_none( type='list', items_schema=items_schema, min_length=min_length, max_length=max_length, fail_fast=fail_fast, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) # @deprecated('tuple_positional_schema is deprecated. Use pydantic_core.core_schema.tuple_schema instead.') def tuple_positional_schema( items_schema: list[CoreSchema], *, extras_schema: CoreSchema | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: IncExSeqOrElseSerSchema | None = None, ) -> TupleSchema: """ Returns a schema that matches a tuple of schemas, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.tuple_positional_schema( [core_schema.int_schema(), core_schema.str_schema()] ) v = SchemaValidator(schema) assert v.validate_python((1, 'hello')) == (1, 'hello') ``` Args: items_schema: The value must be a tuple with items that match these schemas extras_schema: The value must be a tuple with items that match this schema This was inspired by JSON schema's `prefixItems` and `items` fields. In python's `typing.Tuple`, you can't specify a type for "extra" items -- they must all be the same type if the length is variable. So this field won't be set from a `typing.Tuple` annotation on a pydantic model. strict: Whether the value must already be a tuple (no lax coercion from other collection types) ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ if extras_schema is not None: variadic_item_index = len(items_schema) items_schema = items_schema + [extras_schema] else: variadic_item_index = None return tuple_schema( items_schema=items_schema, variadic_item_index=variadic_item_index, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) # @deprecated('tuple_variable_schema is deprecated. Use pydantic_core.core_schema.tuple_schema instead.') def tuple_variable_schema( items_schema: CoreSchema | None = None, *, min_length: int | None = None, max_length: int | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: IncExSeqOrElseSerSchema | None = None, ) -> TupleSchema: """ Returns a schema that matches a tuple of a given schema, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.tuple_variable_schema( items_schema=core_schema.int_schema(), min_length=0, max_length=10 ) v = SchemaValidator(schema) assert v.validate_python(('1', 2, 3)) == (1, 2, 3) ``` Args: items_schema: The value must be a tuple with items that match this schema min_length: The value must be a tuple with at least this many items max_length: The value must be a tuple with at most this many items strict: Whether the value must already be a tuple (no lax coercion from other collection types) ref: Optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema
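Since this helper just delegates to `tuple_schema` (see the function body below), the equivalent direct call is:

```py
from pydantic_core import SchemaValidator, core_schema

# equivalent of tuple_variable_schema(core_schema.int_schema(), max_length=10)
schema = core_schema.tuple_schema(
    [core_schema.int_schema()], variadic_item_index=0, max_length=10
)
v = SchemaValidator(schema)
assert v.validate_python(('1', 2)) == (1, 2)
```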
""" return tuple_schema( items_schema=[items_schema or any_schema()], variadic_item_index=0, min_length=min_length, max_length=max_length, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class TupleSchema(TypedDict, total=False): type: Required[Literal['tuple']] items_schema: Required[List[CoreSchema]] variadic_item_index: int min_length: int max_length: int fail_fast: bool strict: bool ref: str metadata: Dict[str, Any] serialization: IncExSeqOrElseSerSchema def tuple_schema( items_schema: list[CoreSchema], *, variadic_item_index: int | None = None, min_length: int | None = None, max_length: int | None = None, fail_fast: bool | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: IncExSeqOrElseSerSchema | None = None, ) -> TupleSchema: """ Returns a schema that matches a tuple of schemas, with an optional variadic item, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.tuple_schema( [core_schema.int_schema(), core_schema.str_schema(), core_schema.float_schema()], variadic_item_index=1, ) v = SchemaValidator(schema) assert v.validate_python((1, 'hello', 'world', 1.5)) == (1, 'hello', 'world', 1.5) ``` Args: items_schema: The value must be a tuple with items that match these schemas variadic_item_index: The index of the schema in `items_schema` to be treated as variadic (following PEP 646) min_length: The value must be a tuple with at least this many items max_length: The value must be a tuple with at most this many items fail_fast: Stop validation on the first error strict: Whether the value must already be a tuple (no lax coercion from other collection types) ref: Optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='tuple', items_schema=items_schema, variadic_item_index=variadic_item_index, min_length=min_length, max_length=max_length, fail_fast=fail_fast, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class SetSchema(TypedDict, total=False): type: Required[Literal['set']] items_schema: CoreSchema min_length: int max_length: int fail_fast: bool strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def set_schema( items_schema: CoreSchema | None = None, *, min_length: int | None = None, max_length: int | None = None, fail_fast: bool | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> SetSchema: """ Returns a schema that matches a set of a given schema, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.set_schema( items_schema=core_schema.int_schema(), min_length=0, max_length=10 ) v = SchemaValidator(schema) assert v.validate_python({1, '2', 3}) == {1, 2, 3} ``` Args: items_schema: The value must be a set with items that match this schema min_length: The value must be a set with at least this many items max_length: The value must be a set with at most this many items fail_fast: Stop validation on the first error strict: Whether the value must already be a set (no lax coercion from other collection types) ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='set', items_schema=items_schema, min_length=min_length, max_length=max_length, fail_fast=fail_fast, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class FrozenSetSchema(TypedDict, total=False): type: Required[Literal['frozenset']] items_schema: CoreSchema min_length: int max_length: int fail_fast: bool strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def frozenset_schema( items_schema: CoreSchema | None = None, *, min_length: int | None = None, max_length: int | None = None, fail_fast: bool | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> FrozenSetSchema: """ Returns a schema that matches a frozenset of a given schema, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.frozenset_schema( items_schema=core_schema.int_schema(), min_length=0, max_length=10 ) v = SchemaValidator(schema) assert v.validate_python(frozenset(range(3))) == frozenset({0, 1, 2}) ``` Args: items_schema: The value must be a frozenset with items that match this schema min_length: The value must be a frozenset with at least this many items max_length: The value must be a frozenset with at most this many items fail_fast: Stop validation on the first error strict: Whether the value must already be a frozenset (no lax coercion from other collection types) ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom
serialization schema """ return _dict_not_none( type='frozenset', items_schema=items_schema, min_length=min_length, max_length=max_length, fail_fast=fail_fast, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class GeneratorSchema(TypedDict, total=False): type: Required[Literal['generator']] items_schema: CoreSchema min_length: int max_length: int ref: str metadata: Dict[str, Any] serialization: IncExSeqOrElseSerSchema def generator_schema( items_schema: CoreSchema | None = None, *, min_length: int | None = None, max_length: int | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: IncExSeqOrElseSerSchema | None = None, ) -> GeneratorSchema: """ Returns a schema that matches a generator value, e.g.: ```py from typing import Iterator from pydantic_core import SchemaValidator, core_schema def gen() -> Iterator[int]: yield 1 schema = core_schema.generator_schema(items_schema=core_schema.int_schema()) v = SchemaValidator(schema) v.validate_python(gen()) ``` Unlike other types, validated generators do not raise ValidationErrors eagerly, but instead will raise a ValidationError when a violating value is actually read from the generator. This is to ensure that "validated" generators retain the benefit of lazy evaluation. Args: items_schema: The value must be a generator with items that match this schema min_length: The value must be a generator that yields at least this many items max_length: The value must be a generator that yields at most this many items ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='generator', items_schema=items_schema, min_length=min_length, max_length=max_length, ref=ref, metadata=metadata, serialization=serialization, ) IncExDict = Set[Union[int, str]] class IncExDictSerSchema(TypedDict, total=False): type: Required[Literal['include-exclude-dict']] include: IncExDict exclude: IncExDict def filter_dict_schema(*, include: IncExDict | None = None, exclude: IncExDict | None = None) -> IncExDictSerSchema: return _dict_not_none(type='include-exclude-dict', include=include, exclude=exclude) IncExDictOrElseSerSchema = Union[IncExDictSerSchema, SerSchema] class DictSchema(TypedDict, total=False): type: Required[Literal['dict']] keys_schema: CoreSchema # default: AnySchema values_schema: CoreSchema # default: AnySchema min_length: int max_length: int strict: bool ref: str metadata: Dict[str, Any] serialization: IncExDictOrElseSerSchema def dict_schema( keys_schema: CoreSchema | None = None, values_schema: CoreSchema | None = None, *, min_length: int | None = None, max_length: int | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> DictSchema: """ Returns a schema that matches a dict value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.dict_schema( keys_schema=core_schema.str_schema(), values_schema=core_schema.int_schema() ) v = SchemaValidator(schema) assert v.validate_python({'a': '1', 'b': 2}) == {'a': 1, 'b': 2} ``` Args: keys_schema: The value must be a dict with keys that match this schema values_schema: The value must be a dict with values that match this schema min_length: The value must be a dict with at least this many items max_length: The value must be a 
dict with at most this many items strict: Whether the keys and values should be validated with strict mode ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='dict', keys_schema=keys_schema, values_schema=values_schema, min_length=min_length, max_length=max_length, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) # (input_value: Any, /) -> Any NoInfoValidatorFunction = Callable[[Any], Any] class NoInfoValidatorFunctionSchema(TypedDict): type: Literal['no-info'] function: NoInfoValidatorFunction # (input_value: Any, info: ValidationInfo, /) -> Any WithInfoValidatorFunction = Callable[[Any, ValidationInfo], Any] class WithInfoValidatorFunctionSchema(TypedDict, total=False): type: Required[Literal['with-info']] function: Required[WithInfoValidatorFunction] field_name: str ValidationFunction = Union[NoInfoValidatorFunctionSchema, WithInfoValidatorFunctionSchema] class _ValidatorFunctionSchema(TypedDict, total=False): function: Required[ValidationFunction] schema: Required[CoreSchema] ref: str metadata: Dict[str, Any] serialization: SerSchema class BeforeValidatorFunctionSchema(_ValidatorFunctionSchema, total=False): type: Required[Literal['function-before']] json_schema_input_schema: CoreSchema def no_info_before_validator_function( function: NoInfoValidatorFunction, schema: CoreSchema, *, ref: str | None = None, json_schema_input_schema: CoreSchema | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> BeforeValidatorFunctionSchema: """ Returns a schema that calls a validator function before validating, no `info` argument is provided, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: bytes) -> str: return v.decode() + 'world' func_schema = core_schema.no_info_before_validator_function( function=fn, schema=core_schema.str_schema() ) schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) v = SchemaValidator(schema) assert v.validate_python({'a': b'hello '}) == {'a': 'hello world'} ``` Args: function: The validator function to call schema: The schema to validate the output of the validator function ref: optional unique identifier of the schema, used to reference the schema in other places json_schema_input_schema: The core schema to be used to generate the corresponding JSON Schema input type metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='function-before', function={'type': 'no-info', 'function': function}, schema=schema, ref=ref, json_schema_input_schema=json_schema_input_schema, metadata=metadata, serialization=serialization, ) def with_info_before_validator_function( function: WithInfoValidatorFunction, schema: CoreSchema, *, field_name: str | None = None, ref: str | None = None, json_schema_input_schema: CoreSchema | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> BeforeValidatorFunctionSchema: """ Returns a schema that calls a validator function before validation, the function is called with an `info` argument, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: bytes, info: core_schema.ValidationInfo) -> str: assert info.data is not None assert 
info.field_name is not None return v.decode() + 'world' func_schema = core_schema.with_info_before_validator_function( function=fn, schema=core_schema.str_schema(), field_name='a' ) schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) v = SchemaValidator(schema) assert v.validate_python({'a': b'hello '}) == {'a': 'hello world'} ``` Args: function: The validator function to call field_name: The name of the field schema: The schema to validate the output of the validator function ref: optional unique identifier of the schema, used to reference the schema in other places json_schema_input_schema: The core schema to be used to generate the corresponding JSON Schema input type metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='function-before', function=_dict_not_none(type='with-info', function=function, field_name=field_name), schema=schema, ref=ref, json_schema_input_schema=json_schema_input_schema, metadata=metadata, serialization=serialization, ) class AfterValidatorFunctionSchema(_ValidatorFunctionSchema, total=False): type: Required[Literal['function-after']] def no_info_after_validator_function( function: NoInfoValidatorFunction, schema: CoreSchema, *, ref: str | None = None, json_schema_input_schema: CoreSchema | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> AfterValidatorFunctionSchema: """ Returns a schema that calls a validator function after validating, no `info` argument is provided, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: str) -> str: return v + 'world' func_schema = core_schema.no_info_after_validator_function(fn, core_schema.str_schema()) schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) v = SchemaValidator(schema) assert v.validate_python({'a': b'hello '}) == {'a': 'hello world'} ``` Args: function: The validator function to call after the schema is validated schema: The schema to validate before the validator function ref: optional unique identifier of the schema, used to reference the schema in other places json_schema_input_schema: The core schema to be used to generate the corresponding JSON Schema input type metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='function-after', function={'type': 'no-info', 'function': function}, schema=schema, ref=ref, json_schema_input_schema=json_schema_input_schema, metadata=metadata, serialization=serialization, ) def with_info_after_validator_function( function: WithInfoValidatorFunction, schema: CoreSchema, *, field_name: str | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> AfterValidatorFunctionSchema: """ Returns a schema that calls a validator function after validation, the function is called with an `info` argument, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: str, info: core_schema.ValidationInfo) -> str: assert info.data is not None assert info.field_name is not None return v + 'world' func_schema = core_schema.with_info_after_validator_function( function=fn, schema=core_schema.str_schema(), field_name='a' ) schema = core_schema.typed_dict_schema({'a': core_schema.typed_dict_field(func_schema)}) v = SchemaValidator(schema) assert 
v.validate_python({'a': b'hello '}) == {'a': 'hello world'} ``` Args: function: The validator function to call after the schema is validated schema: The schema to validate before the validator function field_name: The name of the field this validator is applied to, if any ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='function-after', function=_dict_not_none(type='with-info', function=function, field_name=field_name), schema=schema, ref=ref, metadata=metadata, serialization=serialization, ) class ValidatorFunctionWrapHandler(Protocol): def __call__(self, input_value: Any, outer_location: str | int | None = None, /) -> Any: # pragma: no cover ... # (input_value: Any, validator: ValidatorFunctionWrapHandler, /) -> Any NoInfoWrapValidatorFunction = Callable[[Any, ValidatorFunctionWrapHandler], Any] class NoInfoWrapValidatorFunctionSchema(TypedDict): type: Literal['no-info'] function: NoInfoWrapValidatorFunction # (input_value: Any, validator: ValidatorFunctionWrapHandler, info: ValidationInfo, /) -> Any WithInfoWrapValidatorFunction = Callable[[Any, ValidatorFunctionWrapHandler, ValidationInfo], Any] class WithInfoWrapValidatorFunctionSchema(TypedDict, total=False): type: Required[Literal['with-info']] function: Required[WithInfoWrapValidatorFunction] field_name: str WrapValidatorFunction = Union[NoInfoWrapValidatorFunctionSchema, WithInfoWrapValidatorFunctionSchema] class WrapValidatorFunctionSchema(TypedDict, total=False): type: Required[Literal['function-wrap']] function: Required[WrapValidatorFunction] schema: Required[CoreSchema] ref: str json_schema_input_schema: CoreSchema metadata: Dict[str, Any] serialization: SerSchema def no_info_wrap_validator_function( function: NoInfoWrapValidatorFunction, schema: CoreSchema, *, ref: str | None = None, json_schema_input_schema: CoreSchema | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> WrapValidatorFunctionSchema: """ Returns a schema which calls a function with a `validator` callable argument which can optionally be used to call inner validation with the function logic, this is much like the "onion" implementation of middleware in many popular web frameworks, no `info` argument is passed, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn( v: str, validator: core_schema.ValidatorFunctionWrapHandler, ) -> str: return validator(input_value=v) + 'world' schema = core_schema.no_info_wrap_validator_function( function=fn, schema=core_schema.str_schema() ) v = SchemaValidator(schema) assert v.validate_python('hello ') == 'hello world' ``` Args: function: The validator function to call schema: The schema to validate the output of the validator function ref: optional unique identifier of the schema, used to reference the schema in other places json_schema_input_schema: The core schema to be used to generate the corresponding JSON Schema input type metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema
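Because the handler may be called inside `try`/`except`, a wrap validator can recover from inner validation failures; a minimal sketch (the `-1` fallback is purely illustrative):

```py
from pydantic_core import SchemaValidator, ValidationError, core_schema

def fn(v, handler: core_schema.ValidatorFunctionWrapHandler):
    try:
        return handler(v)  # run the inner int validation
    except ValidationError:
        return -1  # fall back when inner validation fails

schema = core_schema.no_info_wrap_validator_function(fn, core_schema.int_schema())
v = SchemaValidator(schema)
assert v.validate_python('123') == 123
assert v.validate_python('not an int') == -1
```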
""" return _dict_not_none( type='function-wrap', function={'type': 'no-info', 'function': function}, schema=schema, json_schema_input_schema=json_schema_input_schema, ref=ref, metadata=metadata, serialization=serialization, ) def with_info_wrap_validator_function( function: WithInfoWrapValidatorFunction, schema: CoreSchema, *, field_name: str | None = None, json_schema_input_schema: CoreSchema | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> WrapValidatorFunctionSchema: """ Returns a schema which calls a function with a `validator` callable argument which can optionally be used to call inner validation with the function logic, this is much like the "onion" implementation of middleware in many popular web frameworks, an `info` argument is also passed, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn( v: str, validator: core_schema.ValidatorFunctionWrapHandler, info: core_schema.ValidationInfo, ) -> str: return validator(input_value=v) + 'world' schema = core_schema.with_info_wrap_validator_function( function=fn, schema=core_schema.str_schema() ) v = SchemaValidator(schema) assert v.validate_python('hello ') == 'hello world' ``` Args: function: The validator function to call schema: The schema to validate the output of the validator function field_name: The name of the field this validator is applied to, if any json_schema_input_schema: The core schema to be used to generate the corresponding JSON Schema input type ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='function-wrap', function=_dict_not_none(type='with-info', function=function, field_name=field_name), schema=schema, json_schema_input_schema=json_schema_input_schema, ref=ref, metadata=metadata, serialization=serialization, ) class PlainValidatorFunctionSchema(TypedDict, total=False): type: Required[Literal['function-plain']] function: Required[ValidationFunction] ref: str json_schema_input_schema: CoreSchema metadata: Dict[str, Any] serialization: SerSchema def no_info_plain_validator_function( function: NoInfoValidatorFunction, *, ref: str | None = None, json_schema_input_schema: CoreSchema | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> PlainValidatorFunctionSchema: """ Returns a schema that uses the provided function for validation, no `info` argument is passed, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: str) -> str: assert 'hello' in v return v + 'world' schema = core_schema.no_info_plain_validator_function(function=fn) v = SchemaValidator(schema) assert v.validate_python('hello ') == 'hello world' ``` Args: function: The validator function to call ref: optional unique identifier of the schema, used to reference the schema in other places json_schema_input_schema: The core schema to be used to generate the corresponding JSON Schema input type metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema
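Plain validators replace the inner validation entirely, so error reporting is up to the function; one common pattern (a sketch, with an illustrative error type) is raising `PydanticCustomError`:

```py
from pydantic_core import PydanticCustomError, SchemaValidator, core_schema

def fn(v):
    if not isinstance(v, str):
        raise PydanticCustomError('not_a_string', 'Input should be a string')
    return v.upper()

schema = core_schema.no_info_plain_validator_function(function=fn)
v = SchemaValidator(schema)
assert v.validate_python('hello') == 'HELLO'
```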
""" return _dict_not_none( type='function-plain', function={'type': 'no-info', 'function': function}, ref=ref, json_schema_input_schema=json_schema_input_schema, metadata=metadata, serialization=serialization, ) def with_info_plain_validator_function( function: WithInfoValidatorFunction, *, field_name: str | None = None, ref: str | None = None, json_schema_input_schema: CoreSchema | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> PlainValidatorFunctionSchema: """ Returns a schema that uses the provided function for validation, an `info` argument is passed, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: str, info: core_schema.ValidationInfo) -> str: assert 'hello' in v return v + 'world' schema = core_schema.with_info_plain_validator_function(function=fn) v = SchemaValidator(schema) assert v.validate_python('hello ') == 'hello world' ``` Args: function: The validator function to call field_name: The name of the field this validator is applied to, if any ref: optional unique identifier of the schema, used to reference the schema in other places json_schema_input_schema: The core schema to be used to generate the corresponding JSON Schema input type metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='function-plain', function=_dict_not_none(type='with-info', function=function, field_name=field_name), ref=ref, json_schema_input_schema=json_schema_input_schema, metadata=metadata, serialization=serialization, ) class WithDefaultSchema(TypedDict, total=False): type: Required[Literal['default']] schema: Required[CoreSchema] default: Any default_factory: Union[Callable[[], Any], Callable[[Dict[str, Any]], Any]] default_factory_takes_data: bool on_error: Literal['raise', 'omit', 'default'] # default: 'raise' validate_default: bool # default: False strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def with_default_schema( schema: CoreSchema, *, default: Any = PydanticUndefined, default_factory: Union[Callable[[], Any], Callable[[Dict[str, Any]], Any], None] = None, default_factory_takes_data: bool | None = None, on_error: Literal['raise', 'omit', 'default'] | None = None, validate_default: bool | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> WithDefaultSchema: """ Returns a schema that adds a default value to the given schema, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.with_default_schema(core_schema.str_schema(), default='hello') wrapper_schema = core_schema.typed_dict_schema( {'a': core_schema.typed_dict_field(schema)} ) v = SchemaValidator(wrapper_schema) assert v.validate_python({}) == v.validate_python({'a': 'hello'}) ``` Args: schema: The schema to add a default value to default: The default value to use default_factory: A callable that returns the default value to use default_factory_takes_data: Whether the default factory takes a validated data argument on_error: What to do if the schema validation fails. 
One of 'raise', 'omit', 'default' validate_default: Whether the default value should be validated strict: Whether the underlying schema should be validated with strict mode ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ s = _dict_not_none( type='default', schema=schema, default_factory=default_factory, default_factory_takes_data=default_factory_takes_data, on_error=on_error, validate_default=validate_default, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) if default is not PydanticUndefined: s['default'] = default return s class NullableSchema(TypedDict, total=False): type: Required[Literal['nullable']] schema: Required[CoreSchema] strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def nullable_schema( schema: CoreSchema, *, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> NullableSchema: """ Returns a schema that matches a nullable value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.nullable_schema(core_schema.str_schema()) v = SchemaValidator(schema) assert v.validate_python(None) is None ``` Args: schema: The schema to wrap strict: Whether the underlying schema should be validated with strict mode ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='nullable', schema=schema, strict=strict, ref=ref, metadata=metadata, serialization=serialization ) class UnionSchema(TypedDict, total=False): type: Required[Literal['union']] choices: Required[List[Union[CoreSchema, Tuple[CoreSchema, str]]]] # default true, whether to automatically collapse unions with one element to the inner validator auto_collapse: bool custom_error_type: str custom_error_message: str custom_error_context: Dict[str, Union[str, int, float]] mode: Literal['smart', 'left_to_right'] # default: 'smart' strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def union_schema( choices: list[CoreSchema | tuple[CoreSchema, str]], *, auto_collapse: bool | None = None, custom_error_type: str | None = None, custom_error_message: str | None = None, custom_error_context: dict[str, str | int] | None = None, mode: Literal['smart', 'left_to_right'] | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> UnionSchema: """ Returns a schema that matches a union value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.union_schema([core_schema.str_schema(), core_schema.int_schema()]) v = SchemaValidator(schema) assert v.validate_python('hello') == 'hello' assert v.validate_python(1) == 1 ``` Args: choices: The schemas to match. If a tuple, the second item is used as the label for the case. 
auto_collapse: whether to automatically collapse unions with one element to the inner validator, default true custom_error_type: The custom error type to use if the validation fails custom_error_message: The custom error message to use if the validation fails custom_error_context: The custom error context to use if the validation fails mode: How to select which choice to return * `smart` (default) will try to return the choice which is the closest match to the input value * `left_to_right` will return the first choice in `choices` which succeeds validation strict: Whether the underlying schemas should be validated with strict mode ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='union', choices=choices, auto_collapse=auto_collapse, custom_error_type=custom_error_type, custom_error_message=custom_error_message, custom_error_context=custom_error_context, mode=mode, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class TaggedUnionSchema(TypedDict, total=False): type: Required[Literal['tagged-union']] choices: Required[Dict[Hashable, CoreSchema]] discriminator: Required[Union[str, List[Union[str, int]], List[List[Union[str, int]]], Callable[[Any], Hashable]]] custom_error_type: str custom_error_message: str custom_error_context: Dict[str, Union[str, int, float]] strict: bool from_attributes: bool # default: True ref: str metadata: Dict[str, Any] serialization: SerSchema def tagged_union_schema( choices: Dict[Any, CoreSchema], discriminator: str | list[str | int] | list[list[str | int]] | Callable[[Any], Any], *, custom_error_type: str | None = None, custom_error_message: str | None = None, custom_error_context: dict[str, int | str | float] | None = None, strict: bool | None = None, from_attributes: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> TaggedUnionSchema: """ Returns a schema that matches a tagged union value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema apple_schema = core_schema.typed_dict_schema( { 'foo': core_schema.typed_dict_field(core_schema.str_schema()), 'bar': core_schema.typed_dict_field(core_schema.int_schema()), } ) banana_schema = core_schema.typed_dict_schema( { 'foo': core_schema.typed_dict_field(core_schema.str_schema()), 'spam': core_schema.typed_dict_field( core_schema.list_schema(items_schema=core_schema.int_schema()) ), } ) schema = core_schema.tagged_union_schema( choices={ 'apple': apple_schema, 'banana': banana_schema, }, discriminator='foo', ) v = SchemaValidator(schema) assert v.validate_python({'foo': 'apple', 'bar': '123'}) == {'foo': 'apple', 'bar': 123} assert v.validate_python({'foo': 'banana', 'spam': [1, 2, 3]}) == { 'foo': 'banana', 'spam': [1, 2, 3], } ``` Args: choices: The schemas to match When retrieving a schema from `choices` using the discriminator value, if the value is a str, it should be fed back into the `choices` map until a schema is obtained (This approach is to prevent multiple ownership of a single schema in Rust) discriminator: The discriminator to use to determine the schema to use * If `discriminator` is a str, it is the name of the attribute to use as the discriminator * If `discriminator` is a list of int/str, it should be used as a "path" to access the discriminator * If `discriminator` is a 
list of lists, each inner list is a path, and the first path that exists is used * If `discriminator` is a callable, it should return the discriminator when called on the value to validate; the callable can return `None` to indicate that there is no matching discriminator present on the input custom_error_type: The custom error type to use if the validation fails custom_error_message: The custom error message to use if the validation fails custom_error_context: The custom error context to use if the validation fails strict: Whether the underlying schemas should be validated with strict mode from_attributes: Whether to use the attributes of the object to retrieve the discriminator value ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='tagged-union', choices=choices, discriminator=discriminator, custom_error_type=custom_error_type, custom_error_message=custom_error_message, custom_error_context=custom_error_context, strict=strict, from_attributes=from_attributes, ref=ref, metadata=metadata, serialization=serialization, ) class ChainSchema(TypedDict, total=False): type: Required[Literal['chain']] steps: Required[List[CoreSchema]] ref: str metadata: Dict[str, Any] serialization: SerSchema def chain_schema( steps: list[CoreSchema], *, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> ChainSchema: """ Returns a schema that chains the provided validation schemas, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: str, info: core_schema.ValidationInfo) -> str: assert 'hello' in v return v + ' world' fn_schema = core_schema.with_info_plain_validator_function(function=fn) schema = core_schema.chain_schema( [fn_schema, fn_schema, fn_schema, core_schema.str_schema()] ) v = SchemaValidator(schema) assert v.validate_python('hello') == 'hello world world world' ``` Args: steps: The schemas to chain ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none(type='chain', steps=steps, ref=ref, metadata=metadata, serialization=serialization) class LaxOrStrictSchema(TypedDict, total=False): type: Required[Literal['lax-or-strict']] lax_schema: Required[CoreSchema] strict_schema: Required[CoreSchema] strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def lax_or_strict_schema( lax_schema: CoreSchema, strict_schema: CoreSchema, *, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> LaxOrStrictSchema: """ Returns a schema that uses the lax or strict schema, e.g.: ```py from pydantic_core import SchemaValidator, core_schema def fn(v: str, info: core_schema.ValidationInfo) -> str: assert 'hello' in v return v + ' world' lax_schema = core_schema.int_schema(strict=False) strict_schema = core_schema.int_schema(strict=True) schema = core_schema.lax_or_strict_schema( lax_schema=lax_schema, strict_schema=strict_schema, strict=True ) v = SchemaValidator(schema) assert v.validate_python(123) == 123 schema = core_schema.lax_or_strict_schema( lax_schema=lax_schema, strict_schema=strict_schema, strict=False ) v = 
SchemaValidator(schema) assert v.validate_python('123') == 123 ``` Args: lax_schema: The lax schema to use strict_schema: The strict schema to use strict: Whether the strict schema should be used ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='lax-or-strict', lax_schema=lax_schema, strict_schema=strict_schema, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class JsonOrPythonSchema(TypedDict, total=False): type: Required[Literal['json-or-python']] json_schema: Required[CoreSchema] python_schema: Required[CoreSchema] ref: str metadata: Dict[str, Any] serialization: SerSchema def json_or_python_schema( json_schema: CoreSchema, python_schema: CoreSchema, *, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> JsonOrPythonSchema: """ Returns a schema that uses the Json or Python schema depending on the input: ```py from pydantic_core import SchemaValidator, ValidationError, core_schema v = SchemaValidator( core_schema.json_or_python_schema( json_schema=core_schema.int_schema(), python_schema=core_schema.int_schema(strict=True), ) ) assert v.validate_json('"123"') == 123 try: v.validate_python('123') except ValidationError: pass else: raise AssertionError('Validation should have failed') ``` Args: json_schema: The schema to use for Json inputs python_schema: The schema to use for Python inputs ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='json-or-python', json_schema=json_schema, python_schema=python_schema, ref=ref, metadata=metadata, serialization=serialization, ) class TypedDictField(TypedDict, total=False): type: Required[Literal['typed-dict-field']] schema: Required[CoreSchema] required: bool validation_alias: Union[str, List[Union[str, int]], List[List[Union[str, int]]]] serialization_alias: str serialization_exclude: bool # default: False metadata: Dict[str, Any] def typed_dict_field( schema: CoreSchema, *, required: bool | None = None, validation_alias: str | list[str | int] | list[list[str | int]] | None = None, serialization_alias: str | None = None, serialization_exclude: bool | None = None, metadata: Dict[str, Any] | None = None, ) -> TypedDictField: """ Returns a schema that matches a typed dict field, e.g.: ```py from pydantic_core import core_schema field = core_schema.typed_dict_field(schema=core_schema.int_schema(), required=True) ``` Args: schema: The schema to use for the field required: Whether the field is required, otherwise uses the value from `total` on the typed dict validation_alias: The alias(es) to use to find the field in the validation data serialization_alias: The alias to use as a key when serializing serialization_exclude: Whether to exclude the field when serializing metadata: Any other information you want to include with the schema, not used by pydantic-core """ return _dict_not_none( type='typed-dict-field', schema=schema, required=required, validation_alias=validation_alias, serialization_alias=serialization_alias, serialization_exclude=serialization_exclude, metadata=metadata, ) class TypedDictSchema(TypedDict, total=False): type: 
Required[Literal['typed-dict']] fields: Required[Dict[str, TypedDictField]] cls: Type[Any] computed_fields: List[ComputedField] strict: bool extras_schema: CoreSchema # all these values can be set via config, equivalent fields have `typed_dict_` prefix extra_behavior: ExtraBehavior total: bool # default: True populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1 ref: str metadata: Dict[str, Any] serialization: SerSchema config: CoreConfig def typed_dict_schema( fields: Dict[str, TypedDictField], *, cls: Type[Any] | None = None, computed_fields: list[ComputedField] | None = None, strict: bool | None = None, extras_schema: CoreSchema | None = None, extra_behavior: ExtraBehavior | None = None, total: bool | None = None, populate_by_name: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, config: CoreConfig | None = None, ) -> TypedDictSchema: """ Returns a schema that matches a typed dict, e.g.: ```py from typing_extensions import TypedDict from pydantic_core import SchemaValidator, core_schema class MyTypedDict(TypedDict): a: str wrapper_schema = core_schema.typed_dict_schema( {'a': core_schema.typed_dict_field(core_schema.str_schema())}, cls=MyTypedDict ) v = SchemaValidator(wrapper_schema) assert v.validate_python({'a': 'hello'}) == {'a': 'hello'} ``` Args: fields: The fields to use for the typed dict cls: The class to use for the typed dict computed_fields: Computed fields to use when serializing the model, only applies when directly inside a model strict: Whether the typed dict is strict extras_schema: The extra validator to use for the typed dict ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core extra_behavior: The extra behavior to use for the typed dict total: Whether the typed dict is total, otherwise uses `typed_dict_total` from config populate_by_name: Whether the typed dict should populate by name serialization: Custom serialization schema config: The config to use for the typed dict
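`extra_behavior` controls what happens to keys not listed in `fields`; for example, 'forbid' rejects them (a minimal sketch):

```py
from pydantic_core import SchemaValidator, ValidationError, core_schema

schema = core_schema.typed_dict_schema(
    {'a': core_schema.typed_dict_field(core_schema.str_schema())},
    extra_behavior='forbid',
)
v = SchemaValidator(schema)
try:
    v.validate_python({'a': 'hello', 'b': 1})
except ValidationError:
    pass
else:
    raise AssertionError('extra key should have been rejected')
```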
""" return _dict_not_none( type='model-field', schema=schema, validation_alias=validation_alias, serialization_alias=serialization_alias, serialization_exclude=serialization_exclude, frozen=frozen, metadata=metadata, ) class ModelFieldsSchema(TypedDict, total=False): type: Required[Literal['model-fields']] fields: Required[Dict[str, ModelField]] model_name: str computed_fields: List[ComputedField] strict: bool extras_schema: CoreSchema # all these values can be set via config, equivalent fields have `typed_dict_` prefix extra_behavior: ExtraBehavior populate_by_name: bool # replaces `allow_population_by_field_name` in pydantic v1 from_attributes: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def model_fields_schema( fields: Dict[str, ModelField], *, model_name: str | None = None, computed_fields: list[ComputedField] | None = None, strict: bool | None = None, extras_schema: CoreSchema | None = None, extra_behavior: ExtraBehavior | None = None, populate_by_name: bool | None = None, from_attributes: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> ModelFieldsSchema: """ Returns a schema that matches a typed dict, e.g.: ```py from pydantic_core import SchemaValidator, core_schema wrapper_schema = core_schema.model_fields_schema( {'a': core_schema.model_field(core_schema.str_schema())} ) v = SchemaValidator(wrapper_schema) print(v.validate_python({'a': 'hello'})) #> ({'a': 'hello'}, None, {'a'}) ``` Args: fields: The fields to use for the typed dict model_name: The name of the model, used for error messages, defaults to "Model" computed_fields: Computed fields to use when serializing the model, only applies when directly inside a model strict: Whether the typed dict is strict extras_schema: The extra validator to use for the typed dict ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core extra_behavior: The extra behavior to use for the typed dict populate_by_name: Whether the typed dict should populate by name from_attributes: Whether the typed dict should be populated from attributes serialization: Custom serialization schema """ return _dict_not_none( type='model-fields', fields=fields, model_name=model_name, computed_fields=computed_fields, strict=strict, extras_schema=extras_schema, extra_behavior=extra_behavior, populate_by_name=populate_by_name, from_attributes=from_attributes, ref=ref, metadata=metadata, serialization=serialization, ) class ModelSchema(TypedDict, total=False): type: Required[Literal['model']] cls: Required[Type[Any]] generic_origin: Type[Any] schema: Required[CoreSchema] custom_init: bool root_model: bool post_init: str revalidate_instances: Literal['always', 'never', 'subclass-instances'] # default: 'never' strict: bool frozen: bool extra_behavior: ExtraBehavior config: CoreConfig ref: str metadata: Dict[str, Any] serialization: SerSchema def model_schema( cls: Type[Any], schema: CoreSchema, *, generic_origin: Type[Any] | None = None, custom_init: bool | None = None, root_model: bool | None = None, post_init: str | None = None, revalidate_instances: Literal['always', 'never', 'subclass-instances'] | None = None, strict: bool | None = None, frozen: bool | None = None, extra_behavior: ExtraBehavior | None = None, config: CoreConfig | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = 
None, ) -> ModelSchema: """ A model schema generally contains a typed-dict schema. It will run the typed dict validator, then create a new class and set the dict and fields set returned from the typed dict validator to `__dict__` and `__pydantic_fields_set__` respectively. Example: ```py from pydantic_core import CoreConfig, SchemaValidator, core_schema class MyModel: __slots__ = ( '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__', ) schema = core_schema.model_schema( cls=MyModel, config=CoreConfig(str_max_length=5), schema=core_schema.model_fields_schema( fields={'a': core_schema.model_field(core_schema.str_schema())}, ), ) v = SchemaValidator(schema) assert v.isinstance_python({'a': 'hello'}) is True assert v.isinstance_python({'a': 'too long'}) is False ``` Args: cls: The class to use for the model schema: The schema to use for the model generic_origin: The origin type used for this model, if it's a parametrized generic. Ex, if this model schema represents `SomeModel[int]`, generic_origin is `SomeModel` custom_init: Whether the model has a custom init method root_model: Whether the model is a `RootModel` post_init: The call after init to use for the model revalidate_instances: whether instances of models and dataclasses (including subclass instances) should re-validate defaults to config.revalidate_instances, else 'never' strict: Whether the model is strict frozen: Whether the model is frozen extra_behavior: The extra behavior to use for the model, used in serialization config: The config to use for the model ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='model', cls=cls, generic_origin=generic_origin, schema=schema, custom_init=custom_init, root_model=root_model, post_init=post_init, revalidate_instances=revalidate_instances, strict=strict, frozen=frozen, extra_behavior=extra_behavior, config=config, ref=ref, metadata=metadata, serialization=serialization, ) class DataclassField(TypedDict, total=False): type: Required[Literal['dataclass-field']] name: Required[str] schema: Required[CoreSchema] kw_only: bool # default: True init: bool # default: True init_only: bool # default: False frozen: bool # default: False validation_alias: Union[str, List[Union[str, int]], List[List[Union[str, int]]]] serialization_alias: str serialization_exclude: bool # default: False metadata: Dict[str, Any] def dataclass_field( name: str, schema: CoreSchema, *, kw_only: bool | None = None, init: bool | None = None, init_only: bool | None = None, validation_alias: str | list[str | int] | list[list[str | int]] | None = None, serialization_alias: str | None = None, serialization_exclude: bool | None = None, metadata: Dict[str, Any] | None = None, frozen: bool | None = None, ) -> DataclassField: """ Returns a schema for a dataclass field, e.g.: ```py from pydantic_core import SchemaValidator, core_schema field = core_schema.dataclass_field( name='a', schema=core_schema.str_schema(), kw_only=False ) schema = core_schema.dataclass_args_schema('Foobar', [field]) v = SchemaValidator(schema) assert v.validate_python({'a': 'hello'}) == ({'a': 'hello'}, None) ``` Args: name: The name to use for the argument parameter schema: The schema to use for the argument parameter kw_only: Whether the field can be set with a positional argument as well as a keyword argument init: 
Whether the field should be validated during initialization init_only: Whether the field should be omitted from `__dict__` and passed to `__post_init__` validation_alias: The alias(es) to use to find the field in the validation data serialization_alias: The alias to use as a key when serializing serialization_exclude: Whether to exclude the field when serializing metadata: Any other information you want to include with the schema, not used by pydantic-core frozen: Whether the field is frozen """ return _dict_not_none( type='dataclass-field', name=name, schema=schema, kw_only=kw_only, init=init, init_only=init_only, validation_alias=validation_alias, serialization_alias=serialization_alias, serialization_exclude=serialization_exclude, metadata=metadata, frozen=frozen, ) class DataclassArgsSchema(TypedDict, total=False): type: Required[Literal['dataclass-args']] dataclass_name: Required[str] fields: Required[List[DataclassField]] computed_fields: List[ComputedField] populate_by_name: bool # default: False collect_init_only: bool # default: False ref: str metadata: Dict[str, Any] serialization: SerSchema extra_behavior: ExtraBehavior def dataclass_args_schema( dataclass_name: str, fields: list[DataclassField], *, computed_fields: List[ComputedField] | None = None, populate_by_name: bool | None = None, collect_init_only: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, extra_behavior: ExtraBehavior | None = None, ) -> DataclassArgsSchema: """ Returns a schema for validating dataclass arguments, e.g.: ```py from pydantic_core import SchemaValidator, core_schema field_a = core_schema.dataclass_field( name='a', schema=core_schema.str_schema(), kw_only=False ) field_b = core_schema.dataclass_field( name='b', schema=core_schema.bool_schema(), kw_only=False ) schema = core_schema.dataclass_args_schema('Foobar', [field_a, field_b]) v = SchemaValidator(schema) assert v.validate_python({'a': 'hello', 'b': True}) == ({'a': 'hello', 'b': True}, None) ``` Args: dataclass_name: The name of the dataclass being validated fields: The fields to use for the dataclass computed_fields: Computed fields to use when serializing the dataclass populate_by_name: Whether to populate by name collect_init_only: Whether to collect init only fields into a dict to pass to `__post_init__` ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema extra_behavior: How to handle extra fields """ return _dict_not_none( type='dataclass-args', dataclass_name=dataclass_name, fields=fields, computed_fields=computed_fields, populate_by_name=populate_by_name, collect_init_only=collect_init_only, ref=ref, metadata=metadata, serialization=serialization, extra_behavior=extra_behavior, ) class DataclassSchema(TypedDict, total=False): type: Required[Literal['dataclass']] cls: Required[Type[Any]] generic_origin: Type[Any] schema: Required[CoreSchema] fields: Required[List[str]] cls_name: str post_init: bool # default: False revalidate_instances: Literal['always', 'never', 'subclass-instances'] # default: 'never' strict: bool # default: False frozen: bool # default False ref: str metadata: Dict[str, Any] serialization: SerSchema slots: bool config: CoreConfig def dataclass_schema( cls: Type[Any], schema: CoreSchema, fields: List[str], *, generic_origin: Type[Any] | None = None, cls_name: str | None = 
None, post_init: bool | None = None, revalidate_instances: Literal['always', 'never', 'subclass-instances'] | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, frozen: bool | None = None, slots: bool | None = None, config: CoreConfig | None = None, ) -> DataclassSchema: """ Returns a schema for a dataclass. As with `ModelSchema`, this schema can only be used as a field within another schema, not as the root type. Args: cls: The dataclass type, used to perform subclass checks schema: The schema to use for the dataclass fields fields: Fields of the dataclass, this is used in serialization and in validation during re-validation and while validating assignment generic_origin: The origin type used for this dataclass, if it's a parametrized generic. Ex, if this model schema represents `SomeDataclass[int]`, generic_origin is `SomeDataclass` cls_name: The name to use in error locs, etc; this is useful for generics (default: `cls.__name__`) post_init: Whether to call `__post_init__` after validation revalidate_instances: whether instances of models and dataclasses (including subclass instances) should re-validate defaults to config.revalidate_instances, else 'never' strict: Whether to require an exact instance of `cls` ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema frozen: Whether the dataclass is frozen slots: Whether `slots=True` on the dataclass, means each field is assigned independently, rather than simply setting `__dict__`, default false """ return _dict_not_none( type='dataclass', cls=cls, generic_origin=generic_origin, fields=fields, cls_name=cls_name, schema=schema, post_init=post_init, revalidate_instances=revalidate_instances, strict=strict, ref=ref, metadata=metadata, serialization=serialization, frozen=frozen, slots=slots, config=config, ) class ArgumentsParameter(TypedDict, total=False): name: Required[str] schema: Required[CoreSchema] mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] # default positional_or_keyword alias: Union[str, List[Union[str, int]], List[List[Union[str, int]]]] def arguments_parameter( name: str, schema: CoreSchema, *, mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] | None = None, alias: str | list[str | int] | list[list[str | int]] | None = None, ) -> ArgumentsParameter: """ Returns a schema that matches an argument parameter, e.g.: ```py from pydantic_core import SchemaValidator, core_schema param = core_schema.arguments_parameter( name='a', schema=core_schema.str_schema(), mode='positional_only' ) schema = core_schema.arguments_schema([param]) v = SchemaValidator(schema) assert v.validate_python(('hello',)) == (('hello',), {}) ``` Args: name: The name to use for the argument parameter schema: The schema to use for the argument parameter mode: The mode to use for the argument parameter alias: The alias to use for the argument parameter """ return _dict_not_none(name=name, schema=schema, mode=mode, alias=alias) VarKwargsMode: TypeAlias = Literal['uniform', 'unpacked-typed-dict'] class ArgumentsSchema(TypedDict, total=False): type: Required[Literal['arguments']] arguments_schema: Required[List[ArgumentsParameter]] populate_by_name: bool var_args_schema: CoreSchema var_kwargs_mode: VarKwargsMode var_kwargs_schema: CoreSchema ref: str 
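    # var_kwargs_mode defaults to 'uniform' (validate every extra keyword argument
    # against var_kwargs_schema); 'unpacked-typed-dict' instead requires
    # var_kwargs_schema to be a typed_dict_schema, mirroring the PEP 692
    # `**kwargs: Unpack[TD]` shape — see the arguments_schema() docstring below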
metadata: Dict[str, Any] serialization: SerSchema def arguments_schema( arguments: list[ArgumentsParameter], *, populate_by_name: bool | None = None, var_args_schema: CoreSchema | None = None, var_kwargs_mode: VarKwargsMode | None = None, var_kwargs_schema: CoreSchema | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> ArgumentsSchema: """ Returns a schema that matches an arguments schema, e.g.: ```py from pydantic_core import SchemaValidator, core_schema param_a = core_schema.arguments_parameter( name='a', schema=core_schema.str_schema(), mode='positional_only' ) param_b = core_schema.arguments_parameter( name='b', schema=core_schema.bool_schema(), mode='positional_only' ) schema = core_schema.arguments_schema([param_a, param_b]) v = SchemaValidator(schema) assert v.validate_python(('hello', True)) == (('hello', True), {}) ``` Args: arguments: The arguments to use for the arguments schema populate_by_name: Whether to populate by name var_args_schema: The variable args schema to use for the arguments schema var_kwargs_mode: The validation mode to use for variadic keyword arguments. If `'uniform'`, every value of the keyword arguments will be validated against the `var_kwargs_schema` schema. If `'unpacked-typed-dict'`, the `var_kwargs_schema` argument must be a [`typed_dict_schema`][pydantic_core.core_schema.typed_dict_schema] var_kwargs_schema: The variable kwargs schema to use for the arguments schema ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='arguments', arguments_schema=arguments, populate_by_name=populate_by_name, var_args_schema=var_args_schema, var_kwargs_mode=var_kwargs_mode, var_kwargs_schema=var_kwargs_schema, ref=ref, metadata=metadata, serialization=serialization, ) class CallSchema(TypedDict, total=False): type: Required[Literal['call']] arguments_schema: Required[CoreSchema] function: Required[Callable[..., Any]] function_name: str # default function.__name__ return_schema: CoreSchema ref: str metadata: Dict[str, Any] serialization: SerSchema def call_schema( arguments: CoreSchema, function: Callable[..., Any], *, function_name: str | None = None, return_schema: CoreSchema | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> CallSchema: """ Returns a schema that matches an arguments schema, then calls a function, e.g.: ```py from pydantic_core import SchemaValidator, core_schema param_a = core_schema.arguments_parameter( name='a', schema=core_schema.str_schema(), mode='positional_only' ) param_b = core_schema.arguments_parameter( name='b', schema=core_schema.bool_schema(), mode='positional_only' ) args_schema = core_schema.arguments_schema([param_a, param_b]) schema = core_schema.call_schema( arguments=args_schema, function=lambda a, b: a + str(not b), return_schema=core_schema.str_schema(), ) v = SchemaValidator(schema) assert v.validate_python((('hello', True))) == 'helloFalse' ``` Args: arguments: The arguments to use for the arguments schema function: The function to use for the call schema function_name: The function name to use for the call schema, if not provided `function.__name__` is used return_schema: The return schema to use for the call schema ref: optional unique identifier of the schema, used to reference 
the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='call', arguments_schema=arguments, function=function, function_name=function_name, return_schema=return_schema, ref=ref, metadata=metadata, serialization=serialization, ) class CustomErrorSchema(TypedDict, total=False): type: Required[Literal['custom-error']] schema: Required[CoreSchema] custom_error_type: Required[str] custom_error_message: str custom_error_context: Dict[str, Union[str, int, float]] ref: str metadata: Dict[str, Any] serialization: SerSchema def custom_error_schema( schema: CoreSchema, custom_error_type: str, *, custom_error_message: str | None = None, custom_error_context: dict[str, Any] | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> CustomErrorSchema: """ Returns a schema that matches a custom error value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.custom_error_schema( schema=core_schema.int_schema(), custom_error_type='MyError', custom_error_message='Error msg', ) v = SchemaValidator(schema) v.validate_python(1) ``` Args: schema: The schema to use for the custom error schema custom_error_type: The custom error type to use for the custom error schema custom_error_message: The custom error message to use for the custom error schema custom_error_context: The custom error context to use for the custom error schema ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='custom-error', schema=schema, custom_error_type=custom_error_type, custom_error_message=custom_error_message, custom_error_context=custom_error_context, ref=ref, metadata=metadata, serialization=serialization, ) class JsonSchema(TypedDict, total=False): type: Required[Literal['json']] schema: CoreSchema ref: str metadata: Dict[str, Any] serialization: SerSchema def json_schema( schema: CoreSchema | None = None, *, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> JsonSchema: """ Returns a schema that matches a JSON value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema dict_schema = core_schema.model_fields_schema( { 'field_a': core_schema.model_field(core_schema.str_schema()), 'field_b': core_schema.model_field(core_schema.bool_schema()), }, ) class MyModel: __slots__ = ( '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__', ) field_a: str field_b: bool json_schema = core_schema.json_schema(schema=dict_schema) schema = core_schema.model_schema(cls=MyModel, schema=json_schema) v = SchemaValidator(schema) m = v.validate_python('{"field_a": "hello", "field_b": true}') assert isinstance(m, MyModel) ``` Args: schema: The schema to use for the JSON schema ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none(type='json', schema=schema, ref=ref, metadata=metadata, serialization=serialization) class UrlSchema(TypedDict, total=False): type: Required[Literal['url']] max_length: int 
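    # note: validation yields a pydantic_core.Url, which re-serializes in
    # normalized form (e.g. 'https://example.com' prints as 'https://example.com/'
    # in the url_schema() example below)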
allowed_schemes: List[str] host_required: bool # default False default_host: str default_port: int default_path: str strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def url_schema( *, max_length: int | None = None, allowed_schemes: list[str] | None = None, host_required: bool | None = None, default_host: str | None = None, default_port: int | None = None, default_path: str | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> UrlSchema: """ Returns a schema that matches a URL value, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.url_schema() v = SchemaValidator(schema) print(v.validate_python('https://example.com')) #> https://example.com/ ``` Args: max_length: The maximum length of the URL allowed_schemes: The allowed URL schemes host_required: Whether the URL must have a host default_host: The default host to use if the URL does not have a host default_port: The default port to use if the URL does not have a port default_path: The default path to use if the URL does not have a path strict: Whether to use strict URL parsing ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='url', max_length=max_length, allowed_schemes=allowed_schemes, host_required=host_required, default_host=default_host, default_port=default_port, default_path=default_path, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class MultiHostUrlSchema(TypedDict, total=False): type: Required[Literal['multi-host-url']] max_length: int allowed_schemes: List[str] host_required: bool # default False default_host: str default_port: int default_path: str strict: bool ref: str metadata: Dict[str, Any] serialization: SerSchema def multi_host_url_schema( *, max_length: int | None = None, allowed_schemes: list[str] | None = None, host_required: bool | None = None, default_host: str | None = None, default_port: int | None = None, default_path: str | None = None, strict: bool | None = None, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> MultiHostUrlSchema: """ Returns a schema that matches a URL value with possibly multiple hosts, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.multi_host_url_schema() v = SchemaValidator(schema) print(v.validate_python('redis://localhost,0.0.0.0,127.0.0.1')) #> redis://localhost,0.0.0.0,127.0.0.1 ``` Args: max_length: The maximum length of the URL allowed_schemes: The allowed URL schemes host_required: Whether the URL must have a host default_host: The default host to use if the URL does not have a host default_port: The default port to use if the URL does not have a port default_path: The default path to use if the URL does not have a path strict: Whether to use strict URL parsing ref: optional unique identifier of the schema, used to reference the schema in other places metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='multi-host-url', max_length=max_length, allowed_schemes=allowed_schemes, host_required=host_required, default_host=default_host, default_port=default_port, 
default_path=default_path, strict=strict, ref=ref, metadata=metadata, serialization=serialization, ) class DefinitionsSchema(TypedDict, total=False): type: Required[Literal['definitions']] schema: Required[CoreSchema] definitions: Required[List[CoreSchema]] metadata: Dict[str, Any] serialization: SerSchema def definitions_schema(schema: CoreSchema, definitions: list[CoreSchema]) -> DefinitionsSchema: """ Build a schema that contains both an inner schema and a list of definitions which can be used within the inner schema. ```py from pydantic_core import SchemaValidator, core_schema schema = core_schema.definitions_schema( core_schema.list_schema(core_schema.definition_reference_schema('foobar')), [core_schema.int_schema(ref='foobar')], ) v = SchemaValidator(schema) assert v.validate_python([1, 2, '3']) == [1, 2, 3] ``` Args: schema: The inner schema definitions: List of definitions which can be referenced within inner schema """ return DefinitionsSchema(type='definitions', schema=schema, definitions=definitions) class DefinitionReferenceSchema(TypedDict, total=False): type: Required[Literal['definition-ref']] schema_ref: Required[str] ref: str metadata: Dict[str, Any] serialization: SerSchema def definition_reference_schema( schema_ref: str, ref: str | None = None, metadata: Dict[str, Any] | None = None, serialization: SerSchema | None = None, ) -> DefinitionReferenceSchema: """ Returns a schema that points to a schema stored in "definitions", this is useful for nested recursive models and also when you want to define validators separately from the main schema, e.g.: ```py from pydantic_core import SchemaValidator, core_schema schema_definition = core_schema.definition_reference_schema('list-schema') schema = core_schema.definitions_schema( schema=schema_definition, definitions=[ core_schema.list_schema(items_schema=schema_definition, ref='list-schema'), ], ) v = SchemaValidator(schema) assert v.validate_python([()]) == [[]] ``` Args: schema_ref: The schema ref to use for the definition reference schema metadata: Any other information you want to include with the schema, not used by pydantic-core serialization: Custom serialization schema """ return _dict_not_none( type='definition-ref', schema_ref=schema_ref, ref=ref, metadata=metadata, serialization=serialization ) MYPY = False # See https://github.com/python/mypy/issues/14034 for details, in summary mypy is extremely slow to process this # union which kills performance not just for pydantic, but even for code using pydantic if not MYPY: CoreSchema = Union[ InvalidSchema, AnySchema, NoneSchema, BoolSchema, IntSchema, FloatSchema, DecimalSchema, StringSchema, BytesSchema, DateSchema, TimeSchema, DatetimeSchema, TimedeltaSchema, LiteralSchema, EnumSchema, IsInstanceSchema, IsSubclassSchema, CallableSchema, ListSchema, TupleSchema, SetSchema, FrozenSetSchema, GeneratorSchema, DictSchema, AfterValidatorFunctionSchema, BeforeValidatorFunctionSchema, WrapValidatorFunctionSchema, PlainValidatorFunctionSchema, WithDefaultSchema, NullableSchema, UnionSchema, TaggedUnionSchema, ChainSchema, LaxOrStrictSchema, JsonOrPythonSchema, TypedDictSchema, ModelFieldsSchema, ModelSchema, DataclassArgsSchema, DataclassSchema, ArgumentsSchema, CallSchema, CustomErrorSchema, JsonSchema, UrlSchema, MultiHostUrlSchema, DefinitionsSchema, DefinitionReferenceSchema, UuidSchema, ComplexSchema, ] elif False: CoreSchema: TypeAlias = Mapping[str, Any] # to update this, call `pytest -k test_core_schema_type_literal` and copy the output CoreSchemaType = Literal[ 
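    # one string tag per member of the CoreSchema union above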
'invalid', 'any', 'none', 'bool', 'int', 'float', 'decimal', 'str', 'bytes', 'date', 'time', 'datetime', 'timedelta', 'literal', 'enum', 'is-instance', 'is-subclass', 'callable', 'list', 'tuple', 'set', 'frozenset', 'generator', 'dict', 'function-after', 'function-before', 'function-wrap', 'function-plain', 'default', 'nullable', 'union', 'tagged-union', 'chain', 'lax-or-strict', 'json-or-python', 'typed-dict', 'model-fields', 'model', 'dataclass-args', 'dataclass', 'arguments', 'call', 'custom-error', 'json', 'url', 'multi-host-url', 'definitions', 'definition-ref', 'uuid', 'complex', ] CoreSchemaFieldType = Literal['model-field', 'dataclass-field', 'typed-dict-field', 'computed-field'] # used in _pydantic_core.pyi::PydanticKnownError # to update this, call `pytest -k test_all_errors` and copy the output ErrorType = Literal[ 'no_such_attribute', 'json_invalid', 'json_type', 'needs_python_object', 'recursion_loop', 'missing', 'frozen_field', 'frozen_instance', 'extra_forbidden', 'invalid_key', 'get_attribute_error', 'model_type', 'model_attributes_type', 'dataclass_type', 'dataclass_exact_type', 'none_required', 'greater_than', 'greater_than_equal', 'less_than', 'less_than_equal', 'multiple_of', 'finite_number', 'too_short', 'too_long', 'iterable_type', 'iteration_error', 'string_type', 'string_sub_type', 'string_unicode', 'string_too_short', 'string_too_long', 'string_pattern_mismatch', 'enum', 'dict_type', 'mapping_type', 'list_type', 'tuple_type', 'set_type', 'bool_type', 'bool_parsing', 'int_type', 'int_parsing', 'int_parsing_size', 'int_from_float', 'float_type', 'float_parsing', 'bytes_type', 'bytes_too_short', 'bytes_too_long', 'bytes_invalid_encoding', 'value_error', 'assertion_error', 'literal_error', 'date_type', 'date_parsing', 'date_from_datetime_parsing', 'date_from_datetime_inexact', 'date_past', 'date_future', 'time_type', 'time_parsing', 'datetime_type', 'datetime_parsing', 'datetime_object_invalid', 'datetime_from_date_parsing', 'datetime_past', 'datetime_future', 'timezone_naive', 'timezone_aware', 'timezone_offset', 'time_delta_type', 'time_delta_parsing', 'frozen_set_type', 'is_instance_of', 'is_subclass_of', 'callable_type', 'union_tag_invalid', 'union_tag_not_found', 'arguments_type', 'missing_argument', 'unexpected_keyword_argument', 'missing_keyword_only_argument', 'unexpected_positional_argument', 'missing_positional_only_argument', 'multiple_argument_values', 'url_type', 'url_parsing', 'url_syntax_violation', 'url_too_long', 'url_scheme', 'uuid_type', 'uuid_parsing', 'uuid_version', 'decimal_type', 'decimal_parsing', 'decimal_max_digits', 'decimal_max_places', 'decimal_whole_digits', 'complex_type', 'complex_str_parsing', ] def _dict_not_none(**kwargs: Any) -> Any: return {k: v for k, v in kwargs.items() if v is not None} ############################################################################### # All this stuff is deprecated by #980 and will be removed eventually # They're kept because some code external code will be using them @deprecated('`field_before_validator_function` is deprecated, use `with_info_before_validator_function` instead.') def field_before_validator_function(function: WithInfoValidatorFunction, field_name: str, schema: CoreSchema, **kwargs): warnings.warn( '`field_before_validator_function` is deprecated, use `with_info_before_validator_function` instead.', DeprecationWarning, ) return with_info_before_validator_function(function, schema, field_name=field_name, **kwargs) @deprecated('`general_before_validator_function` is deprecated, use 
`with_info_before_validator_function` instead.') def general_before_validator_function(*args, **kwargs): warnings.warn( '`general_before_validator_function` is deprecated, use `with_info_before_validator_function` instead.', DeprecationWarning, ) return with_info_before_validator_function(*args, **kwargs) @deprecated('`field_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.') def field_after_validator_function(function: WithInfoValidatorFunction, field_name: str, schema: CoreSchema, **kwargs): warnings.warn( '`field_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.', DeprecationWarning, ) return with_info_after_validator_function(function, schema, field_name=field_name, **kwargs) @deprecated('`general_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.') def general_after_validator_function(*args, **kwargs): warnings.warn( '`general_after_validator_function` is deprecated, use `with_info_after_validator_function` instead.', DeprecationWarning, ) return with_info_after_validator_function(*args, **kwargs) @deprecated('`field_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.') def field_wrap_validator_function( function: WithInfoWrapValidatorFunction, field_name: str, schema: CoreSchema, **kwargs ): warnings.warn( '`field_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.', DeprecationWarning, ) return with_info_wrap_validator_function(function, schema, field_name=field_name, **kwargs) @deprecated('`general_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.') def general_wrap_validator_function(*args, **kwargs): warnings.warn( '`general_wrap_validator_function` is deprecated, use `with_info_wrap_validator_function` instead.', DeprecationWarning, ) return with_info_wrap_validator_function(*args, **kwargs) @deprecated('`field_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.') def field_plain_validator_function(function: WithInfoValidatorFunction, field_name: str, **kwargs): warnings.warn( '`field_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.', DeprecationWarning, ) return with_info_plain_validator_function(function, field_name=field_name, **kwargs) @deprecated('`general_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.') def general_plain_validator_function(*args, **kwargs): warnings.warn( '`general_plain_validator_function` is deprecated, use `with_info_plain_validator_function` instead.', DeprecationWarning, ) return with_info_plain_validator_function(*args, **kwargs) _deprecated_import_lookup = { 'FieldValidationInfo': ValidationInfo, 'FieldValidatorFunction': WithInfoValidatorFunction, 'GeneralValidatorFunction': WithInfoValidatorFunction, 'FieldWrapValidatorFunction': WithInfoWrapValidatorFunction, } if TYPE_CHECKING: FieldValidationInfo = ValidationInfo def __getattr__(attr_name: str) -> object: new_attr = _deprecated_import_lookup.get(attr_name) if new_attr is None: raise AttributeError(f"module 'pydantic_core' has no attribute '{attr_name}'") else: import warnings msg = f'`{attr_name}` is deprecated, use `{new_attr.__name__}` instead.' 
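        # PEP 562 module-level __getattr__: emit a DeprecationWarning, then hand
        # back the replacement object so legacy imports keep working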
warnings.warn(msg, DeprecationWarning, stacklevel=1) return new_attr pydantic-pydantic-core-d771df5/python/pydantic_core/py.typed000066400000000000000000000000001473051353300243440ustar00rootroot00000000000000pydantic-pydantic-core-d771df5/src/000077500000000000000000000000001473051353300173025ustar00rootroot00000000000000pydantic-pydantic-core-d771df5/src/argument_markers.rs000066400000000000000000000057021473051353300232220ustar00rootroot00000000000000use pyo3::basic::CompareOp; use pyo3::exceptions::PyNotImplementedError; use pyo3::prelude::*; use pyo3::sync::GILOnceCell; use pyo3::types::{PyDict, PyTuple}; use crate::tools::safe_repr; #[pyclass(module = "pydantic_core._pydantic_core", get_all, frozen, freelist = 100)] #[derive(Debug, Clone)] pub struct ArgsKwargs { pub(crate) args: Py, pub(crate) kwargs: Option>, } impl ArgsKwargs { fn eq(&self, py: Python, other: &Self) -> PyResult { if self.args.bind(py).eq(other.args.bind(py))? { match (&self.kwargs, &other.kwargs) { (Some(d1), Some(d2)) => d1.bind(py).eq(d2.bind(py)), (None, None) => Ok(true), _ => Ok(false), } } else { Ok(false) } } } #[pymethods] impl ArgsKwargs { #[new] #[pyo3(signature = (args, kwargs = None))] fn py_new(py: Python, args: &Bound<'_, PyTuple>, kwargs: Option<&Bound<'_, PyDict>>) -> Self { Self { args: args.into_py(py), kwargs: match kwargs { Some(d) if !d.is_empty() => Some(d.to_owned().unbind()), _ => None, }, } } fn __richcmp__(&self, other: &Self, op: CompareOp, py: Python<'_>) -> PyObject { match op { CompareOp::Eq => match self.eq(py, other) { Ok(b) => b.into_py(py), Err(e) => e.into_py(py), }, CompareOp::Ne => match self.eq(py, other) { Ok(b) => (!b).into_py(py), Err(e) => e.into_py(py), }, _ => py.NotImplemented(), } } pub fn __repr__(&self, py: Python) -> String { let args = safe_repr(self.args.bind(py)); match self.kwargs { Some(ref d) => format!("ArgsKwargs({args}, {})", safe_repr(d.bind(py))), None => format!("ArgsKwargs({args})"), } } } static UNDEFINED_CELL: GILOnceCell> = GILOnceCell::new(); #[pyclass(module = "pydantic_core._pydantic_core", frozen)] #[derive(Debug)] pub struct PydanticUndefinedType {} #[pymethods] impl PydanticUndefinedType { #[new] pub fn py_new(_py: Python) -> PyResult { Err(PyNotImplementedError::new_err( "Creating instances of \"UndefinedType\" is not supported", )) } #[staticmethod] pub fn new(py: Python) -> Py { UNDEFINED_CELL .get_or_init(py, || PydanticUndefinedType {}.into_py(py).extract(py).unwrap()) .clone() } fn __repr__(&self) -> &'static str { "PydanticUndefined" } fn __copy__(&self, py: Python) -> Py { UNDEFINED_CELL.get(py).unwrap().clone() } #[pyo3(signature = (_memo, /))] fn __deepcopy__(&self, py: Python, _memo: &Bound<'_, PyAny>) -> Py { self.__copy__(py) } fn __reduce__(&self) -> &'static str { "PydanticUndefined" } } pydantic-pydantic-core-d771df5/src/build_tools.rs000066400000000000000000000136461473051353300222010ustar00rootroot00000000000000use std::error::Error; use std::fmt; use pyo3::exceptions::PyException; use pyo3::prelude::*; use pyo3::types::{PyDict, PyList, PyString}; use pyo3::{intern, FromPyObject, PyErrArguments}; use crate::errors::ValError; use crate::input::InputType; use crate::tools::SchemaDict; use crate::ValidationError; pub fn schema_or_config<'py, T>( schema: &Bound<'py, PyDict>, config: Option<&Bound<'py, PyDict>>, schema_key: &Bound<'py, PyString>, config_key: &Bound<'py, PyString>, ) -> PyResult> where T: FromPyObject<'py>, { match schema.get_as(schema_key)? 
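    // a value set directly on the schema wins; otherwise fall back to the
    // equivalent key in the config dict (if any)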
{ Some(v) => Ok(Some(v)), None => match config { Some(config) => config.get_as(config_key), None => Ok(None), }, } } pub fn schema_or_config_same<'py, T>( schema: &Bound<'py, PyDict>, config: Option<&Bound<'py, PyDict>>, key: &Bound<'py, PyString>, ) -> PyResult> where T: FromPyObject<'py>, { schema_or_config(schema, config, key, key) } pub fn is_strict(schema: &Bound<'_, PyDict>, config: Option<&Bound<'_, PyDict>>) -> PyResult { let py = schema.py(); Ok(schema_or_config_same(schema, config, intern!(py, "strict"))?.unwrap_or(false)) } enum SchemaErrorEnum { Message(String), ValidationError(ValidationError), } // we could perhaps do clever things here to store each schema error, or have different types for the top // level error group, and other errors, we could perhaps also support error groups!? #[pyclass(extends=PyException, module="pydantic_core._pydantic_core")] pub struct SchemaError(SchemaErrorEnum); impl fmt::Debug for SchemaError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "SchemaError({:?})", self.message()) } } impl fmt::Display for SchemaError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.message()) } } impl Error for SchemaError { #[cfg_attr(has_coverage_attribute, coverage(off))] fn source(&self) -> Option<&(dyn Error + 'static)> { None } } impl SchemaError { pub fn new_err(args: A) -> PyErr where A: PyErrArguments + Send + Sync + 'static, { PyErr::new::(args) } pub fn from_val_error(py: Python, error: ValError) -> PyErr { match error { ValError::LineErrors(raw_errors) => { let line_errors = raw_errors.into_iter().map(|e| e.into_py(py)).collect(); let validation_error = ValidationError::new(line_errors, "Schema".to_object(py), InputType::Python, false); let schema_error = SchemaError(SchemaErrorEnum::ValidationError(validation_error)); match Py::new(py, schema_error) { Ok(err) => PyErr::from_value_bound(err.into_bound(py).into_any()), Err(err) => err, } } ValError::InternalErr(err) => err, ValError::Omit => Self::new_err("Unexpected Omit error."), ValError::UseDefault => Self::new_err("Unexpected UseDefault error."), } } fn message(&self) -> &str { match &self.0 { SchemaErrorEnum::Message(message) => message.as_str(), SchemaErrorEnum::ValidationError(_) => "", } } } #[pymethods] impl SchemaError { #[new] fn py_new(message: String) -> Self { Self(SchemaErrorEnum::Message(message)) } fn error_count(&self) -> usize { match &self.0 { SchemaErrorEnum::Message(_) => 0, SchemaErrorEnum::ValidationError(error) => error.error_count(), } } fn errors(&self, py: Python) -> PyResult> { match &self.0 { SchemaErrorEnum::Message(_) => Ok(PyList::empty_bound(py).unbind()), SchemaErrorEnum::ValidationError(error) => error.errors(py, false, false, true), } } fn __str__(&self, py: Python) -> String { match &self.0 { SchemaErrorEnum::Message(message) => message.clone(), SchemaErrorEnum::ValidationError(error) => error.display(py, Some("Invalid Schema:"), false), } } fn __repr__(&self, py: Python) -> String { match &self.0 { SchemaErrorEnum::Message(message) => format!("SchemaError({message:?})"), SchemaErrorEnum::ValidationError(error) => error.display(py, Some("Invalid Schema:"), false), } } } macro_rules! py_schema_error_type { ($msg:expr) => { crate::tools::py_error_type!(crate::build_tools::SchemaError; $msg) }; ($msg:expr, $( $msg_args:expr ),+ ) => { crate::tools::py_error_type!(crate::build_tools::SchemaError; $msg, $( $msg_args ),+) }; } pub(crate) use py_schema_error_type; macro_rules! 
py_schema_err { ($msg:expr) => { Err(crate::build_tools::py_schema_error_type!($msg)) }; ($msg:expr, $( $msg_args:expr ),+ ) => { Err(crate::build_tools::py_schema_error_type!($msg, $( $msg_args ),+)) }; } pub(crate) use py_schema_err; #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub(crate) enum ExtraBehavior { Allow, Forbid, Ignore, } impl ExtraBehavior { pub fn from_schema_or_config( py: Python, schema: &Bound<'_, PyDict>, config: Option<&Bound<'_, PyDict>>, default: Self, ) -> PyResult { let extra_behavior = schema_or_config::>>( schema, config, intern!(py, "extra_behavior"), intern!(py, "extra_fields_behavior"), )? .flatten(); let res = match extra_behavior.as_ref().map(|s| s.to_str()).transpose()? { Some("allow") => Self::Allow, Some("ignore") => Self::Ignore, Some("forbid") => Self::Forbid, Some(v) => return py_schema_err!("Invalid extra_behavior: `{}`", v), None => default, }; Ok(res) } } pydantic-pydantic-core-d771df5/src/common/000077500000000000000000000000001473051353300205725ustar00rootroot00000000000000pydantic-pydantic-core-d771df5/src/common/mod.rs000066400000000000000000000000261473051353300217150ustar00rootroot00000000000000pub(crate) mod union; pydantic-pydantic-core-d771df5/src/common/union.rs000066400000000000000000000023341473051353300222720ustar00rootroot00000000000000use pyo3::prelude::*; use pyo3::{PyTraverseError, PyVisit}; use crate::lookup_key::LookupKey; use crate::py_gc::PyGcTraverse; #[derive(Debug, Clone)] pub enum Discriminator { /// use `LookupKey` to find the tag, same as we do to find values in typed_dict aliases LookupKey(LookupKey), /// call a function to find the tag to use Function(PyObject), } impl Discriminator { pub fn new(py: Python, raw: &Bound<'_, PyAny>) -> PyResult { if raw.is_callable() { return Ok(Self::Function(raw.to_object(py))); } let lookup_key = LookupKey::from_py(py, raw, None)?; Ok(Self::LookupKey(lookup_key)) } pub fn to_string_py(&self, py: Python) -> PyResult { match self { Self::Function(f) => Ok(format!("{}()", f.getattr(py, "__name__")?)), Self::LookupKey(lookup_key) => Ok(lookup_key.to_string()), } } } impl PyGcTraverse for Discriminator { fn py_gc_traverse(&self, visit: &PyVisit<'_>) -> Result<(), PyTraverseError> { match self { Self::Function(obj) => visit.call(obj)?, Self::LookupKey(_) => {} } Ok(()) } } pub(crate) const SMALL_UNION_THRESHOLD: usize = 4; pydantic-pydantic-core-d771df5/src/definitions.rs000066400000000000000000000213171473051353300221670ustar00rootroot00000000000000/// Definition / reference management /// Our definitions system is very similar to json schema's: there's ref strings and a definitions section /// Unlike json schema we let you put definitions inline, not just in a single '#/$defs/' block or similar. /// We use DefinitionsBuilder to collect the references / definitions into a single vector /// and then get a definition from a reference using an integer id (just for performance of not using a HashMap) use std::{ borrow::Borrow, collections::hash_map::Entry, fmt::Debug, sync::{ atomic::{AtomicBool, Ordering}, Arc, OnceLock, Weak, }, }; use pyo3::{prelude::*, PyTraverseError, PyVisit}; use ahash::AHashMap; use crate::{build_tools::py_schema_err, py_gc::PyGcTraverse}; /// Definitions are validators and serializers that are /// shared by reference. /// They come into play whenever there is recursion, e.g. /// if you have validators A -> B -> A then A will be shared /// by reference so that the SchemaValidator itself can own it. 
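/// (In Python terms, a hypothetical recursive model such as `Node` with a
/// `children: list[Node]` field lowers to exactly this shape: one stored
/// definition plus `definition-ref` schemas pointing back at it.)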
/// These primarily get used by DefinitionRefValidator and DefinitionRefSerializer, /// other validators / serializers primarily pass them around without interacting with them. /// They get indexed by a ReferenceId, which are integer identifiers /// that are handed out and managed by DefinitionsBuilder when the Schema{Validator,Serializer} /// gets build. pub struct Definitions(AHashMap, Definition>); struct Definition { value: Arc>, name: Arc, } /// Reference to a definition. pub struct DefinitionRef { reference: Arc, // We use a weak reference to the definition to avoid a reference cycle // when recursive definitions are used. value: Weak>, name: Arc, } // DefinitionRef can always be cloned (#[derive(Clone)] would require T: Clone) impl Clone for DefinitionRef { fn clone(&self) -> Self { Self { reference: self.reference.clone(), value: self.value.clone(), name: self.name.clone(), } } } impl DefinitionRef { pub fn id(&self) -> usize { Weak::as_ptr(&self.value) as usize } pub fn get_or_init_name(&self, init: impl FnOnce(&T) -> String) -> &str { let Some(definition) = self.value.upgrade() else { return "..."; }; match definition.get() { Some(value) => self.name.get_or_init(|| init(value)), None => "...", } } pub fn read(&self, f: impl FnOnce(Option<&T>) -> R) -> R { f(self.value.upgrade().as_ref().and_then(|value| value.get())) } } impl Debug for DefinitionRef { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // To avoid possible infinite recursion from recursive definitions, // a DefinitionRef just displays debug as its name self.name.fmt(f) } } impl Debug for Definitions { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // Formatted as a list for backwards compatibility; in principle // this could be formatted as a map. Maybe change in a future // minor release of pydantic. write![f, "["]?; let mut first = true; for def in self.0.values() { write![f, "{sep}{def:?}", sep = if first { "" } else { ", " }]?; first = false; } write![f, "]"]?; Ok(()) } } impl Debug for Definition { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.value.get() { Some(value) => value.fmt(f), None => "...".fmt(f), } } } impl PyGcTraverse for DefinitionRef { fn py_gc_traverse(&self, visit: &PyVisit<'_>) -> Result<(), PyTraverseError> { if let Some(value) = self.value.upgrade().as_ref().and_then(|v| v.get()) { value.py_gc_traverse(visit)?; } Ok(()) } } impl PyGcTraverse for Definitions { fn py_gc_traverse(&self, visit: &PyVisit<'_>) -> Result<(), PyTraverseError> { for value in self.0.values() { if let Some(value) = value.value.get() { value.py_gc_traverse(visit)?; } } Ok(()) } } #[derive(Debug)] pub struct DefinitionsBuilder { definitions: Definitions, } impl DefinitionsBuilder { pub fn new() -> Self { Self { definitions: Definitions(AHashMap::new()), } } /// Get a ReferenceId for the given reference string. 
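    ///
    /// A minimal sketch of the builder handshake (hypothetical `V` value type and
    /// `make_node()` helper; fenced `ignore` so it is not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut builder: DefinitionsBuilder<V> = DefinitionsBuilder::new();
    /// let fwd = builder.get_definition("Node"); // forward reference, not yet filled
    /// builder.add_definition("Node".to_string(), make_node(&fwd))?; // fill it in
    /// let definitions = builder.finish()?; // errors if any reference was never filled
    /// ```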
pub fn get_definition(&mut self, reference: &str) -> DefinitionRef { // We either need a String copy or two hashmap lookups // Neither is better than the other // We opted for the easier outward facing API let reference = Arc::new(reference.to_string()); let value = match self.definitions.0.entry(reference.clone()) { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(Definition { value: Arc::new(OnceLock::new()), name: Arc::new(LazyName::new()), }), }; DefinitionRef { reference, value: Arc::downgrade(&value.value), name: value.name.clone(), } } /// Add a definition, returning the ReferenceId that maps to it pub fn add_definition(&mut self, reference: String, value: T) -> PyResult> { let reference = Arc::new(reference); let value = match self.definitions.0.entry(reference.clone()) { Entry::Occupied(entry) => { let definition = entry.into_mut(); match definition.value.set(value) { Ok(()) => definition, Err(_) => return py_schema_err!("Duplicate ref: `{}`", reference), } } Entry::Vacant(entry) => entry.insert(Definition { value: Arc::new(OnceLock::from(value)), name: Arc::new(LazyName::new()), }), }; Ok(DefinitionRef { reference, value: Arc::downgrade(&value.value), name: value.name.clone(), }) } /// Consume this Definitions into a vector of items, indexed by each items ReferenceId pub fn finish(self) -> PyResult> { for (reference, def) in &self.definitions.0 { if def.value.get().is_none() { return py_schema_err!("Definitions error: definition `{}` was never filled", reference); } } Ok(self.definitions) } } /// Because definitions can create recursive structures, we often need to be able to populate /// values lazily from these structures in a way that avoids infinite recursion. This structure /// avoids infinite recursion by returning a default value when a recursion loop is detected. 
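/// A sketch of the contract (hypothetical `compute_name` helper; `ignore` block,
/// not a compiled doctest):
///
/// ```ignore
/// let cache: RecursionSafeCache<String> = RecursionSafeCache::new();
/// // If compute_name() re-enters get_or_init on this same cache (a recursive
/// // definition), the nested call returns the "..." default instead of looping.
/// let name: &str = cache.get_or_init(|| compute_name(), "...");
/// ```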
pub(crate) struct RecursionSafeCache { cache: OnceLock, in_recursion: AtomicBool, } impl Clone for RecursionSafeCache { fn clone(&self) -> Self { Self { cache: self.cache.clone(), in_recursion: AtomicBool::new(false), } } } impl RecursionSafeCache { /// Creates a new RecursionSafeCache pub(crate) fn new() -> Self { Self { cache: OnceLock::new(), in_recursion: AtomicBool::new(false), } } /// Gets or initialized the cached value, returning the default in the case of recursion loops pub(crate) fn get_or_init(&self, init: impl FnOnce() -> T, recursive_default: &'static D) -> &D where T: Borrow, { if let Some(cached) = self.cache.get() { return cached.borrow(); } if self .in_recursion .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst) .is_err() { return recursive_default; } let result = self.cache.get_or_init(init).borrow(); self.in_recursion.store(false, Ordering::SeqCst); result } /// Gets the value, if it is set fn get(&self) -> Option<&T> { self.cache.get() } } #[derive(Clone)] struct LazyName(RecursionSafeCache); impl LazyName { fn new() -> Self { Self(RecursionSafeCache::new()) } /// Gets the validator name, returning the default in the case of recursion loops fn get_or_init(&self, init: impl FnOnce() -> String) -> &str { self.0.get_or_init(init, "...") } } impl Debug for LazyName { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { self.0.get().map_or("...", String::as_str).fmt(f) } } pydantic-pydantic-core-d771df5/src/errors/000077500000000000000000000000001473051353300206165ustar00rootroot00000000000000pydantic-pydantic-core-d771df5/src/errors/line_error.rs000066400000000000000000000120171473051353300233250ustar00rootroot00000000000000use pyo3::exceptions::PyTypeError; use pyo3::prelude::*; use pyo3::DowncastError; use pyo3::DowncastIntoError; use jiter::JsonValue; use crate::input::BorrowInput; use crate::input::Input; use super::location::{LocItem, Location}; use super::types::ErrorType; pub type ValResult = Result; pub trait ToErrorValue { fn to_error_value(&self) -> InputValue; } impl<'a, T: BorrowInput<'a>> ToErrorValue for T { fn to_error_value(&self) -> InputValue { Input::as_error_value(self.borrow_input()) } } impl ToErrorValue for &'_ dyn ToErrorValue { fn to_error_value(&self) -> InputValue { (**self).to_error_value() } } #[cfg_attr(debug_assertions, derive(Debug))] pub enum ValError { LineErrors(Vec), InternalErr(PyErr), Omit, UseDefault, } impl From for ValError { fn from(py_err: PyErr) -> Self { Self::InternalErr(py_err) } } impl From> for ValError { fn from(py_downcast: DowncastError) -> Self { Self::InternalErr(PyTypeError::new_err(py_downcast.to_string())) } } impl From> for ValError { fn from(py_downcast: DowncastIntoError) -> Self { Self::InternalErr(PyTypeError::new_err(py_downcast.to_string())) } } impl From> for ValError { fn from(line_errors: Vec) -> Self { Self::LineErrors(line_errors) } } impl ValError { pub fn new(error_type: ErrorType, input: impl ToErrorValue) -> ValError { Self::LineErrors(vec![ValLineError::new(error_type, input)]) } pub fn new_with_loc(error_type: ErrorType, input: impl ToErrorValue, loc: impl Into) -> ValError { Self::LineErrors(vec![ValLineError::new_with_loc(error_type, input, loc)]) } pub fn new_custom_input(error_type: ErrorType, input_value: InputValue) -> ValError { Self::LineErrors(vec![ValLineError::new_custom_input(error_type, input_value)]) } /// helper function to call with_outer on line items if applicable pub fn with_outer_location(self, into_loc_item: impl Into) -> Self { let loc_item = 
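        // convert once up front; each line error below receives a clone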
into_loc_item.into(); match self { Self::LineErrors(mut line_errors) => { for line_error in &mut line_errors { line_error.location.with_outer(loc_item.clone()); } Self::LineErrors(line_errors) } other => other, } } } /// A `ValLineError` is a single error that occurred during validation which is converted to a `PyLineError` /// to eventually form a `ValidationError`. /// I don't like the name `ValLineError`, but it's the best I could come up with (for now). #[cfg_attr(debug_assertions, derive(Debug))] pub struct ValLineError { pub error_type: ErrorType, // location is reversed so that adding an "outer" location item is pushing, it's reversed before showing to the user pub location: Location, pub input_value: InputValue, } impl ValLineError { pub fn new(error_type: ErrorType, input: impl ToErrorValue) -> ValLineError { Self { error_type, input_value: input.to_error_value(), location: Location::default(), } } pub fn new_with_loc(error_type: ErrorType, input: impl ToErrorValue, loc: impl Into) -> ValLineError { Self { error_type, input_value: input.to_error_value(), location: Location::new_some(loc.into()), } } pub fn new_with_full_loc(error_type: ErrorType, input: impl ToErrorValue, location: Location) -> ValLineError { Self { error_type, input_value: input.to_error_value(), location, } } pub fn new_custom_input(error_type: ErrorType, input_value: InputValue) -> ValLineError { Self { error_type, input_value, location: Location::default(), } } /// location is stored reversed so it's quicker to add "outer" items as that's what we always do /// hence `push` here instead of `insert` pub fn with_outer_location(mut self, into_loc_item: impl Into) -> Self { self.location.with_outer(into_loc_item.into()); self } // change the error_type on a error in place pub fn with_type(mut self, error_type: ErrorType) -> Self { self.error_type = error_type; self } pub fn first_loc_item(&self) -> Option<&LocItem> { match &self.location { Location::Empty => None, // last because order is reversed Location::List(loc_items) => loc_items.last(), } } } #[cfg_attr(debug_assertions, derive(Debug))] #[derive(Clone)] pub enum InputValue { Python(PyObject), Json(JsonValue<'static>), } impl ToPyObject for InputValue { fn to_object(&self, py: Python) -> PyObject { match self { Self::Python(input) => input.clone_ref(py), Self::Json(input) => input.to_object(py), } } } pydantic-pydantic-core-d771df5/src/errors/location.rs000066400000000000000000000152711473051353300230020ustar00rootroot00000000000000use pyo3::exceptions::PyTypeError; use pyo3::sync::GILOnceCell; use std::borrow::Cow; use std::fmt; use pyo3::prelude::*; use pyo3::types::{PyList, PyTuple}; use serde::ser::SerializeSeq; use serde::{Serialize, Serializer}; use crate::lookup_key::{LookupPath, PathItem}; /// Used to store individual items of the error location, e.g. a string for key/field names /// or a number for array indices. 
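/// For example, an error at `data['items'][2]['id']` is addressed by the items
/// `S("items")`, `I(2)`, `S("id")` (see `Location` below for how the sequence
/// itself is stored).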
#[derive(Clone, Eq, PartialEq)] #[cfg_attr(debug_assertions, derive(Debug))] pub enum LocItem { /// string type key, used to identify items from a dict or anything that implements `__getitem__` S(String), /// integer key, used to get: /// * items from a list /// * items from a tuple /// * dict with int keys `Dict[int, ...]` (python only) /// * with integer keys in tagged unions I(i64), } impl fmt::Display for LocItem { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::S(s) if s.contains('.') => write!(f, "`{s}`"), Self::S(s) => write!(f, "{s}"), Self::I(i) => write!(f, "{i}"), } } } impl From for LocItem { fn from(s: String) -> Self { Self::S(s) } } impl From<&String> for LocItem { fn from(s: &String) -> Self { s.to_string().into() } } impl From<&str> for LocItem { fn from(s: &str) -> Self { Self::S(s.to_string()) } } impl From> for LocItem { fn from(s: Cow<'_, str>) -> Self { Self::S(s.into_owned()) } } impl From for LocItem { fn from(i: i64) -> Self { Self::I(i) } } impl From for LocItem { fn from(u: usize) -> Self { Self::I(u as i64) } } /// eventually it might be good to combine PathItem and LocItem impl From for LocItem { fn from(path_item: PathItem) -> Self { match path_item { PathItem::S(s, _) => s.into(), PathItem::Pos(val) => val.into(), PathItem::Neg(val) => { let neg_value = -(val as i64); neg_value.into() } } } } impl ToPyObject for LocItem { fn to_object(&self, py: Python<'_>) -> PyObject { match self { Self::S(val) => val.to_object(py), Self::I(val) => val.to_object(py), } } } impl Serialize for LocItem { fn serialize(&self, serializer: S) -> Result where S: Serializer, { match self { Self::S(s) => serializer.serialize_str(s.as_str()), Self::I(loc) => serializer.serialize_i64(*loc), } } } /// Error locations are represented by a vector of `LocItem`s. /// e.g. if the error occurred in the third member of a list called `foo`, /// the location would be `["foo", 2]`. /// Note: location in List is stored in **REVERSE** so adding an "outer" item to location involves /// pushing to the vec which is faster than inserting and shifting everything along. /// Then when "using" location in `Display` and `ToPyObject` order has to be reversed #[derive(Clone)] #[cfg_attr(debug_assertions, derive(Debug))] pub enum Location { // no location, avoid creating an unnecessary vec Empty, // store the in a vec of LocItems, Note: this is the REVERSE of location, see above // we could perhaps use a smallvec or similar here, probably only worth it if we store a Cow in LocItem List(Vec), } impl Default for Location { fn default() -> Self { Self::Empty } } static EMPTY_TUPLE: GILOnceCell = GILOnceCell::new(); impl ToPyObject for Location { fn to_object(&self, py: Python<'_>) -> PyObject { match self { Self::List(loc) => PyTuple::new_bound(py, loc.iter().rev()).to_object(py), Self::Empty => EMPTY_TUPLE .get_or_init(py, || PyTuple::empty_bound(py).to_object(py)) .clone_ref(py), } } } impl From<&LookupPath> for Location { fn from(lookup_path: &LookupPath) -> Self { let v = lookup_path .iter() .rev() .map(|path_item| path_item.clone().into()) .collect(); Self::List(v) } } impl fmt::Display for Location { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::List(loc) => { let loc_str = loc.iter().rev().map(ToString::to_string).collect::>(); writeln!(f, "{}", loc_str.join(".")) } Self::Empty => Ok(()), } } } impl Location { /// create a new location vec with a value, 3 is plucked out of thin air, should it just be 1? 
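    /// (A capacity of 3 leaves headroom for a couple of `with_outer` pushes,
    /// e.g. a field name plus a list index, before the Vec reallocates.)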
    pub fn new_some(item: LocItem) -> Self {
        let mut loc = Vec::with_capacity(3);
        loc.push(item);
        Self::List(loc)
    }

    pub fn with_outer(&mut self, loc_item: LocItem) {
        match self {
            Self::List(ref mut loc) => loc.push(loc_item),
            Self::Empty => {
                *self = Self::new_some(loc_item);
            }
        };
    }
}

impl Serialize for Location {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Self::Empty => serializer.serialize_seq(Some(0))?.end(),
            Self::List(loc) => {
                let mut seq = serializer.serialize_seq(Some(loc.len()))?;
                for e in loc.iter().rev() {
                    seq.serialize_element(e)?;
                }
                seq.end()
            }
        }
    }
}

impl TryFrom<Option<&Bound<'_, PyAny>>> for Location {
    type Error = PyErr;

    /// Only ever called by ValidationError -> PyLineError to convert user input to our internal Location
    /// Thus this expects the location to *not* be reversed and reverses it before storing it.
    fn try_from(location: Option<&Bound<'_, PyAny>>) -> PyResult<Self> {
        if let Some(location) = location {
            let mut loc_vec: Vec<LocItem> = if let Ok(tuple) = location.downcast::<PyTuple>() {
                tuple.iter().map(Into::into).collect()
            } else if let Ok(list) = location.downcast::<PyList>() {
                list.iter().map(Into::into).collect()
            } else {
                return Err(PyTypeError::new_err(
                    "Location must be a list or tuple of strings and ints",
                ));
            };
            if loc_vec.is_empty() {
                Ok(Self::Empty)
            } else {
                // Don't force Python users to give us the location reversed
                // just because we internally store it like that
                loc_vec.reverse();
                Ok(Self::List(loc_vec))
            }
        } else {
            Ok(Self::Empty)
        }
    }
}
pydantic-pydantic-core-d771df5/src/errors/mod.rs000066400000000000000000000021211473051353300217430ustar00rootroot00000000000000
use pyo3::prelude::*;

mod line_error;
mod location;
mod types;
mod validation_exception;
mod value_exception;

pub use self::line_error::{InputValue, ToErrorValue, ValError, ValLineError, ValResult};
pub use self::location::LocItem;
pub use self::types::{list_all_errors, ErrorType, ErrorTypeDefaults, Number};
pub use self::validation_exception::ValidationError;
pub use self::value_exception::{PydanticCustomError, PydanticKnownError, PydanticOmit, PydanticUseDefault};

pub fn py_err_string(py: Python, err: PyErr) -> String {
    let value = err.value_bound(py);
    match value.get_type().qualname() {
        Ok(type_name) => match value.str() {
            Ok(py_str) => {
                let str_cow = py_str.to_string_lossy();
                let str = str_cow.as_ref();
                if !str.is_empty() {
                    format!("{type_name}: {str}")
                } else {
                    type_name.to_string()
                }
            }
            Err(_) => format!("{type_name}: <exception str() failed>"),
        },
        Err(_) => "Unknown Error".to_string(),
    }
}
pydantic-pydantic-core-d771df5/src/errors/types.rs000066400000000000000000001051161473051353300223340ustar00rootroot00000000000000
use std::any::type_name;
use std::borrow::Cow;
use std::fmt;

use pyo3::exceptions::{PyKeyError, PyTypeError};
use pyo3::prelude::*;
use pyo3::sync::GILOnceCell;
use pyo3::types::{PyDict, PyList};

use ahash::AHashMap;
use num_bigint::BigInt;
use strum::{Display, EnumMessage, IntoEnumIterator};
use strum_macros::EnumIter;

use crate::input::{InputType, Int};
use crate::tools::{extract_i64, py_err, py_error_type};

use super::PydanticCustomError;

#[pyfunction]
pub fn list_all_errors(py: Python) -> PyResult<Bound<'_, PyList>> {
    let mut errors: Vec<Bound<'_, PyDict>> = Vec::with_capacity(100);
    for error_type in ErrorType::iter() {
        if !matches!(error_type, ErrorType::CustomError { ..
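        // `CustomError` is skipped here: its error type, message template and context all
        // come from user code, so there is no canonical example entry to list for it.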
}) {
            let d = PyDict::new_bound(py);
            d.set_item("type", error_type.to_string())?;
            let message_template_python = error_type.message_template_python();
            d.set_item("message_template_python", message_template_python)?;
            d.set_item(
                "example_message_python",
                error_type.render_message(py, InputType::Python)?,
            )?;
            let message_template_json = error_type.message_template_json();
            if message_template_python != message_template_json {
                d.set_item("message_template_json", message_template_json)?;
                d.set_item("example_message_json", error_type.render_message(py, InputType::Json)?)?;
            }
            d.set_item("example_context", error_type.py_dict(py)?)?;
            errors.push(d);
        }
    }
    Ok(PyList::new_bound(py, errors))
}

fn field_from_context<'py, T: FromPyObject<'py>>(
    context: Option<&Bound<'py, PyDict>>,
    field_name: &str,
    enum_name: &str,
    type_name_fn: fn() -> &'static str,
) -> PyResult<T> {
    context
        .ok_or_else(|| py_error_type!(PyTypeError; "{}: '{}' required in context", enum_name, field_name))?
        .get_item(field_name)?
        .ok_or_else(|| py_error_type!(PyTypeError; "{}: '{}' required in context", enum_name, field_name))?
        .extract::<T>()
        .map_err(|_| py_error_type!(PyTypeError; "{}: '{}' context value must be a {}", enum_name, field_name, type_name_fn()))
}

fn cow_field_from_context<'py, T: FromPyObject<'py>, B: ToOwned<Owned = T> + ?Sized + 'static>(
    context: Option<&Bound<'py, PyDict>>,
    field_name: &str,
    enum_name: &str,
    _type_name_fn: fn() -> &'static str,
) -> PyResult<Cow<'static, B>> {
    let res: T = field_from_context(context, field_name, enum_name, || {
        type_name::<B>().split("::").last().unwrap()
    })?;
    Ok(Cow::Owned(res))
}

macro_rules! basic_error_default {
    ( $item:ident $(,)? ) => {
        pub const $item: ErrorType = ErrorType::$item { context: None };
    };
    ( $item:ident, $($key:ident),* $(,)? ) => {}; // With more parameters enum item must be explicitly created
}

macro_rules! error_types {
    (
        $(
            $item:ident {
                $($key:ident: {ctx_type: $ctx_type:ty, ctx_fn: $ctx_fn:path}),* $(,)?
            },
        )+
    ) => {
        #[derive(Clone, Debug, Display, EnumMessage, EnumIter)]
        #[strum(serialize_all = "snake_case")]
        pub enum ErrorType {
            $(
                $item {
                    context: Option<Py<PyDict>>,
                    $($key: $ctx_type,)*
                }
            ),+,
        }

        impl ErrorType {
            pub fn new(py: Python, value: &str, context: Option<Bound<'_, PyDict>>) -> PyResult<Self> {
                let lookup = ERROR_TYPE_LOOKUP.get_or_init(py, Self::build_lookup);
                let error_type = match lookup.get(value) {
                    Some(error_type) => error_type.clone(),
                    None => return py_err!(PyKeyError; "Invalid error type: '{}'", value),
                };
                match error_type {
                    $(
                        Self::$item { .. } => {
                            Ok(Self::$item {
                                $(
                                    $key: $ctx_fn(context.as_ref(), stringify!($key), stringify!($item), || stringify!($ctx_type))?,
                                )*
                                context: context.map(|c| c.unbind()),
                            })
                        },
                    )+
                }
            }

            fn py_dict_update_ctx(&self, py: Python, dict: &Bound<'_, PyDict>) -> PyResult<bool> {
                use pyo3::types::PyMapping;
                match self {
                    $(
                        Self::$item { context, $($key,)* } => {
                            $(
                                dict.set_item::<&str, Py<PyAny>>(stringify!($key), $key.to_object(py))?;
                            )*
                            if let Some(ctx) = context {
                                dict.update(ctx.bind(py).downcast::<PyMapping>()?)?;
                                Ok(true)
                            } else {
                                Ok(false)
                            }
                        },
                    )+
                }
            }
        }

        pub struct ErrorTypeDefaults {}

        // Allow unused default constants as they are generated by macro.
        // Also allow camel case as constants so we don't need to do case conversion of macro
        // generated names. Enums are also then easier to find when searching.
        #[allow(dead_code, non_upper_case_globals)]
        impl ErrorTypeDefaults {
            $( basic_error_default!($item, $($key),*); )+
        }
    };
}

// Define each validation error.
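// For orientation, a hand-expanded sketch of what `error_types!` generates for one
// parameterised entry (illustrative only; the macro above is the source of truth):
//
//     // NoSuchAttribute { attribute: {ctx_type: String, ctx_fn: field_from_context} }
//     // expands (roughly) to the enum variant:
//     //     NoSuchAttribute { context: Option<Py<PyDict>>, attribute: String },
//     // while `ErrorTypeDefaults` consts are only generated for parameterless variants.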
// NOTE: if an error has parameters:
// * the variables in the message need to match the enum struct
// * you need to add an entry to the `render` enum to render the error message as a template
error_types! {
    // ---------------------
    // Assignment errors
    NoSuchAttribute {
        attribute: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // JSON errors
    JsonInvalid {
        error: {ctx_type: String, ctx_fn: field_from_context},
    },
    JsonType {},
    NeedsPythonObject {
        method_name: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // recursion error
    RecursionLoop {},
    // ---------------------
    // typed dict specific errors
    Missing {},
    FrozenField {},
    FrozenInstance {},
    ExtraForbidden {},
    InvalidKey {},
    GetAttributeError {
        error: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // model class specific errors
    ModelType {
        class_name: {ctx_type: String, ctx_fn: field_from_context},
    },
    ModelAttributesType {},
    // ---------------------
    // dataclass errors (we don't talk about ArgsKwargs here for simplicity)
    DataclassType {
        class_name: {ctx_type: String, ctx_fn: field_from_context},
    },
    DataclassExactType {
        class_name: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // None errors
    NoneRequired {},
    // ---------------------
    // generic comparison errors
    GreaterThan {
        gt: {ctx_type: Number, ctx_fn: field_from_context},
    },
    GreaterThanEqual {
        ge: {ctx_type: Number, ctx_fn: field_from_context},
    },
    LessThan {
        lt: {ctx_type: Number, ctx_fn: field_from_context},
    },
    LessThanEqual {
        le: {ctx_type: Number, ctx_fn: field_from_context},
    },
    MultipleOf {
        multiple_of: {ctx_type: Number, ctx_fn: field_from_context},
    },
    FiniteNumber {},
    // ---------------------
    // generic length errors - used for everything with a length except strings and bytes which need custom messages
    TooShort {
        field_type: {ctx_type: String, ctx_fn: field_from_context},
        min_length: {ctx_type: usize, ctx_fn: field_from_context},
        actual_length: {ctx_type: usize, ctx_fn: field_from_context},
    },
    TooLong {
        field_type: {ctx_type: String, ctx_fn: field_from_context},
        max_length: {ctx_type: usize, ctx_fn: field_from_context},
        actual_length: {ctx_type: Option<usize>, ctx_fn: field_from_context},
    },
    // ---------------------
    // generic collection and iteration errors
    IterableType {},
    IterationError {
        error: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // string errors
    StringType {},
    StringSubType {},
    StringUnicode {},
    StringTooShort {
        min_length: {ctx_type: usize, ctx_fn: field_from_context},
    },
    StringTooLong {
        max_length: {ctx_type: usize, ctx_fn: field_from_context},
    },
    StringPatternMismatch {
        pattern: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // enum errors
    Enum {
        expected: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // dict errors
    DictType {},
    MappingType {
        error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context},
    },
    // ---------------------
    // list errors
    ListType {},
    // ---------------------
    // tuple errors
    TupleType {},
    // ---------------------
    // set errors
    SetType {},
    // ---------------------
    // bool errors
    BoolType {},
    BoolParsing {},
    // ---------------------
    // int errors
    IntType {},
    IntParsing {},
    IntParsingSize {},
    IntFromFloat {},
    // ---------------------
    // float errors
    FloatType {},
    FloatParsing {},
    // ---------------------
    // bytes errors
    BytesType {},
    BytesTooShort {
        min_length: {ctx_type: usize, ctx_fn: field_from_context},
    },
    BytesTooLong {
        max_length: {ctx_type: usize, ctx_fn: field_from_context},
    },
    BytesInvalidEncoding {
        encoding: {ctx_type: String, ctx_fn: field_from_context},
        encoding_error: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // python errors from functions
    ValueError {
        error: {ctx_type: Option<PyObject>, ctx_fn: field_from_context}, // Use Option because EnumIter requires Default to be implemented
    },
    AssertionError {
        error: {ctx_type: Option<PyObject>, ctx_fn: field_from_context}, // Use Option because EnumIter requires Default to be implemented
    },
    // Note: strum message and serialize are not used here
    CustomError {
        // context is a common field in all enums
        error_type: {ctx_type: String, ctx_fn: field_from_context},
        message_template: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // literals
    LiteralError {
        expected: {ctx_type: String, ctx_fn: field_from_context},
    },
    // ---------------------
    // date errors
    DateType {},
    DateParsing {
        error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context},
    },
    DateFromDatetimeParsing {
        error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context},
    },
    DateFromDatetimeInexact {},
    DatePast {},
    DateFuture {},
    // ---------------------
    // time errors
    TimeType {},
    TimeParsing {
        error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context},
    },
    // ---------------------
    // datetime errors
    DatetimeType {},
    DatetimeParsing {
        error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context},
    },
    DatetimeObjectInvalid {
        error: {ctx_type: String, ctx_fn: field_from_context},
    },
    DatetimeFromDateParsing {
        error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context},
    },
    DatetimePast {},
    DatetimeFuture {},
    // ---------------------
    // timezone errors
    TimezoneNaive {},
    TimezoneAware {},
    TimezoneOffset {
        tz_expected: {ctx_type: i32, ctx_fn: field_from_context},
        tz_actual: {ctx_type: i32, ctx_fn: field_from_context},
    },
    // ---------------------
    // timedelta errors
    TimeDeltaType {},
    TimeDeltaParsing {
        error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context},
    },
    // ---------------------
    // frozenset errors
    FrozenSetType {},
    // ---------------------
    // introspection types - e.g.
isinstance, callable IsInstanceOf { class: {ctx_type: String, ctx_fn: field_from_context}, }, IsSubclassOf { class: {ctx_type: String, ctx_fn: field_from_context}, }, CallableType {}, // --------------------- // union errors UnionTagInvalid { discriminator: {ctx_type: String, ctx_fn: field_from_context}, tag: {ctx_type: String, ctx_fn: field_from_context}, expected_tags: {ctx_type: String, ctx_fn: field_from_context}, }, UnionTagNotFound { discriminator: {ctx_type: String, ctx_fn: field_from_context}, }, // --------------------- // argument errors ArgumentsType {}, MissingArgument {}, UnexpectedKeywordArgument {}, MissingKeywordOnlyArgument {}, UnexpectedPositionalArgument {}, MissingPositionalOnlyArgument {}, MultipleArgumentValues {}, // --------------------- // URL errors UrlType {}, UrlParsing { // would be great if this could be a static cow, waiting for https://github.com/servo/rust-url/issues/801 error: {ctx_type: String, ctx_fn: field_from_context}, }, UrlSyntaxViolation { error: {ctx_type: Cow<'static, str>, ctx_fn: cow_field_from_context}, }, UrlTooLong { max_length: {ctx_type: usize, ctx_fn: field_from_context}, }, UrlScheme { expected_schemes: {ctx_type: String, ctx_fn: field_from_context}, }, // UUID errors, UuidType {}, UuidParsing { error: {ctx_type: String, ctx_fn: field_from_context}, }, UuidVersion { expected_version: {ctx_type: usize, ctx_fn: field_from_context}, }, // Decimal errors DecimalType {}, DecimalParsing {}, DecimalMaxDigits { max_digits: {ctx_type: u64, ctx_fn: field_from_context}, }, DecimalMaxPlaces { decimal_places: {ctx_type: u64, ctx_fn: field_from_context}, }, DecimalWholeDigits { whole_digits: {ctx_type: u64, ctx_fn: field_from_context}, }, // Complex errors ComplexType {}, ComplexStrParsing {}, } macro_rules! render { ($template:ident, $($value:ident),* $(,)?) => { Ok( $template $( .replace(concat!("{", stringify!($value), "}"), $value) )* ) }; } macro_rules! to_string_render { ($template:ident, $($value:ident),* $(,)?) 
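// Both macros fill `{placeholder}` slots in a message template by plain string
// replacement; `to_string_render!` differs from `render!` (above) only in calling
// `.to_string()` on each value first. Illustrative expansions (assumptions, not
// generated code):
//
//     render!(tmpl, attribute)    // tmpl.replace("{attribute}", attribute)
//     to_string_render!(tmpl, gt) // tmpl.replace("{gt}", &gt.to_string())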
=> { Ok( $template $( .replace(concat!("{", stringify!($value), "}"), &$value.to_string()) )* ) }; } fn plural_s + PartialEq>(value: T) -> &'static str { if value == 1.into() { "" } else { "s" } } static ERROR_TYPE_LOOKUP: GILOnceCell> = GILOnceCell::new(); impl ErrorType { pub fn new_custom_error(py: Python, custom_error: PydanticCustomError) -> Self { Self::CustomError { error_type: custom_error.error_type().to_owned(), message_template: custom_error.message_template().to_owned(), context: custom_error.context(py), } } pub fn message_template_python(&self) -> &'static str { match self { Self::NoSuchAttribute {..} => "Object has no attribute '{attribute}'", Self::JsonInvalid {..} => "Invalid JSON: {error}", Self::JsonType {..} => "JSON input should be string, bytes or bytearray", Self::NeedsPythonObject {..} => "Cannot check `{method_name}` when validating from json, use a JsonOrPython validator instead", Self::RecursionLoop {..} => "Recursion error - cyclic reference detected", Self::Missing {..} => "Field required", Self::FrozenField {..} => "Field is frozen", Self::FrozenInstance {..} => "Instance is frozen", Self::ExtraForbidden {..} => "Extra inputs are not permitted", Self::InvalidKey {..} => "Keys should be strings", Self::GetAttributeError {..} => "Error extracting attribute: {error}", Self::ModelType {..} => "Input should be a valid dictionary or instance of {class_name}", Self::ModelAttributesType {..} => "Input should be a valid dictionary or object to extract fields from", Self::DataclassType {..} => "Input should be a dictionary or an instance of {class_name}", Self::DataclassExactType {..} => "Input should be an instance of {class_name}", Self::NoneRequired {..} => "Input should be None", Self::GreaterThan {..} => "Input should be greater than {gt}", Self::GreaterThanEqual {..} => "Input should be greater than or equal to {ge}", Self::LessThan {..} => "Input should be less than {lt}", Self::LessThanEqual {..} => "Input should be less than or equal to {le}", Self::MultipleOf {..} => "Input should be a multiple of {multiple_of}", Self::FiniteNumber {..} => "Input should be a finite number", Self::TooShort {..} => "{field_type} should have at least {min_length} item{expected_plural} after validation, not {actual_length}", Self::TooLong {..} => "{field_type} should have at most {max_length} item{expected_plural} after validation, not {actual_length}", Self::IterableType {..} => "Input should be iterable", Self::IterationError {..} => "Error iterating over object, error: {error}", Self::StringType {..} => "Input should be a valid string", Self::StringSubType {..} => "Input should be a string, not an instance of a subclass of str", Self::StringUnicode {..} => "Input should be a valid string, unable to parse raw data as a unicode string", Self::StringTooShort {..} => "String should have at least {min_length} character{expected_plural}", Self::StringTooLong {..} => "String should have at most {max_length} character{expected_plural}", Self::StringPatternMismatch {..} => "String should match pattern '{pattern}'", Self::Enum {..} => "Input should be {expected}", Self::DictType {..} => "Input should be a valid dictionary", Self::MappingType {..} => "Input should be a valid mapping, error: {error}", Self::ListType {..} => "Input should be a valid list", Self::TupleType {..} => "Input should be a valid tuple", Self::SetType {..} => "Input should be a valid set", Self::BoolType {..} => "Input should be a valid boolean", Self::BoolParsing {..} => "Input should be a valid boolean, unable to 
interpret input", Self::IntType {..} => "Input should be a valid integer", Self::IntParsing {..} => "Input should be a valid integer, unable to parse string as an integer", Self::IntFromFloat {..} => "Input should be a valid integer, got a number with a fractional part", Self::IntParsingSize {..} => "Unable to parse input string as an integer, exceeded maximum size", Self::FloatType {..} => "Input should be a valid number", Self::FloatParsing {..} => "Input should be a valid number, unable to parse string as a number", Self::BytesType {..} => "Input should be a valid bytes", Self::BytesTooShort {..} => "Data should have at least {min_length} byte{expected_plural}", Self::BytesTooLong {..} => "Data should have at most {max_length} byte{expected_plural}", Self::BytesInvalidEncoding { .. } => "Data should be valid {encoding}: {encoding_error}", Self::ValueError {..} => "Value error, {error}", Self::AssertionError {..} => "Assertion failed, {error}", Self::CustomError {..} => "", // custom errors are handled separately Self::LiteralError {..} => "Input should be {expected}", Self::DateType {..} => "Input should be a valid date", Self::DateParsing {..} => "Input should be a valid date in the format YYYY-MM-DD, {error}", Self::DateFromDatetimeParsing {..} => "Input should be a valid date or datetime, {error}", Self::DateFromDatetimeInexact {..} => "Datetimes provided to dates should have zero time - e.g. be exact dates", Self::DatePast {..} => "Date should be in the past", Self::DateFuture {..} => "Date should be in the future", Self::TimeType {..} => "Input should be a valid time", Self::TimeParsing {..} => "Input should be in a valid time format, {error}", Self::DatetimeType {..} => "Input should be a valid datetime", Self::DatetimeParsing {..} => "Input should be a valid datetime, {error}", Self::DatetimeObjectInvalid {..} => "Invalid datetime object, got {error}", Self::DatetimeFromDateParsing {..} => "Input should be a valid datetime or date, {error}", Self::DatetimePast {..} => "Input should be in the past", Self::DatetimeFuture {..} => "Input should be in the future", Self::TimezoneNaive {..} => "Input should not have timezone info", Self::TimezoneAware {..} => "Input should have timezone info", Self::TimezoneOffset {..} => "Timezone offset of {tz_expected} required, got {tz_actual}", Self::TimeDeltaType {..} => "Input should be a valid timedelta", Self::TimeDeltaParsing {..} => "Input should be a valid timedelta, {error}", Self::FrozenSetType {..} => "Input should be a valid frozenset", Self::IsInstanceOf {..} => "Input should be an instance of {class}", Self::IsSubclassOf {..} => "Input should be a subclass of {class}", Self::CallableType {..} => "Input should be callable", Self::UnionTagInvalid {..} => "Input tag '{tag}' found using {discriminator} does not match any of the expected tags: {expected_tags}", Self::UnionTagNotFound {..} => "Unable to extract tag using discriminator {discriminator}", Self::ArgumentsType {..} => "Arguments must be a tuple, list or a dictionary", Self::MissingArgument {..} => "Missing required argument", Self::UnexpectedKeywordArgument {..} => "Unexpected keyword argument", Self::MissingKeywordOnlyArgument {..} => "Missing required keyword only argument", Self::UnexpectedPositionalArgument {..} => "Unexpected positional argument", Self::MissingPositionalOnlyArgument {..} => "Missing required positional only argument", Self::MultipleArgumentValues {..} => "Got multiple values for argument", Self::UrlType {..} => "URL input should be a string or URL", 
Self::UrlParsing {..} => "Input should be a valid URL, {error}", Self::UrlSyntaxViolation {..} => "Input violated strict URL syntax rules, {error}", Self::UrlTooLong {..} => "URL should have at most {max_length} character{expected_plural}", Self::UrlScheme {..} => "URL scheme should be {expected_schemes}", Self::UuidType {..} => "UUID input should be a string, bytes or UUID object", Self::UuidParsing {..} => "Input should be a valid UUID, {error}", Self::UuidVersion {..} => "UUID version {expected_version} expected", Self::DecimalType {..} => "Decimal input should be an integer, float, string or Decimal object", Self::DecimalParsing {..} => "Input should be a valid decimal", Self::DecimalMaxDigits {..} => "Decimal input should have no more than {max_digits} digit{expected_plural} in total", Self::DecimalMaxPlaces {..} => "Decimal input should have no more than {decimal_places} decimal place{expected_plural}", Self::DecimalWholeDigits {..} => "Decimal input should have no more than {whole_digits} digit{expected_plural} before the decimal point", Self::ComplexType {..} => "Input should be a valid python complex object, a number, or a valid complex string following the rules at https://docs.python.org/3/library/functions.html#complex", Self::ComplexStrParsing {..} => "Input should be a valid complex string following the rules at https://docs.python.org/3/library/functions.html#complex", } } pub fn message_template_json(&self) -> &'static str { match self { Self::NoneRequired { .. } => "Input should be null", Self::ListType { .. } | Self::TupleType { .. } | Self::IterableType { .. } | Self::SetType { .. } | Self::FrozenSetType { .. } => "Input should be a valid array", Self::ModelType { .. } | Self::ModelAttributesType { .. } | Self::DictType { .. } | Self::DataclassType { .. } => "Input should be an object", Self::TimeDeltaType { .. } => "Input should be a valid duration", Self::TimeDeltaParsing { .. } => "Input should be a valid duration, {error}", Self::ArgumentsType { .. } => "Arguments must be an array or an object", _ => self.message_template_python(), } } pub fn valid_type(py: Python, error_type: &str) -> bool { let lookup = ERROR_TYPE_LOOKUP.get_or_init(py, Self::build_lookup); lookup.contains_key(error_type) } fn build_lookup() -> AHashMap { let mut lookup = AHashMap::new(); for error_type in Self::iter() { if !matches!(error_type, Self::CustomError { .. }) { lookup.insert(error_type.to_string(), error_type); } } lookup } pub fn type_string(&self) -> String { match self { Self::CustomError { error_type, .. } => error_type.clone(), _ => self.to_string(), } } pub fn render_message(&self, py: Python, input_type: InputType) -> PyResult { let tmpl = match input_type { InputType::Python => self.message_template_python(), _ => self.message_template_json(), }; match self { Self::NoSuchAttribute { attribute, .. } => render!(tmpl, attribute), Self::JsonInvalid { error, .. } => render!(tmpl, error), Self::NeedsPythonObject { method_name, .. } => render!(tmpl, method_name), Self::GetAttributeError { error, .. } => render!(tmpl, error), Self::ModelType { class_name, .. } => render!(tmpl, class_name), Self::DataclassType { class_name, .. } => render!(tmpl, class_name), Self::DataclassExactType { class_name, .. } => render!(tmpl, class_name), Self::GreaterThan { gt, .. } => to_string_render!(tmpl, gt), Self::GreaterThanEqual { ge, .. } => to_string_render!(tmpl, ge), Self::LessThan { lt, .. } => to_string_render!(tmpl, lt), Self::LessThanEqual { le, .. 
} => to_string_render!(tmpl, le), Self::MultipleOf { multiple_of, .. } => to_string_render!(tmpl, multiple_of), Self::TooShort { field_type, min_length, actual_length, .. } => { let expected_plural = plural_s(*min_length); to_string_render!(tmpl, field_type, min_length, actual_length, expected_plural,) } Self::TooLong { field_type, max_length, actual_length, .. } => { let expected_plural = plural_s(*max_length); let actual_length = actual_length.map_or(Cow::Borrowed("more"), |v| Cow::Owned(v.to_string())); to_string_render!(tmpl, field_type, max_length, actual_length, expected_plural,) } Self::IterationError { error, .. } => render!(tmpl, error), Self::StringTooShort { min_length, .. } => { let expected_plural = plural_s(*min_length); to_string_render!(tmpl, min_length, expected_plural) } Self::StringTooLong { max_length, .. } => { let expected_plural = plural_s(*max_length); to_string_render!(tmpl, max_length, expected_plural) } Self::StringPatternMismatch { pattern, .. } => render!(tmpl, pattern), Self::Enum { expected, .. } => to_string_render!(tmpl, expected), Self::MappingType { error, .. } => render!(tmpl, error), Self::BytesTooShort { min_length, .. } => { let expected_plural = plural_s(*min_length); to_string_render!(tmpl, min_length, expected_plural) } Self::BytesTooLong { max_length, .. } => { let expected_plural = plural_s(*max_length); to_string_render!(tmpl, max_length, expected_plural) } Self::BytesInvalidEncoding { encoding, encoding_error, .. } => render!(tmpl, encoding, encoding_error), Self::ValueError { error, .. } => { let error = &error .as_ref() .map_or(Cow::Borrowed("None"), |v| Cow::Owned(v.bind(py).to_string())); render!(tmpl, error) } Self::AssertionError { error, .. } => { let error = &error .as_ref() .map_or(Cow::Borrowed("None"), |v| Cow::Owned(v.bind(py).to_string())); render!(tmpl, error) } Self::CustomError { message_template, context, .. } => PydanticCustomError::format_message(message_template, context.as_ref().map(|c| c.bind(py))), Self::LiteralError { expected, .. } => render!(tmpl, expected), Self::DateParsing { error, .. } => render!(tmpl, error), Self::DateFromDatetimeParsing { error, .. } => render!(tmpl, error), Self::TimeParsing { error, .. } => render!(tmpl, error), Self::DatetimeParsing { error, .. } => render!(tmpl, error), Self::DatetimeFromDateParsing { error, .. } => render!(tmpl, error), Self::DatetimeObjectInvalid { error, .. } => render!(tmpl, error), Self::TimezoneOffset { tz_expected, tz_actual, .. } => to_string_render!(tmpl, tz_expected, tz_actual), Self::TimeDeltaParsing { error, .. } => render!(tmpl, error), Self::IsInstanceOf { class, .. } => render!(tmpl, class), Self::IsSubclassOf { class, .. } => render!(tmpl, class), Self::UnionTagInvalid { discriminator, tag, expected_tags, .. } => render!(tmpl, discriminator, tag, expected_tags), Self::UnionTagNotFound { discriminator, .. } => render!(tmpl, discriminator), Self::UrlParsing { error, .. } => render!(tmpl, error), Self::UrlSyntaxViolation { error, .. } => render!(tmpl, error), Self::UrlTooLong { max_length, .. } => { let expected_plural = plural_s(*max_length); to_string_render!(tmpl, max_length, expected_plural) } Self::UrlScheme { expected_schemes, .. } => render!(tmpl, expected_schemes), Self::UuidParsing { error, .. } => render!(tmpl, error), Self::UuidVersion { expected_version, .. } => to_string_render!(tmpl, expected_version), Self::DecimalMaxDigits { max_digits, .. 
} => { let expected_plural = plural_s(*max_digits); to_string_render!(tmpl, max_digits, expected_plural) } Self::DecimalMaxPlaces { decimal_places, .. } => { let expected_plural = plural_s(*decimal_places); to_string_render!(tmpl, decimal_places, expected_plural) } Self::DecimalWholeDigits { whole_digits, .. } => { let expected_plural = plural_s(*whole_digits); to_string_render!(tmpl, whole_digits, expected_plural) } _ => Ok(tmpl.to_string()), } } pub fn py_dict(&self, py: Python) -> PyResult>> { let dict = PyDict::new_bound(py); let custom_ctx_used = self.py_dict_update_ctx(py, &dict)?; if let Self::CustomError { .. } = self { if custom_ctx_used { // Custom error type and message are handled separately by the caller. // They are added to the root of the ErrorDetails. dict.del_item("error_type")?; dict.del_item("message_template")?; Ok(Some(dict.into())) } else { Ok(None) } } else if custom_ctx_used || !dict.is_empty() { Ok(Some(dict.into())) } else { Ok(None) } } } #[derive(Clone, Debug)] pub enum Number { Int(i64), BigInt(BigInt), Float(f64), String(String), } impl Default for Number { fn default() -> Self { Self::Int(0) } } impl From for Number { fn from(f: f64) -> Self { Self::Float(f) } } impl From for Number { fn from(s: String) -> Self { Self::String(s) } } impl From for Number { fn from(i: Int) -> Self { match i { Int::I64(i) => Number::Int(i), Int::Big(b) => Number::BigInt(b), } } } impl FromPyObject<'_> for Number { fn extract_bound(obj: &Bound<'_, PyAny>) -> PyResult { if let Some(int) = extract_i64(obj) { Ok(Number::Int(int)) } else if let Ok(float) = obj.extract::() { Ok(Number::Float(float)) } else if let Ok(string) = obj.extract::() { Ok(Number::String(string)) } else { py_err!(PyTypeError; "Expected int or float or String, got {}", obj.get_type()) } } } impl fmt::Display for Number { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Float(s) => write!(f, "{s}"), Self::Int(i) => write!(f, "{i}"), Self::BigInt(i) => write!(f, "{i}"), Self::String(s) => write!(f, "{s}"), } } } impl ToPyObject for Number { fn to_object(&self, py: Python<'_>) -> PyObject { match self { Self::Int(i) => i.into_py(py), Self::BigInt(i) => i.clone().into_py(py), Self::Float(f) => f.into_py(py), Self::String(s) => s.into_py(py), } } } pydantic-pydantic-core-d771df5/src/errors/validation_exception.rs000066400000000000000000000547161473051353300254110ustar00rootroot00000000000000use std::fmt; use std::fmt::{Display, Write}; use std::str::from_utf8; use pyo3::exceptions::{PyKeyError, PyTypeError, PyValueError}; use pyo3::ffi; use pyo3::intern; use pyo3::prelude::*; use pyo3::sync::GILOnceCell; use pyo3::types::{PyDict, PyList, PyString, PyType}; use serde::ser::{Error, SerializeMap, SerializeSeq}; use serde::{Serialize, Serializer}; use serde_json::ser::PrettyFormatter; use crate::build_tools::py_schema_error_type; use crate::errors::LocItem; use crate::get_pydantic_version; use crate::input::InputType; use crate::serializers::{DuckTypingSerMode, Extra, SerMode, SerializationState}; use crate::tools::{safe_repr, write_truncated_to_limited_bytes, SchemaDict}; use super::line_error::ValLineError; use super::location::Location; use super::types::ErrorType; use super::value_exception::PydanticCustomError; use super::{InputValue, ValError}; #[pyclass(extends=PyValueError, module="pydantic_core._pydantic_core", subclass)] #[derive(Clone)] #[cfg_attr(debug_assertions, derive(Debug))] pub struct ValidationError { line_errors: Vec, title: PyObject, input_type: InputType, hide_input: 
bool, } impl ValidationError { pub fn new(line_errors: Vec, title: PyObject, input_type: InputType, hide_input: bool) -> Self { Self { line_errors, title, input_type, hide_input, } } pub fn from_val_error( py: Python, title: PyObject, input_type: InputType, error: ValError, outer_location: Option, hide_input: bool, validation_error_cause: bool, ) -> PyErr { match error { ValError::LineErrors(raw_errors) => { let line_errors: Vec = match outer_location { Some(outer_location) => raw_errors .into_iter() .map(|e| e.with_outer_location(outer_location.clone()).into_py(py)) .collect(), None => raw_errors.into_iter().map(|e| e.into_py(py)).collect(), }; let validation_error = Self::new(line_errors, title, input_type, hide_input); match Py::new(py, validation_error) { Ok(err) => { if validation_error_cause { // Will return an import error if the backport was needed and not installed: if let Some(cause_problem) = ValidationError::maybe_add_cause(err.borrow(py), py) { return cause_problem; } } PyErr::from_value_bound(err.into_bound(py).into_any()) } Err(err) => err, } } ValError::InternalErr(err) => err, ValError::Omit => Self::omit_error(), ValError::UseDefault => Self::use_default_error(), } } pub fn display(&self, py: Python, prefix_override: Option<&'static str>, hide_input: bool) -> String { let url_prefix = get_url_prefix(py, include_url_env(py)); let line_errors = pretty_py_line_errors(py, self.input_type, self.line_errors.iter(), url_prefix, hide_input); if let Some(prefix) = prefix_override { format!("{prefix}\n{line_errors}") } else { let count = self.line_errors.len(); let plural = if count == 1 { "" } else { "s" }; let title: &str = self.title.extract(py).unwrap(); format!("{count} validation error{plural} for {title}\n{line_errors}") } } pub fn omit_error() -> PyErr { py_schema_error_type!("Uncaught Omit error, please check your usage of `default` validators.") } pub fn use_default_error() -> PyErr { py_schema_error_type!("Uncaught UseDefault error, please check your usage of `default` validators.") } fn maybe_add_cause(self_: PyRef<'_, Self>, py: Python) -> Option { let mut user_py_errs = vec![]; for line_error in &self_.line_errors { if let ErrorType::AssertionError { error: Some(err), context: _, } | ErrorType::ValueError { error: Some(err), context: _, } = &line_error.error_type { let note: PyObject = if let Location::Empty = &line_error.location { "Pydantic: cause of loc: root".into_py(py) } else { format!( "Pydantic: cause of loc: {}", // Location formats with a newline at the end, hence the trim() line_error.location.to_string().trim() ) .into_py(py) }; // Notes only support 3.11 upwards: #[cfg(Py_3_11)] { // Add the location context as a note, no direct c api for this, // fine performance wise, add_note() goes directly to C: "(PyCFunction)BaseException_add_note": // https://github.com/python/cpython/blob/main/Objects/exceptions.c if err.call_method1(py, "add_note", (format!("\n{note}"),)).is_ok() { user_py_errs.push(err.clone_ref(py)); } } // Pre 3.11 notes support, use a UserWarning exception instead: #[cfg(not(Py_3_11))] { use pyo3::exceptions::PyUserWarning; let wrapped = PyUserWarning::new_err((note,)); wrapped.set_cause(py, Some(PyErr::from_value_bound(err.clone_ref(py).into_bound(py)))); user_py_errs.push(wrapped); } } } // Only add the cause if there are actually python user exceptions to show: if !user_py_errs.is_empty() { let title = "Pydantic User Code Exceptions"; // Native ExceptionGroup(s) only supported 3.11 and later: #[cfg(Py_3_11)] let cause = { use 
pyo3::exceptions::PyBaseExceptionGroup; Some(PyBaseExceptionGroup::new_err((title, user_py_errs)).into_py(py)) }; // Pre 3.11 ExceptionGroup support, use the python backport instead: // If something's gone wrong with the backport, just don't add the cause: #[cfg(not(Py_3_11))] let cause = { use pyo3::exceptions::PyImportError; match py.import_bound("exceptiongroup") { Ok(py_mod) => match py_mod.getattr("ExceptionGroup") { Ok(group_cls) => match group_cls.call1((title, user_py_errs)) { Ok(group_instance) => Some(group_instance.into_py(py)), Err(_) => None, }, Err(_) => None, }, Err(_) => return Some(PyImportError::new_err("validation_error_cause flag requires the exceptiongroup module backport to be installed when used on Python <3.11.")), } }; // Set the cause to the ValidationError: if let Some(cause) = cause { unsafe { // PyException_SetCause _steals_ a reference to cause, so must use .into_ptr() ffi::PyException_SetCause(self_.as_ptr(), cause.into_ptr()); } } } None } } static URL_ENV_VAR: GILOnceCell = GILOnceCell::new(); fn include_url_env(py: Python) -> bool { *URL_ENV_VAR.get_or_init(py, || { // Check the legacy env var first. // Using `var_os` here instead of `var` because we don't care about // the value (or whether we're able to decode it as UTF-8), just // whether it exists (and if it does, whether it's non-empty). match std::env::var_os("PYDANTIC_ERRORS_OMIT_URL") { Some(val) => { // We don't care whether warning succeeded or not, hence the assignment let _ = PyErr::warn_bound( py, &py.get_type_bound::(), "PYDANTIC_ERRORS_OMIT_URL is deprecated, use PYDANTIC_ERRORS_INCLUDE_URL instead", 1, ); // If OMIT_URL exists but is empty, we include the URL: val.is_empty() } // If the legacy env var doesn't exist, check the documented one: None => match std::env::var("PYDANTIC_ERRORS_INCLUDE_URL") { Ok(val) => val == "1" || val.to_lowercase() == "true", Err(_) => true, }, } }) } static URL_PREFIX: GILOnceCell = GILOnceCell::new(); fn get_formated_url(py: Python) -> &'static str { let pydantic_version = match get_pydantic_version(py) { // include major and minor version only Some(value) => value.split('.').collect::>()[..2].join("."), None => "latest".to_string(), }; URL_PREFIX.get_or_init(py, || format!("https://errors.pydantic.dev/{pydantic_version}/v/")) } fn get_url_prefix(py: Python, include_url: bool) -> Option<&str> { if include_url { Some(get_formated_url(py)) } else { None } } // used to convert a validation error back to ValError for wrap functions impl ValidationError { pub(crate) fn into_val_error(self) -> ValError { self.line_errors.into_iter().map(Into::into).collect::>().into() } } #[pymethods] impl ValidationError { #[new] #[pyo3(signature = (title, line_errors, input_type="python", hide_input=false))] fn py_new(title: PyObject, line_errors: Vec, input_type: &str, hide_input: bool) -> PyResult { Ok(Self { line_errors, title, input_type: InputType::try_from(input_type)?, hide_input, }) } #[classmethod] #[pyo3(signature = (title, line_errors, input_type="python", hide_input=false))] fn from_exception_data<'py>( cls: &Bound<'py, PyType>, title: PyObject, line_errors: Bound<'_, PyList>, input_type: &str, hide_input: bool, ) -> PyResult> { cls.call1(( title, line_errors .iter() .map(|error| PyLineError::try_from(&error)) .collect::>>()?, InputType::try_from(input_type)?, hide_input, )) } #[getter] fn title(&self, py: Python) -> PyObject { self.title.clone_ref(py) } pub fn error_count(&self) -> usize { self.line_errors.len() } #[pyo3(signature = (*, include_url = true, 
include_context = true, include_input = true))] pub fn errors( &self, py: Python, include_url: bool, include_context: bool, include_input: bool, ) -> PyResult> { let url_prefix = get_url_prefix(py, include_url); let mut iteration_error = None; let list = PyList::new_bound( py, // PyList::new takes ExactSizeIterator, so if an error occurs during iteration we // fill the list with None before returning the error; the list will then be thrown // away safely. self.line_errors.iter().map(|e| -> PyObject { if iteration_error.is_some() { return py.None(); } e.as_dict(py, url_prefix, include_context, self.input_type, include_input) .unwrap_or_else(|err| { iteration_error = Some(err); py.None() }) }), ); if let Some(err) = iteration_error { Err(err) } else { Ok(list.into()) } } #[pyo3(signature = (*, indent = None, include_url = true, include_context = true, include_input = true))] pub fn json<'py>( &self, py: Python<'py>, indent: Option, include_url: bool, include_context: bool, include_input: bool, ) -> PyResult> { let state = SerializationState::new("iso8601", "utf8", "constants")?; let extra = state.extra( py, &SerMode::Json, true, false, false, true, None, DuckTypingSerMode::SchemaBased, None, ); let serializer = ValidationErrorSerializer { py, line_errors: &self.line_errors, url_prefix: get_url_prefix(py, include_url), include_context, include_input, extra: &extra, input_type: &self.input_type, }; let writer: Vec = Vec::with_capacity(self.line_errors.len() * 200); let bytes = match indent { Some(indent) => { let indent = vec![b' '; indent]; let formatter = PrettyFormatter::with_indent(&indent); let mut ser = crate::serializers::ser::PythonSerializer::with_formatter(writer, formatter); serializer.serialize(&mut ser).map_err(json_py_err)?; ser.into_inner() } None => { let mut ser = crate::serializers::ser::PythonSerializer::new(writer); serializer.serialize(&mut ser).map_err(json_py_err)?; ser.into_inner() } }; let s = from_utf8(&bytes).map_err(json_py_err)?; Ok(PyString::new_bound(py, s)) } fn __repr__(&self, py: Python) -> String { self.display(py, None, self.hide_input) } fn __str__(&self, py: Python) -> String { self.__repr__(py) } fn __reduce__<'py>(slf: &Bound<'py, Self>) -> PyResult<(Bound<'py, PyAny>, PyObject)> { let py = slf.py(); let callable = slf.getattr("from_exception_data")?; let borrow = slf.try_borrow()?; let args = ( borrow.title.bind(py), borrow.errors(py, include_url_env(py), true, true)?, borrow.input_type.into_py(py), borrow.hide_input, ) .into_py(slf.py()); Ok((callable, args)) } } pub fn pretty_py_line_errors<'a>( py: Python, input_type: InputType, line_errors_iter: impl Iterator, url_prefix: Option<&str>, hide_input: bool, ) -> String { line_errors_iter .map(|i| i.pretty(py, input_type, url_prefix, hide_input)) .collect::, _>>() .unwrap_or_else(|err| vec![format!("[error formatting line errors: {err}]")]) .join("\n") } /// `PyLineError` are the public version of `ValLineError`, as help and used in `ValidationError`s #[pyclass] #[derive(Clone)] #[cfg_attr(debug_assertions, derive(Debug))] pub struct PyLineError { error_type: ErrorType, location: Location, input_value: PyObject, } impl IntoPy for ValLineError { fn into_py(self, py: Python<'_>) -> PyLineError { PyLineError { error_type: self.error_type, location: self.location, input_value: self.input_value.to_object(py), } } } impl From for ValLineError { /// Used to extract line errors from a validation error for wrap functions fn from(other: PyLineError) -> ValLineError { ValLineError { error_type: other.error_type, 
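            // the location moves across unchanged: both `PyLineError` and `ValLineError` keep
            // it in the internal reversed representation and only reverse it for display and
            // serialization (see `Location` in location.rs)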
location: other.location, input_value: InputValue::Python(other.input_value), } } } impl TryFrom<&Bound<'_, PyAny>> for PyLineError { type Error = PyErr; fn try_from(value: &Bound<'_, PyAny>) -> PyResult { let dict = value.downcast::()?; let py = value.py(); let type_raw = dict .get_item(intern!(py, "type"))? .ok_or_else(|| PyKeyError::new_err("type"))?; let error_type = if let Ok(type_str) = type_raw.downcast::() { let context: Option> = dict.get_as(intern!(py, "ctx"))?; ErrorType::new(py, type_str.to_str()?, context)? } else if let Ok(custom_error) = type_raw.extract::() { ErrorType::new_custom_error(py, custom_error) } else { return Err(PyTypeError::new_err( "`type` should be a `str` or `PydanticCustomError`", )); }; let location = Location::try_from(dict.get_item("loc")?.as_ref())?; let input_value = match dict.get_item("input")? { Some(i) => i.into_py(py), None => py.None(), }; Ok(Self { error_type, location, input_value, }) } } impl PyLineError { fn get_error_url(&self, url_prefix: &str) -> String { format!("{url_prefix}{}", self.error_type.type_string()) } pub fn as_dict( &self, py: Python, url_prefix: Option<&str>, include_context: bool, input_type: InputType, include_input: bool, ) -> PyResult { let dict = PyDict::new_bound(py); dict.set_item("type", self.error_type.type_string())?; dict.set_item("loc", self.location.to_object(py))?; dict.set_item("msg", self.error_type.render_message(py, input_type)?)?; if include_input { dict.set_item("input", &self.input_value)?; } if include_context { if let Some(context) = self.error_type.py_dict(py)? { dict.set_item("ctx", context)?; } } if let Some(url_prefix) = url_prefix { match self.error_type { ErrorType::CustomError { .. } => { // Don't add URLs for custom errors } _ => { dict.set_item("url", self.get_error_url(url_prefix))?; } } } Ok(dict.into_py(py)) } fn pretty( &self, py: Python, input_type: InputType, url_prefix: Option<&str>, hide_input: bool, ) -> Result { let mut output = String::with_capacity(200); write!(output, "{}", self.location)?; let message = match self.error_type.render_message(py, input_type) { Ok(message) => message, Err(err) => format!("(error rendering message: {err})"), }; write!(output, " {message} [type={}", self.error_type.type_string())?; if !hide_input { let input_value = self.input_value.bind(py); let input_str = safe_repr(input_value); write!(output, ", input_value=")?; write_truncated_to_limited_bytes(&mut output, &input_str.to_string(), 50)?; if let Ok(type_) = input_value.get_type().qualname() { write!(output, ", input_type={type_}")?; } } if let Some(url_prefix) = url_prefix { match self.error_type { ErrorType::CustomError { .. 
} => { // Don't display URLs for custom errors output.push(']'); } _ => { write!( output, "]\n For further information visit {}", self.get_error_url(url_prefix) )?; } } } else { output.push(']'); } Ok(output) } } pub(super) fn json_py_err(error: impl Display) -> PyErr { PyValueError::new_err(format!("Error serializing ValidationError to JSON: {error}")) } pub(super) fn py_err_json(error: PyErr) -> S::Error where S: Serializer, { S::Error::custom(error.to_string()) } struct ValidationErrorSerializer<'py> { py: Python<'py>, line_errors: &'py [PyLineError], url_prefix: Option<&'py str>, include_context: bool, include_input: bool, extra: &'py Extra<'py>, input_type: &'py InputType, } impl Serialize for ValidationErrorSerializer<'_> { fn serialize(&self, serializer: S) -> Result where S: Serializer, { let mut seq = serializer.serialize_seq(Some(self.line_errors.len()))?; for line_error in self.line_errors { let line_s = PyLineErrorSerializer { py: self.py, line_error, url_prefix: self.url_prefix, include_context: self.include_context, include_input: self.include_input, extra: self.extra, input_type: self.input_type, }; seq.serialize_element(&line_s)?; } seq.end() } } struct PyLineErrorSerializer<'py> { py: Python<'py>, line_error: &'py PyLineError, url_prefix: Option<&'py str>, include_context: bool, include_input: bool, extra: &'py Extra<'py>, input_type: &'py InputType, } impl Serialize for PyLineErrorSerializer<'_> { fn serialize(&self, serializer: S) -> Result where S: Serializer, { let py = self.py; let size = 3 + [self.url_prefix.is_some(), self.include_context, self.include_input] .into_iter() .filter(|b| *b) .count(); let mut map = serializer.serialize_map(Some(size))?; map.serialize_entry("type", &self.line_error.error_type.type_string())?; map.serialize_entry("loc", &self.line_error.location)?; let msg = self .line_error .error_type .render_message(py, *self.input_type) .map_err(py_err_json::)?; map.serialize_entry("msg", &msg)?; if self.include_input { map.serialize_entry( "input", &self.extra.serialize_infer(self.line_error.input_value.bind(py)), )?; } if self.include_context { if let Some(context) = self.line_error.error_type.py_dict(py).map_err(py_err_json::)? 
{ map.serialize_entry("ctx", &self.extra.serialize_infer(context.bind(py)))?; } } if let Some(url_prefix) = self.url_prefix { map.serialize_entry("url", &self.line_error.get_error_url(url_prefix))?; } map.end() } } pydantic-pydantic-core-d771df5/src/errors/value_exception.rs000066400000000000000000000124301473051353300243560ustar00rootroot00000000000000use pyo3::exceptions::{PyException, PyValueError}; use pyo3::prelude::*; use pyo3::types::{PyDict, PyString}; use crate::input::InputType; use crate::tools::extract_i64; use super::line_error::ToErrorValue; use super::{ErrorType, ValError}; #[pyclass(extends=PyException, module="pydantic_core._pydantic_core")] #[derive(Debug, Clone)] pub struct PydanticOmit {} impl PydanticOmit { pub(crate) fn new_err() -> PyErr { PyErr::new::(()) } } #[pymethods] impl PydanticOmit { #[new] pub fn py_new() -> Self { Self {} } fn __str__(&self) -> &'static str { self.__repr__() } fn __repr__(&self) -> &'static str { "PydanticOmit()" } } #[pyclass(extends=PyException, module="pydantic_core._pydantic_core")] #[derive(Debug, Clone)] pub struct PydanticUseDefault {} #[pymethods] impl PydanticUseDefault { #[new] pub fn py_new() -> Self { Self {} } fn __str__(&self) -> &'static str { self.__repr__() } fn __repr__(&self) -> &'static str { "PydanticUseDefault()" } } #[pyclass(extends=PyValueError, module="pydantic_core._pydantic_core", subclass)] #[derive(Debug, Clone, Default)] pub struct PydanticCustomError { error_type: String, message_template: String, context: Option>, } #[pymethods] impl PydanticCustomError { #[new] #[pyo3(signature = (error_type, message_template, context = None))] pub fn py_new(error_type: String, message_template: String, context: Option>) -> Self { Self { error_type, message_template, context: context.map(Bound::unbind), } } #[getter(r#type)] pub fn error_type(&self) -> &str { &self.error_type } #[getter] pub fn message_template(&self) -> &str { &self.message_template } #[getter] pub fn context(&self, py: Python) -> Option> { self.context.as_ref().map(|c| c.clone_ref(py)) } pub fn message(&self, py: Python) -> PyResult { Self::format_message(&self.message_template, self.context.as_ref().map(|c| c.bind(py))) } fn __str__(&self, py: Python) -> PyResult { self.message(py) } fn __repr__(&self, py: Python) -> PyResult { let msg = self.message(py)?; match self.context.as_ref() { Some(ctx) => Ok(format!("{msg} [type={}, context={}]", self.error_type, ctx.bind(py))), None => Ok(format!("{msg} [type={}, context=None]", self.error_type)), } } } impl PydanticCustomError { pub fn into_val_error(self, input: impl ToErrorValue) -> ValError { let error_type = ErrorType::CustomError { error_type: self.error_type, message_template: self.message_template, context: self.context, }; ValError::new(error_type, input) } pub fn format_message(message_template: &str, context: Option<&Bound<'_, PyDict>>) -> PyResult { let mut message = message_template.to_string(); if let Some(ctx) = context { for (key, value) in ctx.iter() { let key = key.downcast::()?; if let Ok(py_str) = value.downcast::() { message = message.replace(&format!("{{{}}}", key.to_str()?), py_str.to_str()?); } else if let Some(value_int) = extract_i64(&value) { message = message.replace(&format!("{{{}}}", key.to_str()?), &value_int.to_string()); } else { // fallback for anything else just in case message = message.replace(&format!("{{{}}}", key.to_str()?), &value.to_string()); } } } Ok(message) } } #[pyclass(extends=PyValueError, module="pydantic_core._pydantic_core")] #[derive(Debug, Clone)] pub 
struct PydanticKnownError { error_type: ErrorType, } #[pymethods] impl PydanticKnownError { #[new] #[pyo3(signature = (error_type, context=None))] pub fn py_new(py: Python, error_type: &str, context: Option>) -> PyResult { let error_type = ErrorType::new(py, error_type, context)?; Ok(Self { error_type }) } #[getter(r#type)] pub fn error_type(&self) -> String { self.error_type.to_string() } #[getter] pub fn message_template(&self) -> &'static str { self.error_type.message_template_python() } #[getter] pub fn context(&self, py: Python) -> PyResult>> { self.error_type.py_dict(py) } pub fn message(&self, py: Python) -> PyResult { self.error_type.render_message(py, InputType::Python) } fn __str__(&self, py: Python) -> PyResult { self.message(py) } fn __repr__(&self, py: Python) -> PyResult { let msg = self.message(py)?; match self.context(py)?.as_ref() { Some(ctx) => Ok(format!("{msg} [type={}, context={}]", self.error_type(), ctx.bind(py))), None => Ok(format!("{msg} [type={}, context=None]", self.error_type())), } } } impl PydanticKnownError { pub fn into_val_error(self, input: impl ToErrorValue) -> ValError { ValError::new(self.error_type, input) } } pydantic-pydantic-core-d771df5/src/input/000077500000000000000000000000001473051353300204415ustar00rootroot00000000000000pydantic-pydantic-core-d771df5/src/input/datetime.rs000066400000000000000000000471351473051353300226150ustar00rootroot00000000000000use pyo3::intern; use pyo3::prelude::*; use pyo3::exceptions::PyValueError; use pyo3::pyclass::CompareOp; use pyo3::types::{PyDate, PyDateTime, PyDelta, PyDeltaAccess, PyDict, PyTime, PyTzInfo}; use speedate::MicrosecondsPrecisionOverflowBehavior; use speedate::{Date, DateTime, Duration, ParseError, Time, TimeConfig}; use std::borrow::Cow; use std::collections::hash_map::DefaultHasher; use std::hash::Hash; use std::hash::Hasher; use strum::EnumMessage; use super::Input; use crate::errors::ToErrorValue; use crate::errors::{ErrorType, ValError, ValResult}; use crate::tools::py_err; #[cfg_attr(debug_assertions, derive(Debug))] pub enum EitherDate<'a> { Raw(Date), Py(Bound<'a, PyDate>), } impl From for EitherDate<'_> { fn from(date: Date) -> Self { Self::Raw(date) } } impl<'a> From> for EitherDate<'a> { fn from(date: Bound<'a, PyDate>) -> Self { Self::Py(date) } } pub fn pydate_as_date(py_date: &Bound<'_, PyAny>) -> PyResult { let py = py_date.py(); Ok(Date { year: py_date.getattr(intern!(py, "year"))?.extract()?, month: py_date.getattr(intern!(py, "month"))?.extract()?, day: py_date.getattr(intern!(py, "day"))?.extract()?, }) } impl<'py> EitherDate<'py> { pub fn try_into_py(self, py: Python<'py>, input: &(impl Input<'py> + ?Sized)) -> ValResult { match self { Self::Raw(date) => { if date.year == 0 { return Err(ValError::new( ErrorType::DateParsing { error: Cow::Borrowed("year 0 is out of range"), context: None, }, input, )); }; let py_date = PyDate::new_bound(py, date.year.into(), date.month, date.day)?; Ok(py_date.into()) } Self::Py(py_date) => Ok(py_date.into()), } } pub fn as_raw(&self) -> PyResult { match self { Self::Raw(date) => Ok(date.clone()), Self::Py(py_date) => pydate_as_date(py_date), } } } #[cfg_attr(debug_assertions, derive(Debug))] pub enum EitherTime<'a> { Raw(Time), Py(Bound<'a, PyTime>), } impl From
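// A rough usage sketch for the `EitherDate` helpers above (illustrative, not part of the
// crate): `pydate_as_date` reads the year/month/day attributes off any Python date-like
// object, so both branches can be normalised to a speedate `Date`:
//
//     let raw: EitherDate = Date { year: 2024, month: 1, day: 2 }.into();
//     assert_eq!(raw.as_raw()?.year, 2024);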

pydantic-core unit tests

loading...