pydantic-1.2/.codecov.yml

coverage:
  precision: 2
  range: [95, 100]

comment:
  layout: 'header, diff, flags, files, footer'

pydantic-1.2/.github/FUNDING.yml

patreon: samuelcolvin

pydantic-1.2/.github/ISSUE_TEMPLATE/bug.md

---
name: Bug
about: Create a bug report to help us improve pydantic
labels: bug
---

# Bug

Please complete:
* OS: **?**
* Python version `import sys; print(sys.version)`: **?**
* Pydantic version `import pydantic; print(pydantic.VERSION)`: **?**

**Please read the [docs](https://pydantic-docs.helpmanual.io/) and search through issues to
confirm your bug hasn't already been reported.**

Where possible please include a self contained code snippet describing your bug:

```py
import pydantic
...
```

pydantic-1.2/.github/ISSUE_TEMPLATE/feature_request.md

---
name: Feature Request
about: Suggest a new feature or change to pydantic
labels: feature request
---

# Feature Request

Please complete:
* OS: **?**
* Python version `import sys; print(sys.version)`: **?**
* Pydantic version `import pydantic; print(pydantic.VERSION)`: **?**

**Please read the [docs](https://pydantic-docs.helpmanual.io/) and search through issues to
confirm your feature hasn't been asked for before, or already implemented.**

Where possible please include a self contained code snippet describing your feature request:

```py
import pydantic
...
```

pydantic-1.2/.github/ISSUE_TEMPLATE/question.md

---
name: Question
about: Ask a question about how to use pydantic
labels: question
---

# Question

Please complete:
* OS: **?**
* Python version `import sys; print(sys.version)`: **?**
* Pydantic version `import pydantic; print(pydantic.VERSION)`: **?**

**Please read the [docs](https://pydantic-docs.helpmanual.io/) and search through issues to
confirm your question hasn't already been answered.**

Where possible please include a self contained code snippet describing your question:

```py
import pydantic
...
```

pydantic-1.2/.github/PULL_REQUEST_TEMPLATE.md

## Change Summary

## Related issue number

## Checklist

* [ ] Unit tests for the changes exist
* [ ] Tests pass on CI and coverage remains at 100%
* [ ] Documentation reflects the changes where applicable
* [ ] `changes/<pull request or issue number>-<github username>.md` file added describing change
  (see [changes/README.md](https://github.com/samuelcolvin/pydantic/blob/master/changes/README.md) for details)

pydantic-1.2/.gitignore

.idea/
env/
env36/
env37/
env38/
*.py[cod]
*.egg-info/
/build/
dist/
.cache/
.mypy_cache/
test.py
.coverage
/htmlcov/
/benchmarks/*.json
/docs/.changelog.md
/docs/.version.md
/docs/.tmp_schema_mappings.html
/docs/.tmp_examples/
/site/
/site.zip
.pytest_cache/
.vscode/
_build/
pydantic/*.c
pydantic/*.so
.auto-format
/sandbox/

pydantic-1.2/.travis.yml

os: linux
dist: xenial
sudo: required
language: python
cache: pip

python:
- '3.6'
- '3.7'
- '3.8'

install:
- make install
- pip freeze

script:
- python -c "import sys, pydantic; print('compiled:', pydantic.compiled); sys.exit(1 if pydantic.compiled else 0)"
- make test
- python3 ./changes/make_history.py
- make lint
- make mypy
- make check-dist

env:
  global:
  - 'CYTHON=no'
  - 'DEPS=yes'

after_success:
- ls -lha
- bash <(curl -s https://codecov.io/bash) -e CYTHON,DEPS

stages:
- name: test
  if: type = pull_request OR branch = master OR tag IS present
- name: build
  if: type = push AND (branch = master OR tag IS present)

jobs:
  include:
  - stage: test
    python: 3.6
    name: 'Cython: 3.6'
    script:
    - make build-cython-trace
    - python -c "import sys, pydantic; print('compiled:', pydantic.compiled); sys.exit(0 if pydantic.compiled else 1)"
    - make test
    env:
    - 'CYTHON=yes'
  - stage: test
    python: 3.7
    name: 'Cython: 3.7'
    script:
    - make build-cython-trace
    - python -c "import sys, pydantic; print('compiled:', pydantic.compiled); sys.exit(0 if pydantic.compiled else 1)"
    - make test
    env:
    - 'CYTHON=yes'
  - stage: test
    python: 3.8
    name: 'Cython: 3.8'
    script:
    - make build-cython-trace
    - python -c "import sys, pydantic; print('compiled:', pydantic.compiled); sys.exit(0 if pydantic.compiled else 1)"
    - make test
    env:
    - 'CYTHON=yes'
  - stage: test
    python: 3.6
    name: 'Without Deps 3.6'
    script:
    - pip uninstall -y cython email-validator typing-extensions devtools
    - make test
    env:
    - 'DEPS=no'
  - stage: test
    python: 3.7
    name: 'Without Deps 3.7'
    script:
    - pip uninstall -y cython email-validator typing-extensions devtools
    - make test
    env:
    - 'DEPS=no'
  - stage: test
    python: 3.8
    name: 'Without Deps 3.8'
    script:
    - pip uninstall -y cython email-validator typing-extensions devtools
    - make test
    env:
    - 'DEPS=no'
  - stage: test
    python: 3.7
    name: 'Benchmarks'
    script:
    - make build-cython
    - BENCHMARK_REPEATS=1 make benchmark-all
    after_success: skip
  - stage: build
    name: 'PyPI Build and Upload'
    python: 3.7
    services:
    - docker
    install: skip
    env:
    - 'PIP=pip'
    - 'CIBW_BUILD="cp36-manylinux1_x86_64 cp36-manylinux1_i686 cp37-manylinux1_x86_64 cp37-manylinux1_i686 cp38-manylinux1_x86_64 cp38-manylinux1_i686"'
    - 'CIBW_BEFORE_BUILD="pip install -U cython"'
    - 'CIBW_TEST_REQUIRES=pytest'
    - 'CIBW_TEST_COMMAND="pytest {project}/tests"'
    script:
    - ./tests/check_tag.py
    - pip install -U cibuildwheel wheel
    - cibuildwheel --output-dir dist
    - SKIP_CYTHON=1 python setup.py sdist bdist_wheel
    after_success:
    - ls -lha dist
    deploy:
      provider: pypi
      skip_cleanup: true
user: samuelcolvin password: secure: 'QbXFF2puEWjhFUpD0yu2R+wP4QI1IKIomBkMizsiCyMutlexERElranyYB8bsakvjPaJ+zU14ufffh2u7UA7Zhep/iE4skRHq4XWxnnRLHGu5nyGf3+zSM3F9MOzV32eZ4CDLJtFb6I0ensjTpodJH2EsIYHYxTgndIZn56Qbh6CStj7Xg1zm0Ujxdzm4ZLgcS28SOF/tpjsDW9+GXwc6L1mAZWYiS98gVgzL1vBd9tL9uFbbuFwGz9uhFMzFJko7vXSl8urWB4qeCspKXa9iKH7/AOYSwXTCwcg8U2hhC9UsOapnga2BubZKlU5HRfSs9fQcpnzcP2lwhSmkrEFa8VOw83hX6+bL564xK1Q4kanfGZ1fLU4FYge3iOnqjH7ajO7xEcUrcOEYUPfxM4EfdiDw0xnAzE1ITGH1/pZikF+wjlu+ez7RmmnejgK7quT1WU7keo7pSlRSfQtNgNl6xu818x0xZ1TScfN6e9npNy4TYyIooMOOeI4tMdfcR4JClkjGKhAtBk81DH7isZgPv3uwocGnKZ2S7La97CE3ADzU3MTA9xVIOSOjzwuvAe72uS2nwzqXkS9KATdATkC9QCvheJ9jIBB4UcqnHbD8L1gkqdmZwXZqHZldq8wcqNYZb+81lumy5EZ6xSoEzlLDpXHe80EjMUOBkb5fz3D44s=' on: tags: true all_branches: true - stage: build name: 'Docs Build and Upload' python: 3.7 script: make docs env: - secure: "vpTd8bkwPBP0CV3EJBAwSMNMnNK3m/71dvTvBd1T4YGuefyJvYhtA7wauA5xRL9jpK2mu5QR5eo0owTUJhKi4DjpafMMd1bc4PnXlrdZFzkn3VsGmlKt74D/aJgiuiNyhd/Qvq4OxMHrMhf4f6lKWoMM1vh6yT0yp3+51SexSh2Me0Q+npxbjXwoxX5XUHRcoSLtFk4GbYI88a2I+08XWI6v+Awo/giQ5QurUJhjAklbosrrQVr1FCOkU0em5jeyZvEbZSLmaMtbX1JlRdKoJm6WMU+y9I7zj35w6ue/vgfcLz7b/HDZrBx7/L9g1LxRo80briueX/IbHvN7DOVFKvaXVmnEa6lIDdCeOLOyESpIbmjqmDKi8JeexdPNxKq4Tvo2VEA9dL2w2aw+aALNtU2OF5iEMfPTUQyosu/CNu2PKtiuZkSOdvpYbSy1WUNHJRvomdR4Olzg8ZIScNsxU3IIPdrlG/LUA8auXcE9juFeZfD6D2hQZATqWeEe/C2J7amNSD+mLLaTf6nMQw8oNtKYOvYK17M7xyvi7HXDy711Bi18U3x6Ye0xGx8CDbFwl0ICNzIk9rrSAh9hEHTvfdUUkk35pxifvO0Hrh4SArCA20ozcH/hHWBhyqGdxoIQ6KoDgNbIFIGQ6/vugxL/pt8z1sJwPfJnq8tRDAyWZvE=" deploy: provider: script script: make publish on: tags: true pydantic-1.2/HISTORY.md000066400000000000000000000640311357000400300146460ustar00rootroot00000000000000## v1.2 (2019-11-28) * Add benchmarks for `cattrs`, #513 by @sebastianmika * Add `exclude_none` option to `dict()` and friends, #587 by @niknetniko * Add benchmarks for `valideer`, #670 by @gsakkis * Add `parse_obj_as` and `parse_file_as` functions for ad-hoc parsing of data into arbitrary pydantic-compatible types, #934 by @dmontagu * Add `allow_reuse` argument to validators, thus allowing validator reuse, #940 by @dmontagu * Add support for mapping types for custom root models, #958 by @dmontagu * Mypy plugin support for dataclasses, #966 by @koxudaxi * Add support for dataclasses default factory, #968 by @ahirner * Add a `ByteSize` type for converting byte string (`1GB`) to plain bytes, #977 by @dgasmith * Fix mypy complaint about `@root_validator(pre=True)`, #984 by @samuelcolvin * Add manylinux binaries for python 3.8 to pypi, also support manylinux2010, #994 by @samuelcolvin * Adds ByteSize conversion to another unit, #995 by @dgasmith * Fix `__str__` and `__repr__` inheritance for models, #1022 by @samuelcolvin * add testimonials section to docs, #1025 by @sullivancolin * Add support for `typing.Literal` for Python 3.8, #1026 by @dmontagu * Add support for required `Optional` with `name: Optional[AnyType] = Field(...)` and refactor `ModelField` creation to preserve `required` parameter value, #1031 by @tiangolo ## v1.1.1 (2019-11-20) * Fix bug where use of complex fields on sub-models could cause fields to be incorrectly configured, #1015 by @samuelcolvin ## v1.1 (2019-11-07) * Add a mypy plugin for type checking `BaseModel.__init__` and more, #722 by @dmontagu * Change return type typehint for `GenericModel.__class_getitem__` to prevent PyCharm warnings, #936 by @dmontagu * Fix usage of `Any` to allow `None`, also support `TypeVar` thus allowing use of un-parameterised collection types e.g. 
`Dict` and `List`, #962 by @samuelcolvin * Set `FieldInfo` on subfields to fix schema generation for complex nested types, #965 by @samuelcolvin ## v1.0 (2019-10-23) * **Breaking Change:** deprecate the `Model.fields` property, use `Model.__fields__` instead, #883 by @samuelcolvin * **Breaking Change:** Change the precedence of aliases so child model aliases override parent aliases, including using `alias_generator`, #904 by @samuelcolvin * **Breaking change:** Rename `skip_defaults` to `exclude_unset`, and add ability to exclude actual defaults, #915 by @dmontagu * Add `**kwargs` to `pydantic.main.ModelMetaclass.__new__` so `__init_subclass__` can take custom parameters on extended `BaseModel` classes, #867 by @retnikt * Fix field of a type that has a default value, #880 by @koxudaxi * Use `FutureWarning` instead of `DeprecationWarning` when `alias` instead of `env` is used for settings models, #881 by @samuelcolvin * Fix issue with `BaseSettings` inheritance and `alias` getting set to `None`, #882 by @samuelcolvin * Modify `__repr__` and `__str__` methods to be consistent across all public classes, add `__pretty__` to support python-devtools, #884 by @samuelcolvin * deprecation warning for `case_insensitive` on `BaseSettings` config, #885 by @samuelcolvin * For `BaseSettings` merge environment variables and in-code values recursively, as long as they create a valid object when merged together, to allow splitting init arguments, #888 by @idmitrievsky * change secret types example, #890 by @ashears * Change the signature of `Model.construct()` to be more user-friendly, document `construct()` usage, #898 by @samuelcolvin * Add example for the `construct()` method, #907 by @ashears * Improve use of `Field` constraints on complex types, raise an error if constraints are not enforceable, also support tuples with an ellipsis `Tuple[X, ...]`, `Sequence` and `FrozenSet` in schema, #909 by @samuelcolvin * update docs for bool missing valid value, #911 by @trim21 * Better `str`/`repr` logic for `ModelField`, #912 by @samuelcolvin * Fix `ConstrainedList`, update schema generation to reflect `min_items` and `max_items` `Field()` arguments, #917 by @samuelcolvin * Allow abstracts sets (eg. dict keys) in the `include` and `exclude` arguments of `dict()`, #921 by @samuelcolvin * Fix JSON serialization errors on `ValidationError.json()` by using `pydantic_encoder`, #922 by @samuelcolvin * Clarify usage of `remove_untouched`, improve error message for types with no validators, #926 by @retnikt ## v1.0b2 (2019-10-07) * Mark `StrictBool` typecheck as `bool` to allow for default values without mypy errors, #690 by @dmontagu * Transfer the documentation build from sphinx to mkdocs, re-write much of the documentation, #856 by @samuelcolvin * Add support for custom naming schemes for `GenericModel` subclasses, #859 by @dmontagu * Add `if TYPE_CHECKING:` to the excluded lines for test coverage, #874 by @dmontagu * Rename `allow_population_by_alias` to `allow_population_by_field_name`, remove unnecessary warning about it, #875 by @samuelcolvin ## v1.0b1 (2019-10-01) * **Breaking Change:** rename `Schema` to `Field`, make it a function to placate mypy, #577 by @samuelcolvin * **Breaking Change:** modify parsing behavior for `bool`, #617 by @dmontagu * **Breaking Change:** `get_validators` is no longer recognised, use `__get_validators__`. 
`Config.ignore_extra` and `Config.allow_extra` are no longer recognised, use `Config.extra`, #720 by @samuelcolvin * **Breaking Change:** modify default config settings for `BaseSettings`; `case_insensitive` renamed to `case_sensitive`, default changed to `case_sensitive = False`, `env_prefix` default changed to `''` - e.g. no prefix, #721 by @dmontagu * **Breaking change:** Implement `root_validator` and rename root errors from `__obj__` to `__root__`, #729 by @samuelcolvin * **Breaking Change:** alter the behaviour of `dict(model)` so that sub-models are nolonger converted to dictionaries, #733 by @samuelcolvin * **Breaking change:** Added `initvars` support to `post_init_post_parse`, #748 by @Raphael-C-Almeida * **Breaking Change:** Make `BaseModel.json()` only serialize the `__root__` key for models with custom root, #752 by @dmontagu * **Breaking Change:** complete rewrite of `URL` parsing logic, #755 by @samuelcolvin * **Breaking Change:** preserve superclass annotations for field-determination when not provided in subclass, #757 by @dmontagu * **Breaking Change:** `BaseSettings` now uses the special `env` settings to define which environment variables to read, not aliases, #847 by @samuelcolvin * add support for `assert` statements inside validators, #653 by @abdusco * Update documentation to specify the use of `pydantic.dataclasses.dataclass` and subclassing `pydantic.BaseModel`, #710 by @maddosaurus * Allow custom JSON decoding and encoding via `json_loads` and `json_dumps` `Config` properties, #714 by @samuelcolvin * make all annotated fields occur in the order declared, #715 by @dmontagu * use pytest to test `mypy` integration, #735 by @dmontagu * add `__repr__` method to `ErrorWrapper`, #738 by @samuelcolvin * Added support for `FrozenSet` members in dataclasses, and a better error when attempting to use types from the `typing` module that are not supported by Pydantic, #745 by @djpetti * add documentation for Pycharm Plugin, #750 by @koxudaxi * fix broken examples in the docs, #753 by @dmontagu * moving typing related objects into `pydantic.typing`, #761 by @samuelcolvin * Minor performance improvements to `ErrorWrapper`, `ValidationError` and datetime parsing, #763 by @samuelcolvin * Improvements to `datetime`/`date`/`time`/`timedelta` types: more descriptive errors, change errors to `value_error` not `type_error`, support bytes, #766 by @samuelcolvin * fix error messages for `Literal` types with multiple allowed values, #770 by @dmontagu * Improved auto-generated `title` field in JSON schema by converting underscore to space, #772 by @skewty * support `mypy --no-implicit-reexport` for dataclasses, also respect `--no-implicit-reexport` in pydantic itself, #783 by @samuelcolvin * add the `PaymentCardNumber` type, #790 by @matin * Fix const validations for lists, #794 by @hmvp * Set `additionalProperties` to false in schema for models with extra fields disallowed, #796 by @Code0x58 * `EmailStr` validation method now returns local part case-sensitive per RFC 5321, #798 by @henriklindgren * Added ability to validate strictness to `ConstrainedFloat`, `ConstrainedInt` and `ConstrainedStr` and added `StrictFloat` and `StrictInt` classes, #799 by @DerRidda * Improve handling of `None` and `Optional`, replace `whole` with `each_item` (inverse meaning, default `False`) on validators, #803 by @samuelcolvin * add support for `Type[T]` type hints, #807 by @timonbimon * Performance improvements from removing `change_exceptions`, change how pydantic error are constructed, #819 by 
@samuelcolvin * Fix the error message arising when a `BaseModel`-type model field causes a `ValidationError` during parsing, #820 by @dmontagu * allow `getter_dict` on `Config`, modify `GetterDict` to be more like a `Mapping` object and thus easier to work with, #821 by @samuelcolvin * Only check `TypeVar` param on base `GenericModel` class, #842 by @zpencerq * rename `Model._schema_cache` -> `Model.__schema_cache__`, `Model._json_encoder` -> `Model.__json_encoder__`, `Model._custom_root_type` -> `Model.__custom_root_type__`, #851 by @samuelcolvin ## v0.32.2 (2019-08-17) * fix `__post_init__` usage with dataclass inheritance, fix #739 by @samuelcolvin * fix required fields validation on GenericModels classes, #742 by @amitbl * fix defining custom `Schema` on `GenericModel` fields, #754 by @amitbl ## v0.32.1 (2019-08-08) * do not validate extra fields when `validate_assignment` is on, #724 by @YaraslauZhylko ## v0.32 (2019-08-06) * add model name to `ValidationError` error message, #676 by @dmontagu * **breaking change**: remove `__getattr__` and rename `__values__` to `__dict__` on `BaseModel`, deprecation warning on use `__values__` attr, attributes access speed increased up to 14 times, #712 by @MrMrRobat * support `ForwardRef` (without self-referencing annotations) in Python 3.6, #706 by @koxudaxi * implement `schema_extra` in `Config` sub-class, #663 by @tiangolo ## v0.31.1 (2019-07-31) * fix json generation for `EnumError`, #697 by @dmontagu * update numerous dependencies ## v0.31 (2019-07-24) * better support for floating point `multiple_of` values, #652 by @justindujardin * fix schema generation for `NewType` and `Literal`, #649 by @dmontagu * fix `alias_generator` and field config conflict, #645 by @gmetzker and #658 by @MrMrRobat * more detailed message for `EnumError`, #673 by @dmontagu * add advanced exclude support for `dict`, `json` and `copy`, #648 by @MrMrRobat * fix bug in `GenericModel` for models with concrete parameterized fields, #672 by @dmontagu * add documentation for `Literal` type, #651 by @dmontagu * add `Config.keep_untouched` for custom descriptors support, #679 by @MrMrRobat * use `inspect.cleandoc` internally to get model description, #657 by @tiangolo * add `Color` to schema generation, by @euri10 * add documentation for Literal type, #651 by @dmontagu ## v0.30.1 (2019-07-15) * fix so nested classes which inherit and change `__init__` are correctly processed while still allowing `self` as a parameter, #644 by @lnaden and @dgasmith ## v0.30 (2019-07-07) * enforce single quotes in code, #612 by @samuelcolvin * fix infinite recursion with dataclass inheritance and `__post_init__`, #606 by @Hanaasagi * fix default values for `GenericModel`, #610 by @dmontagu * clarify that self-referencing models require python 3.7+, #616 by @vlcinsky * fix truncate for types, #611 by @dmontagu * add `alias_generator` support, #622 by @MrMrRobat * fix unparameterized generic type schema generation, #625 by @dmontagu * fix schema generation with multiple/circular references to the same model, #621 by @tiangolo and @wongpat * support custom root types, #628 by @koxudaxi * support `self` as a field name in `parse_obj`, #632 by @samuelcolvin ## v0.29 (2019-06-19) * support dataclasses.InitVar, #592 by @pfrederiks * Updated documentation to elucidate the usage of `Union` when defining multiple types under an attribute's annotation and showcase how the type-order can affect marshalling of provided values, #594 by @somada141 * add `conlist` type, #583 by @hmvp * add support for 
generics, #595 by @dmontagu ## v0.28 (2019-06-06) * fix support for JSON Schema generation when using models with circular references in Python 3.7, #572 by @tiangolo * support `__post_init_post_parse__` on dataclasses, #567 by @sevaho * allow dumping dataclasses to JSON, #575 by @samuelcolvin and @DanielOberg * ORM mode, #562 by @samuelcolvin * fix `pydantic.compiled` on ipython, #573 by @dmontagu and @samuelcolvin * add `StrictBool` type, #579 by @cazgp ## v0.27 (2019-05-30) * **breaking change** `_pydantic_post_init` to execute dataclass' original `__post_init__` before validation, #560 by @HeavenVolkoff * fix handling of generic types without specified parameters, #550 by @dmontagu * **breaking change** (maybe): this is the first release compiled with **cython**, see the docs and please submit an issue if you run into problems ## v0.27.0a1 (2019-05-26) * fix JSON Schema for `list`, `tuple`, and `set`, #540 by @tiangolo * compiling with cython, `manylinux` binaries, some other performance improvements, #548 by @samuelcolvin ## v0.26 (2019-05-22) * fix to schema generation for `IPvAnyAddress`, `IPvAnyInterface`, `IPvAnyNetwork` #498 by @pilosus * fix variable length tuples support, #495 by @pilosus * fix return type hint for `create_model`, #526 by @dmontagu * **Breaking Change:** fix `.dict(skip_keys=True)` skipping values set via alias (this involves changing `validate_model()` to always returns `Tuple[Dict[str, Any], Set[str], Optional[ValidationError]]`), #517 by @sommd * fix to schema generation for `IPv4Address`, `IPv6Address`, `IPv4Interface`, `IPv6Interface`, `IPv4Network`, `IPv6Network` #532 by @euri10 * add `Color` type, #504 by @pilosus and @samuelcolvin ## v0.25 (2019-05-05) * Improve documentation on self-referencing models and annotations, #487 by @theenglishway * fix `.dict()` with extra keys, #490 by @JaewonKim * support `const` keyword in `Schema`, #434 by @Sean1708 ## v0.24 (2019-04-23) * fix handling `ForwardRef` in sub-types, like `Union`, #464 by @tiangolo * fix secret serialization, #465 by @atheuz * Support custom validators for dataclasses, #454 by @primal100 * fix `parse_obj` to cope with dict-like objects, #472 by @samuelcolvin * fix to schema generation in nested dataclass-based models, #474 by @NoAnyLove * fix `json` for `Path`, `FilePath`, and `DirectoryPath` objects, #473 by @mikegoodspeed ## v0.23 (2019-04-04) * improve documentation for contributing section, #441 by @pilosus * improve README.rst to include essential information about the package, #446 by @pilosus * `IntEnum` support, #444 by @potykion * fix PyObject callable value, #409 by @pilosus * fix `black` deprecation warnings after update, #451 by @pilosus * fix `ForwardRef` collection bug, #450 by @tigerwings * Support specialized `ClassVars`, #455 by @tyrylu * fix JSON serialization for `ipaddress` types, #333 by @pilosus * add `SecretStr` and `SecretBytes` types, #452 by @atheuz ## v0.22 (2019-03-29) * add `IPv{4,6,Any}Network` and `IPv{4,6,Any}Interface` types from `ipaddress` stdlib, #333 by @pilosus * add docs for `datetime` types, #386 by @pilosus * fix to schema generation in dataclass-based models, #408 by @pilosus * fix path in nested models, #437 by @kataev * add `Sequence` support, #304 by @pilosus ## v0.21.0 (2019-03-15) * fix typo in `NoneIsNotAllowedError` message, #414 by @YaraslauZhylko * add `IPvAnyAddress`, `IPv4Address` and `IPv6Address` types, #333 by @pilosus ## v0.20.1 (2019-02-26) * fix type hints of `parse_obj` and similar methods, #405 by @erosennin * fix submodel 
validation, #403 by @samuelcolvin * correct type hints for `ValidationError.json`, #406 by @layday ## v0.20.0 (2019-02-18) * fix tests for python 3.8, #396 by @samuelcolvin * Adds fields to the `dir` method for autocompletion in interactive sessions, #398 by @dgasmith * support `ForwardRef` (and therefore `from __future__ import annotations`) with dataclasses, #397 by @samuelcolvin ## v0.20.0a1 (2019-02-13) * **breaking change** (maybe): more sophisticated argument parsing for validators, any subset of `values`, `config` and `field` is now permitted, eg. `(cls, value, field)`, however the variadic key word argument ("`**kwargs`") **must** be called `kwargs`, #388 by @samuelcolvin * **breaking change**: Adds `skip_defaults` argument to `BaseModel.dict()` to allow skipping of fields that were not explicitly set, signature of `Model.construct()` changed, #389 by @dgasmith * add `py.typed` marker file for PEP-561 support, #391 by @je-l * Fix `extra` behaviour for multiple inheritance/mix-ins, #394 by @YaraslauZhylko ## v0.19.0 (2019-02-04) * Support `Callable` type hint, fix #279 by @proofit404 * Fix schema for fields with `validator` decorator, fix #375 by @tiangolo * Add `multiple_of` constraint to `ConstrainedDecimal`, `ConstrainedFloat`, `ConstrainedInt` and their related types `condecimal`, `confloat`, and `conint` #371, thanks @StephenBrown2 * Deprecated `ignore_extra` and `allow_extra` Config fields in favor of `extra`, #352 by @liiight * Add type annotations to all functions, test fully with mypy, #373 by @samuelcolvin * fix for 'missing' error with `validate_all` or `validate_always`, #381 by @samuelcolvin * Change the second/millisecond watershed for date/datetime parsing to `2e10`, #385 by @samuelcolvin ## v0.18.2 (2019-01-22) * Fix to schema generation with `Optional` fields, fix #361 by @samuelcolvin ## v0.18.1 (2019-01-17) * add `ConstrainedBytes` and `conbytes` types, #315 @Gr1N * adding `MANIFEST.in` to include license in package `.tar.gz`, #358 by @samuelcolvin ## v0.18.0 (2019-01-13) * **breaking change**: don't call validators on keys of dictionaries, #254 by @samuelcolvin * Fix validators with `always=True` when the default is `None` or the type is optional, also prevent `whole` validators being called for sub-fields, fix #132 by @samuelcolvin * improve documentation for settings priority and allow it to be easily changed, #343 by @samuelcolvin * fix `ignore_extra=False` and `allow_population_by_alias=True`, fix #257 by @samuelcolvin * **breaking change**: Set `BaseConfig` attributes `min_anystr_length` and `max_anystr_length` to `None` by default, fix #349 in #350 by @tiangolo * add support for postponed annotations, #348 by @samuelcolvin ## v0.17.0 (2018-12-27) * fix schema for `timedelta` as number, #325 by @tiangolo * prevent validators being called repeatedly after inheritance, #327 by @samuelcolvin * prevent duplicate validator check in ipython, fix #312 by @samuelcolvin * add "Using Pydantic" section to docs, #323 by @tiangolo & #326 by @samuelcolvin * fix schema generation for fields annotated as `: dict`, `: list`, `: tuple` and `: set`, #330 & #335 by @nkonin * add support for constrained strings as dict keys in schema, #332 by @tiangolo * support for passing Config class in dataclasses decorator, #276 by @jarekkar (**breaking change**: this supersedes the `validate_assignment` argument with `config`) * support for nested dataclasses, #334 by @samuelcolvin * better errors when getting an `ImportError` with `PyObject`, #309 by @samuelcolvin * rename 
`get_validators` to `__get_validators__`, deprecation warning on use of old name, #338 by @samuelcolvin * support `ClassVar` by excluding such attributes from fields, #184 by @samuelcolvin ## v0.16.1 (2018-12-10) * fix `create_model` to correctly use the passed `__config__`, #320 by @hugoduncan ## v0.16.0 (2018-12-03) * **breaking change**: refactor schema generation to be compatible with JSON Schema and OpenAPI specs, #308 by @tiangolo * add `schema` to `schema` module to generate top-level schemas from base models, #308 by @tiangolo * add additional fields to `Schema` class to declare validation for `str` and numeric values, #311 by @tiangolo * rename `_schema` to `schema` on fields, #318 by @samuelcolvin * add `case_insensitive` option to `BaseSettings` `Config`, #277 by @jasonkuhrt ## v0.15.0 (2018-11-18) * move codebase to use black, #287 by @samuelcolvin * fix alias use in settings, #286 by @jasonkuhrt and @samuelcolvin * fix datetime parsing in `parse_date`, #298 by @samuelcolvin * allow dataclass inheritance, fix #293 by @samuelcolvin * fix `PyObject = None`, fix #305 by @samuelcolvin * allow `Pattern` type, fix #303 by @samuelcolvin ## v0.14.0 (2018-10-02) * dataclasses decorator, #269 by @Gaunt and @samuelcolvin ## v0.13.1 (2018-09-21) * fix issue where int_validator doesn't cast a `bool` to an `int` #264 by @nphyatt * add deep copy support for `BaseModel.copy()` #249, @gangefors ## v0.13.0 (2018-08-25) * raise an exception if a field's name shadows an existing `BaseModel` attribute #242 * add `UrlStr` and `urlstr` types #236 * timedelta json encoding ISO8601 and total seconds, custom json encoders #247, by @cfkanesan and @samuelcolvin * allow `timedelta` objects as values for properties of type `timedelta` (matches `datetime` etc. behavior) #247 ## v0.12.1 (2018-07-31) * fix schema generation for fields defined using `typing.Any` #237 ## v0.12.0 (2018-07-31) * add `by_alias` argument in `.dict()` and `.json()` model methods #205 * add Json type support #214 * support tuples #227 * major improvements and changes to schema #213 ## v0.11.2 (2018-07-05) * add `NewType` support #115 * fix `list`, `set` & `tuple` validation #225 * separate out `validate_model` method, allow errors to be returned along with valid values #221 ## v0.11.1 (2018-07-02) * support Python 3.7 #216, thanks @layday * Allow arbitrary types in model #209, thanks @oldPadavan ## v0.11.0 (2018-06-28) * make `list`, `tuple` and `set` types stricter #86 * **breaking change**: remove msgpack parsing #201 * add `FilePath` and `DirectoryPath` types #10 * model schema generation #190 * JSON serialisation of models and schemas #133 ## v0.10.0 (2018-06-11) * add `Config.allow_population_by_alias` #160, thanks @bendemaree * **breaking change**: new errors format #179, thanks @Gr1N * **breaking change**: removed `Config.min_number_size` and `Config.max_number_size` #183, thanks @Gr1N * **breaking change**: correct behaviour of `lt` and `gt` arguments to `conint` etc. 
#188 for the old behaviour use `le` and `ge` #194, thanks @jaheba * added error context and ability to redefine error message templates using `Config.error_msg_templates` #183, thanks @Gr1N * fix typo in validator exception #150 * copy defaults to model values, so different models don't share objects #154 ## v0.9.1 (2018-05-10) * allow custom `get_field_config` on config classes #159 * add `UUID1`, `UUID3`, `UUID4` and `UUID5` types #167, thanks @Gr1N * modify some inconsistent docstrings and annotations #173, thanks @YannLuo * fix type annotations for exotic types #171, thanks @Gr1N * re-use type validators in exotic types #171 * scheduled monthly requirements updates #168 * add `Decimal`, `ConstrainedDecimal` and `condecimal` types #170, thanks @Gr1N ## v0.9.0 (2018-04-28) * tweak email-validator import error message #145 * fix parse error of `parse_date()` and `parse_datetime()` when input is 0 #144, thanks @YannLuo * add `Config.anystr_strip_whitespace` and `strip_whitespace` kwarg to `constr`, by default values is `False` #163, thanks @Gr1N * add `ConstrainedFloat`, `confloat`, `PositiveFloat` and `NegativeFloat` types #166, thanks @Gr1N ## v0.8.0 (2018-03-25) * fix type annotation for `inherit_config` #139 * **breaking change**: check for invalid field names in validators #140 * validate attributes of parent models #141 * **breaking change**: email validation now uses [email-validator](https://github.com/JoshData/python-email-validator) #142 ## v0.7.1 (2018-02-07) * fix bug with `create_model` modifying the base class ## v0.7.0 (2018-02-06) * added compatibility with abstract base classes (ABCs) #123 * add `create_model` method #113 #125 * **breaking change**: rename `.config` to `.__config__` on a model * **breaking change**: remove deprecated `.values()` on a model, use `.dict()` instead * remove use of `OrderedDict` and use simple dict #126 * add `Config.use_enum_values` #127 * add wildcard validators of the form `@validate('*')` #128 ## v0.6.4 (2018-02-01) * allow python date and times objects #122 ## v0.6.3 (2017-11-26) * fix direct install without `README.rst` present ## v0.6.2 (2017-11-13) * errors for invalid validator use * safer check for complex models in `Settings` ## v0.6.1 (2017-11-08) * prevent duplicate validators, #101 * add `always` kwarg to validators, #102 ## v0.6.0 (2017-11-07) * assignment validation #94, thanks petroswork! 
* JSON in environment variables for complex types, #96 * add `validator` decorators for complex validation, #97 * depreciate `values(...)` and replace with `.dict(...)`, #99 ## v0.5.0 (2017-10-23) * add `UUID` validation #89 * remove `index` and `track` from error object (json) if they're null #90 * improve the error text when a list is provided rather than a dict #90 * add benchmarks table to docs #91 ## v0.4.0 (2017-07-08) * show length in string validation error * fix aliases in config during inheritance #55 * simplify error display * use unicode ellipsis in `truncate` * add `parse_obj`, `parse_raw` and `parse_file` helper functions #58 * switch annotation only fields to come first in fields list not last ## v0.3.0 (2017-06-21) * immutable models via `config.allow_mutation = False`, associated cleanup and performance improvement #44 * immutable helper methods `construct()` and `copy()` #53 * allow pickling of models #53 * `setattr` is removed as `__setattr__` is now intelligent #44 * `raise_exception` removed, Models now always raise exceptions #44 * instance method validators removed * django-restful-framework benchmarks added #47 * fix inheritance bug #49 * make str type stricter so list, dict etc are not coerced to strings. #52 * add `StrictStr` which only always strings as input #52 ## v0.2.1 (2017-06-07) * pypi and travis together messed up the deploy of `v0.2` this should fix it ## v0.2.0 (2017-06-07) * **breaking change**: `values()` on a model is now a method not a property, takes `include` and `exclude` arguments * allow annotation only fields to support mypy * add pretty `to_string(pretty=True)` method for models ## v0.1.0 (2017-06-03) * add docs * add history pydantic-1.2/LICENSE000066400000000000000000000021331357000400300141630ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2017, 2018, 2019 Samuel Colvin and other contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. pydantic-1.2/MANIFEST.in000066400000000000000000000000651357000400300147160ustar00rootroot00000000000000include LICENSE include README.md include HISTORY.md pydantic-1.2/Makefile000066400000000000000000000044151357000400300146230ustar00rootroot00000000000000.DEFAULT_GOAL := all isort = isort -rc pydantic tests black = black -S -l 120 --target-version py36 pydantic tests .PHONY: install install: pip install -U setuptools pip pip install -U -r requirements.txt SKIP_CYTHON=1 pip install -e . 
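# The two build-cython targets below compile pydantic in place with Cython; the
# -trace variant defines CYTHON_TRACE so line coverage can be collected from the
# compiled modules (this is what the 'Cython: 3.x' Travis jobs and the
# testcov-compile target further down rely on).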
.PHONY: build-cython-trace
build-cython-trace:
	python setup.py build_ext --force --inplace --define CYTHON_TRACE

.PHONY: build-cython
build-cython:
	python setup.py build_ext --inplace

.PHONY: format
format:
	$(isort)
	$(black)

.PHONY: lint
lint:
	flake8 pydantic/ tests/
	$(isort) --check-only
	$(black) --check

.PHONY: check-dist
check-dist:
	python setup.py check -ms
	SKIP_CYTHON=1 python setup.py sdist
	twine check dist/*

.PHONY: mypy
mypy:
	mypy pydantic

.PHONY: test
test:
	pytest --cov=pydantic
	@python tests/try_assert.py

.PHONY: testcov
testcov: test
	@echo "building coverage html"
	@coverage html

.PHONY: testcov-compile
testcov-compile: build-cython-trace test
	@echo "building coverage html"
	@coverage html

.PHONY: test-examples
test-examples:
	@echo "running examples"
	@find docs/examples -type f -name '*.py' | xargs -I'{}' sh -c 'python {} >/dev/null 2>&1 || (echo "{} failed")'

.PHONY: all
all: testcov lint mypy

.PHONY: benchmark-all
benchmark-all:
	python benchmarks/run.py

.PHONY: benchmark-pydantic
benchmark-pydantic:
	python benchmarks/run.py pydantic-only

.PHONY: clean
clean:
	rm -rf `find . -name __pycache__`
	rm -f `find . -type f -name '*.py[co]' `
	rm -f `find . -type f -name '*~' `
	rm -f `find . -type f -name '.*~' `
	rm -rf .cache
	rm -rf .pytest_cache
	rm -rf .mypy_cache
	rm -rf htmlcov
	rm -rf *.egg-info
	rm -f .coverage
	rm -f .coverage.*
	rm -rf build
	rm -rf dist
	rm -f pydantic/*.c pydantic/*.so
	python setup.py clean
	rm -rf site
	rm -rf docs/_build
	rm -rf docs/.changelog.md docs/.version.md docs/.tmp_schema_mappings.html

.PHONY: docs
docs:
	./docs/build/main.py
	mkdocs build
	@# to work with the old sphinx build and deploy:
	@rm -rf docs/_build/
	@mkdir docs/_build/
	@cp -r site docs/_build/html

.PHONY: docs-serve
docs-serve:
	./docs/build/main.py
	mkdocs serve

.PHONY: publish
publish: docs
	zip -r site.zip site
	@curl -H "Content-Type: application/zip" -H "Authorization: Bearer ${NETLIFY}" \
	      --data-binary "@site.zip" https://api.netlify.com/api/v1/sites/pydantic-docs.netlify.com/deploys

pydantic-1.2/README.md

# pydantic

[![BuildStatus](https://travis-ci.org/samuelcolvin/pydantic.svg?branch=master)](https://travis-ci.org/samuelcolvin/pydantic)
[![Coverage](https://codecov.io/gh/samuelcolvin/pydantic/branch/master/graph/badge.svg)](https://codecov.io/gh/samuelcolvin/pydantic)
[![pypi](https://img.shields.io/pypi/v/pydantic.svg)](https://pypi.python.org/pypi/pydantic)
[![CondaForge](https://img.shields.io/conda/v/conda-forge/pydantic.svg)](https://anaconda.org/conda-forge/pydantic)
[![downloads](https://img.shields.io/pypi/dm/pydantic.svg)](https://pypistats.org/packages/pydantic)
[![versions](https://img.shields.io/pypi/pyversions/pydantic.svg)](https://github.com/samuelcolvin/pydantic)
[![license](https://img.shields.io/github/license/samuelcolvin/pydantic.svg)](https://github.com/samuelcolvin/pydantic/blob/master/LICENSE)

Data validation and settings management using Python type hinting.

Fast and extensible, *pydantic* plays nicely with your linters/IDE/brain.
Define how data should be in pure, canonical Python 3.6+; validate it with *pydantic*.

## Help

See [documentation](https://pydantic-docs.helpmanual.io/) for more details.

## Installation

Install using `pip install -U pydantic` or `conda install pydantic -c conda-forge`.
For more installation options to make *pydantic* even faster,
see the [Install](https://pydantic-docs.helpmanual.io/install/) section in the documentation.
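Whether you end up with a compiled (Cython) build or a pure-Python one can be checked via
`pydantic.compiled`; the snippet below is a minimal sketch using the same one-liner as the
CI configuration in `.travis.yml`:

```bash
pip install -U pydantic
python -c "import pydantic; print('compiled:', pydantic.compiled)"
```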
## A Simple Example

```py
from datetime import datetime
from typing import List
from pydantic import BaseModel

class User(BaseModel):
    id: int
    name = 'John Doe'
    signup_ts: datetime = None
    friends: List[int] = []

external_data = {'id': '123', 'signup_ts': '2017-06-01 12:22', 'friends': [1, '2', b'3']}
user = User(**external_data)
print(user)
#> User id=123 name='John Doe' signup_ts=datetime.datetime(2017, 6, 1, 12, 22) friends=[1, 2, 3]
print(user.id)
#> 123
```

## Contributing

For guidance on setting up a development environment and how to make a contribution to *pydantic*,
see [Contributing to Pydantic](https://pydantic-docs.helpmanual.io/contributing/).

pydantic-1.2/azure-pipelines.yml

# https://docs.microsoft.com/azure/devops/pipelines/languages/python
pool:
  vmImage: 'windows-latest'

strategy:
  matrix:
    Python36:
      python.version: '3.6'
    Python37:
      python.version: '3.7'
    Python38:
      python.version: '3.8'

steps:
- task: UsePythonVersion@0
  inputs:
    versionSpec: '$(python.version)'
  displayName: 'Use Python $(python.version)'

- script: |
    pip install --user -U setuptools pip 2>&1
    pip install --user -U -r requirements.txt 2>&1
    pip install --user -e . 2>&1
    pip freeze 2>&1
  displayName: install
  failOnStderr: false

- script: make test 2>&1
  displayName: test
  failOnStderr: false

- script: make lint 2>&1
  displayName: lint
  failOnStderr: false

- script: make mypy 2>&1
  displayName: mypy
  failOnStderr: false

pydantic-1.2/benchmarks/profile.py

import json

from line_profiler import LineProfiler

import pydantic.datetime_parse
import pydantic.validators
from pydantic import validate_model
from pydantic.fields import Field
from test_pydantic import TestPydantic

with open('./benchmarks/cases.json') as f:
    cases = json.load(f)


def run():
    count, pass_count = 0, 0
    test = TestPydantic(False)
    for case in cases:
        passed, result = test.validate(case)
        count += 1
        pass_count += passed
    print('success percentage:', pass_count / count * 100)


funcs_to_profile = [validate_model, Field.validate, Field._validate_singleton, Field._apply_validators]
module_objects = {**vars(pydantic.validators), **vars(pydantic.datetime_parse), **vars(Field)}
funcs_to_profile += [v for k, v in module_objects.items() if not k.startswith('_') and str(v).startswith('40} time={time:0.3f}s, success={success:0.2f}%')
            times.append(time)

        print(f'{p:>40} best={min(times):0.3f}s, avg={mean(times):0.3f}s, stdev={stdev(times):0.3f}s')
        model_count = 3 * len(cases)
        avg = mean(times) / model_count * 1e6
        sd = stdev(times) / model_count * 1e6
        results.append(f'{p:>40} best={min(times) / model_count * 1e6:0.3f}μs/iter '
                       f'avg={avg:0.3f}μs/iter stdev={sd:0.3f}μs/iter')
        csv_results.append([p, test_class.version, avg])
    print()
    for r in results:
        print(r)
    if 'SAVE' in os.environ:
        save_md(csv_results)


def save_md(data):
    headings = 'Package', 'Version', 'Relative Performance', 'Mean validation time'
    rows = [headings, ['---' for _ in headings]]
    first_avg = None
    for package, version, avg in sorted(data, key=itemgetter(2)):
        if first_avg:
            relative = f'{avg / first_avg:0.1f}x slower'
        else:
            relative = ''
            first_avg = avg
        rows.append([package, f'`{version}`', relative, f'{avg:0.1f}μs'])

    table = '\n'.join(' | '.join(row) for row in rows)
    text = f"""\
[//]: <> (Generated with benchmarks/run.py, DO NOT EDIT THIS FILE DIRECTLY, instead
run `SAVE=1 python ./run.py`.) {table} """ (Path(__file__).parent / '..' / 'docs' / '.benchmarks_table.md').write_text(text) def diff(): json_path = THIS_DIR / 'cases.json' with json_path.open() as f: cases = json.load(f) allow_extra = True pydantic = TestPydantic(allow_extra) others = [t(allow_extra) for t in other_tests] for case in cases: pydantic_passed, pydantic_result = pydantic.validate(case) for other in others: other_passed, other_result = other.validate(case) if other_passed != pydantic_passed: print(f'⨯ pydantic {pydantic_passed} != {other.package} {other_passed}') debug(case, pydantic_result, other_result) return print('✓ data passes match for all packages') if __name__ == '__main__': if 'diff' in sys.argv: diff() else: main() pydantic-1.2/benchmarks/test_cattrs.py000066400000000000000000000065501357000400300202130ustar00rootroot00000000000000from datetime import datetime from typing import List, Optional import attr import cattr from dateutil.parser import parse class TestCAttrs: package = 'attrs + cattrs' version = attr.__version__ def __init__(self, allow_extra): # cf. https://github.com/Tinche/cattrs/issues/26 why at least structure_str is needed def structure_str(s, _): if not isinstance(s, str): raise ValueError() return s def structure_int(i, _): if not isinstance(i, int): raise ValueError() return i class PositiveInt(int): ... def structure_posint(i, x): i = PositiveInt(i) if not isinstance(i, PositiveInt): raise ValueError() if i <= 0: raise ValueError() return i cattr.register_structure_hook(datetime, lambda isostring, _: parse(isostring)) cattr.register_structure_hook(str, structure_str) cattr.register_structure_hook(int, structure_int) cattr.register_structure_hook(PositiveInt, structure_posint) def str_len_val(max_len: int, min_len: int = 0, required: bool = False): # validate the max len of a string and optionally its min len and whether None is # an acceptable value def _check_str_len(self, attribute, value): if value is None: if required: raise ValueError("") else: return if len(value) > max_len: raise ValueError("") if min_len and len(value) < min_len: raise ValueError("") return _check_str_len def pos_int(self, attribute, value): # Validate that value is a positive >0 integer; None is allowed if value is None: return if value <= 0: raise ValueError("") @attr.s(auto_attribs=True, frozen=True, kw_only=True) class Skill: subject: str subject_id: int category: str qual_level: str qual_level_id: int qual_level_ranking: float = 0 @attr.s(auto_attribs=True, frozen=True, kw_only=True) class Location: latitude: float = None longitude: float = None @attr.s(auto_attribs=True, frozen=True, kw_only=True) class Model: id: int sort_index: float client_name: str = attr.ib(validator=str_len_val(255)) # client_email: EmailStr = None client_phone: Optional[str] = attr.ib(default=None, validator=str_len_val(255)) location: Optional[Location] = None contractor: Optional[PositiveInt] upstream_http_referrer: Optional[str] = attr.ib(default=None, validator=str_len_val(1023)) grecaptcha_response: str = attr.ib(validator=str_len_val(1000, 20, required=True)) last_updated: Optional[datetime] = None skills: List[Skill] = [] self.model = Model def validate(self, data): try: return True, cattr.structure(data, self.model) except ValueError as e: return False, str(e) except TypeError as e: return False, str(e) pydantic-1.2/benchmarks/test_cerberus.py000066400000000000000000000040671357000400300205260ustar00rootroot00000000000000from cerberus import Validator, __version__ from dateutil.parser 
import parse as datetime_parse class TestCerberus: package = 'cerberus' version = str(__version__) def __init__(self, allow_extra): schema = { 'id': {'type': 'integer', 'required': True}, 'client_name': {'type': 'string', 'maxlength': 255, 'required': True}, 'sort_index': {'type': 'float', 'required': True}, 'client_phone': {'type': 'string', 'maxlength': 255, 'nullable': True}, 'location': { 'type': 'dict', 'schema': {'latitude': {'type': 'float'}, 'longitude': {'type': 'float'}}, 'nullable': True, }, 'contractor': {'type': 'integer', 'min': 0, 'nullable': True, 'coerce': int}, 'upstream_http_referrer': {'type': 'string', 'maxlength': 1023, 'nullable': True}, 'grecaptcha_response': {'type': 'string', 'minlength': 20, 'maxlength': 1000, 'required': True}, 'last_updated': {'type': 'datetime', 'nullable': True, 'coerce': datetime_parse}, 'skills': { 'type': 'list', 'default': [], 'schema': { 'type': 'dict', 'schema': { 'subject': {'type': 'string', 'required': True}, 'subject_id': {'type': 'integer', 'required': True}, 'category': {'type': 'string', 'required': True}, 'qual_level': {'type': 'string', 'required': True}, 'qual_level_id': {'type': 'integer', 'required': True}, 'qual_level_ranking': {'type': 'float', 'default': 0, 'required': True}, }, }, }, } self.v = Validator(schema) self.v.allow_unknown = allow_extra def validate(self, data): validated = self.v.validated(data) if validated is None: return False, self.v.errors else: return True, validated pydantic-1.2/benchmarks/test_drf.py000066400000000000000000000042611357000400300174630ustar00rootroot00000000000000import django from django.conf import settings settings.configure( INSTALLED_APPS=['django.contrib.auth', 'django.contrib.contenttypes'] ) django.setup() from rest_framework import __version__, serializers class TestDRF: package = 'django-restful-framework' version = __version__ def __init__(self, allow_extra): class Model(serializers.Serializer): id = serializers.IntegerField() client_name = serializers.CharField(max_length=255, trim_whitespace=False) sort_index = serializers.FloatField() # client_email = serializers.EmailField(required=False, allow_null=True) client_phone = serializers.CharField(max_length=255, trim_whitespace=False, required=False, allow_null=True) class Location(serializers.Serializer): latitude = serializers.FloatField(required=False, allow_null=True) longitude = serializers.FloatField(required=False, allow_null=True) location = Location(required=False, allow_null=True) contractor = serializers.IntegerField(required=False, allow_null=True, min_value=0) upstream_http_referrer = serializers.CharField( max_length=1023, trim_whitespace=False, required=False, allow_null=True ) grecaptcha_response = serializers.CharField(min_length=20, max_length=1000, trim_whitespace=False) last_updated = serializers.DateTimeField(required=False, allow_null=True) class Skill(serializers.Serializer): subject = serializers.CharField() subject_id = serializers.IntegerField() category = serializers.CharField() qual_level = serializers.CharField() qual_level_id = serializers.IntegerField() qual_level_ranking = serializers.FloatField(default=0) skills = serializers.ListField(child=Skill()) self.allow_extra = allow_extra # unused self.serializer = Model def validate(self, data): s = self.serializer(data=data) if s.is_valid(): return True, dict(s.data) else: return False, dict(s.errors) pydantic-1.2/benchmarks/test_marshmallow.py000066400000000000000000000034271357000400300212410ustar00rootroot00000000000000from marshmallow import 
Schema, ValidationError, __version__, fields, validate class TestMarshmallow: package = 'marshmallow' version = __version__ def __init__(self, allow_extra): class LocationSchema(Schema): latitude = fields.Float(allow_none=True) longitude = fields.Float(allow_none=True) class SkillSchema(Schema): subject = fields.Str(required=True) subject_id = fields.Integer(required=True) category = fields.Str(required=True) qual_level = fields.Str(required=True) qual_level_id = fields.Integer(required=True) qual_level_ranking = fields.Float(default=0) class Model(Schema): id = fields.Integer(required=True) client_name = fields.Str(validate=validate.Length(max=255), required=True) sort_index = fields.Float(required=True) # client_email = fields.Email() client_phone = fields.Str(validate=validate.Length(max=255), allow_none=True) location = fields.Nested(LocationSchema) contractor = fields.Integer(validate=validate.Range(min=0), allow_none=True) upstream_http_referrer = fields.Str(validate=validate.Length(max=1023), allow_none=True) grecaptcha_response = fields.Str(validate=validate.Length(min=20, max=1000), required=True) last_updated = fields.DateTime(allow_none=True) skills = fields.Nested(SkillSchema, many=True) self.allow_extra = allow_extra # unused self.schema = Model() def validate(self, data): try: result = self.schema.load(data) except ValidationError as e: return False, e.normalized_messages() else: return True, result pydantic-1.2/benchmarks/test_pydantic.py000066400000000000000000000026221357000400300205220ustar00rootroot00000000000000from datetime import datetime from typing import List from pydantic import VERSION, BaseModel, Extra, PositiveInt, ValidationError, constr class TestPydantic: package = 'pydantic' version = str(VERSION) def __init__(self, allow_extra): class Model(BaseModel): id: int client_name: constr(max_length=255) sort_index: float # client_email: EmailStr = None client_phone: constr(max_length=255) = None class Location(BaseModel): latitude: float = None longitude: float = None location: Location = None contractor: PositiveInt = None upstream_http_referrer: constr(max_length=1023) = None grecaptcha_response: constr(min_length=20, max_length=1000) last_updated: datetime = None class Skill(BaseModel): subject: str subject_id: int category: str qual_level: str qual_level_id: int qual_level_ranking: float = 0 skills: List[Skill] = [] class Config: extra = Extra.allow if allow_extra else Extra.forbid self.model = Model def validate(self, data): try: return True, self.model(**data) except ValidationError as e: return False, str(e) pydantic-1.2/benchmarks/test_trafaret.py000066400000000000000000000031751357000400300205230ustar00rootroot00000000000000from dateutil.parser import parse import trafaret as t class TestTrafaret: package = 'trafaret' version = '.'.join(map(str, t.__VERSION__)) def __init__(self, allow_extra): self.schema = t.Dict({ 'id': t.Int(), 'client_name': t.String(max_length=255), 'sort_index': t.Float, # t.Key('client_email', optional=True): t.Or(t.Null | t.Email()), t.Key('client_phone', optional=True): t.Or(t.Null | t.String(max_length=255)), t.Key('location', optional=True): t.Or(t.Null | t.Dict({ 'latitude': t.Or(t.Float | t.Null), 'longitude': t.Or(t.Float | t.Null), })), t.Key('contractor', optional=True): t.Or(t.Null | t.Int(gt=0)), t.Key('upstream_http_referrer', optional=True): t.Or(t.Null | t.String(max_length=1023)), t.Key('grecaptcha_response'): t.String(min_length=20, max_length=1000), t.Key('last_updated', optional=True): t.Or(t.Null | t.String >> 
parse),
            t.Key('skills', default=[]): t.List(t.Dict({
                'subject': t.String,
                'subject_id': t.Int,
                'category': t.String,
                'qual_level': t.String,
                'qual_level_id': t.Int,
                t.Key('qual_level_ranking', default=0): t.Float,
            })),
        })
        if allow_extra:
            self.schema.allow_extra('*')

    def validate(self, data):
        try:
            return True, self.schema.check(data)
        except t.DataError:
            return False, None
        except ValueError:
            return False, None

pydantic-1.2/benchmarks/test_valideer.py

import re
import subprocess

import dateutil.parser
import valideer as V

# valideer appears to provide no way of getting the installed version
p = subprocess.run(['pip', 'freeze'], stdout=subprocess.PIPE, encoding='utf8', check=True)
valideer_version = re.search(r'valideer==(.+)', p.stdout).group(1)


class TestValideer:
    package = 'valideer'
    version = valideer_version

    def __init__(self, allow_extra):
        schema = {
            '+id': int,
            '+client_name': V.String(max_length=255),
            '+sort_index': float,
            'client_phone': V.Nullable(V.String(max_length=255)),
            'location': {'latitude': float, 'longitude': float},
            'contractor': V.Range(V.AdaptTo(int), min_value=1),
            'upstream_http_referrer': V.Nullable(V.String(max_length=1023)),
            '+grecaptcha_response': V.String(min_length=20, max_length=1000),
            'last_updated': V.AdaptBy(dateutil.parser.parse),
            'skills': V.Nullable(
                [
                    {
                        '+subject': str,
                        '+subject_id': int,
                        '+category': str,
                        '+qual_level': str,
                        '+qual_level_id': int,
                        'qual_level_ranking': V.Nullable(float, default=0),
                    }
                ],
                default=[],
            ),
        }
        self.validator = V.parse(schema, additional_properties=allow_extra)

    def validate(self, data):
        try:
            return True, self.validator.validate(data)
        except V.ValidationError as e:
            return False, str(e)

pydantic-1.2/changes/README.md

# Pending Changes

This directory contains files describing changes to pydantic since the last release.

If you're creating a pull request, please add a new file to this directory called
`<pull request or issue number>-<github username>.md` (for example, a change from PR 1234
by @yourname would be described in `1234-yourname.md`). It should be formatted as a single
paragraph of markdown.

The contents of this file will be used to update `HISTORY.md` before the next release.

pydantic-1.2/changes/make_history.py

#!/usr/bin/env python3.7
import re
import sys
from datetime import date
from importlib.machinery import SourceFileLoader
from pathlib import Path

THIS_DIR = Path(__file__).parent

name_regex = re.compile(r'(\d+)-(.*?)\.md')
bullet_list = []
for p in THIS_DIR.glob('*.md'):
    if p.name == 'README.md':
        continue
    m = name_regex.fullmatch(p.name)
    if not m:
        raise RuntimeError(f'{p.name!r}: invalid change file name')
    gh_id, creator = m.groups()
    content = p.read_text().replace('\r\n', '\n').strip('\n. ')
    if '\n\n' in content:
        raise RuntimeError(f'{p.name!r}: content includes multiple paragraphs')
    content = content.replace('\n', '\n  ')
    priority = 0 if '**breaking change' in content.lower() else 1
    bullet_list.append((priority, int(gh_id), f'* {content}, #{gh_id} by @{creator}'))

if not bullet_list:
    print('no changes found')
    sys.exit(0)

version = SourceFileLoader('version', 'pydantic/version.py').load_module()
chunk_title = f'v{version.VERSION} ({date.today():%Y-%m-%d})'
new_chunk = '## {}\n\n{}\n\n'.format(chunk_title, '\n'.join(c for *_, c in sorted(bullet_list)))
print(f'{chunk_title}...{len(bullet_list)} items')

history_path = THIS_DIR / '..' / 'HISTORY.md'
history = new_chunk + history_path.read_text()
history_path.write_text(history)

for p in THIS_DIR.glob('*.md'):
    if p.name != 'README.md':
        p.unlink()

print(
    'changes deleted and HISTORY.md successfully updated, to reset use:\n\n'
    '  git checkout -- changes/*-*.md HISTORY.md\n'
)

pydantic-1.2/docs/.benchmarks_table.md

[//]: <> (Generated with benchmarks/run.py, DO NOT EDIT THIS FILE DIRECTLY, instead run `SAVE=1 python ./run.py`.)

Package | Version | Relative Performance | Mean validation time
--- | --- | --- | ---
pydantic | `1.1` | | 43.6μs
attrs + cattrs | `19.3.0` | 1.4x slower | 59.9μs
valideer | `0.4.2` | 1.4x slower | 61.8μs
marshmallow | `3.2.2` | 2.5x slower | 107.6μs
trafaret | `2.0.0` | 3.4x slower | 148.7μs
django-restful-framework | `3.10.3` | 12.6x slower | 551.2μs
cerberus | `1.3.2` | 26.3x slower | 1146.3μs

pydantic-1.2/docs/benchmarks.md

Below are the results of crude benchmarks comparing *pydantic* to other validation libraries.

{!.benchmarks_table.md!}

See [the benchmarks code](https://github.com/samuelcolvin/pydantic/tree/master/benchmarks)
for more details on the test case. Feel free to suggest more packages to benchmark or
improve an existing one.

Benchmarks were run with Python 3.7.4 and the package versions listed above installed via
pypi on Ubuntu 18.04.
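To reproduce these numbers locally, the repository's own entry points are the `benchmark-all`
Makefile target and `benchmarks/run.py`; the sketch below assumes the `SAVE=1` invocation is
run from inside the `benchmarks/` directory, per the generated table's header comment:

```bash
make benchmark-all        # benchmark every package, as in the Travis 'Benchmarks' job
SAVE=1 python ./run.py    # additionally regenerate docs/.benchmarks_table.md
```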
pydantic-1.2/docs/build/000077500000000000000000000000001357000400300152065ustar00rootroot00000000000000pydantic-1.2/docs/build/exec_examples.py000077500000000000000000000146171357000400300204160ustar00rootroot00000000000000#!/usr/bin/env python3 import importlib import inspect import json import os import re import shutil import subprocess import sys import textwrap import traceback from pathlib import Path from typing import Any from unittest.mock import patch from ansi2html import Ansi2HTMLConverter from devtools import PrettyFormat THIS_DIR = Path(__file__).parent DOCS_DIR = (THIS_DIR / '..').resolve() EXAMPLES_DIR = DOCS_DIR / 'examples' TMP_EXAMPLES_DIR = DOCS_DIR / '.tmp_examples' MAX_LINE_LENGTH = int(re.search(r'max_line_length = (\d+)', (EXAMPLES_DIR / '.editorconfig').read_text()).group(1)) LONG_LINE = 50 pformat = PrettyFormat(simple_cutoff=LONG_LINE) def to_string(value: Any) -> str: # attempt to build a pretty equivalent of the print output if isinstance(value, (dict, list, tuple, set)): return pformat(value) elif isinstance(value, str) and any(re.fullmatch(r, value, flags=re.DOTALL) for r in ['{".+}', r'\[.+\]']): try: obj = json.loads(value) except ValueError: # not JSON, not a problem pass else: s = json.dumps(obj) if len(s) > LONG_LINE: json.dumps(obj, indent=2) else: return s return str(value) class MockPrint: def __init__(self, file: Path): self.file = file self.statements = [] def __call__(self, *args, file=None, flush=None): frame = inspect.currentframe().f_back.f_back.f_back if not self.file.samefile(frame.f_code.co_filename): # happens when index_error.py imports index_main.py return s = ' '.join(map(to_string, args)) lines = [] for line in s.split('\n'): if len(line) > MAX_LINE_LENGTH - 3: lines += textwrap.wrap(line, width=MAX_LINE_LENGTH - 3) else: lines.append(line) self.statements.append((frame.f_lineno, lines)) def all_md_contents() -> str: file_contents = [] for f in DOCS_DIR.glob('**/*.md'): file_contents.append(f.read_text()) return '\n\n\n'.join(file_contents) def gen_ansi_output(): conv = Ansi2HTMLConverter() input_file = EXAMPLES_DIR / 'devtools_main.py' os.environ['PY_DEVTOOLS_HIGHLIGHT'] = 'true' p = subprocess.run((sys.executable, str(input_file)), stdout=subprocess.PIPE, check=True, encoding='utf8') html = conv.convert(p.stdout, full=False).strip('\r\n') full_html = f'
<div class="terminal">\n<pre class="terminal-content">\n{html}\n</pre>\n</div>
' path = TMP_EXAMPLES_DIR / f'{input_file.stem}.html' path.write_text(full_html) print(f'generated ansi output to {path}') def exec_examples(): errors = [] all_md = all_md_contents() new_files = {} os.environ.clear() os.environ.update({'my_auth_key': 'xxx', 'my_api_key': 'xxx'}) sys.path.append(str(EXAMPLES_DIR)) for file in sorted(EXAMPLES_DIR.iterdir()): def error(desc: str): errors.append((file, desc)) sys.stderr.write(f'error in {file.name}: {desc}\n') if not file.is_file(): # __pycache__, maybe others continue if file.suffix != '.py': # just copy new_files[file.name] = file.read_text() continue if f'{{!.tmp_examples/{file.name}!}}' not in all_md: error('file not used anywhere') file_text = file.read_text() if '\n\n\n' in file_text: error('too many new lines') if not file_text.endswith('\n'): error('no trailing new line') if re.search('^ *# *>', file_text, flags=re.M): error('contains comments with print output, please remove') dont_execute_re = re.compile(r'^# dont-execute\n', flags=re.M) if dont_execute_re.search(file_text): lines = dont_execute_re.sub('', file_text).split('\n') else: no_print_intercept_re = re.compile(r'^# no-print-intercept\n', flags=re.M) no_print_intercept = bool(no_print_intercept_re.search(file_text)) if no_print_intercept: file_text = no_print_intercept_re.sub('', file_text) mp = MockPrint(file) mod = None with patch('builtins.print') as mock_print: if not no_print_intercept: mock_print.side_effect = mp try: mod = importlib.import_module(file.stem) except Exception: tb = traceback.format_exception(*sys.exc_info()) error(''.join(e for e in tb if '/pydantic/docs/examples/' in e or not e.startswith(' File '))) if mod and not mod.__file__.startswith(str(EXAMPLES_DIR)): error(f'module path "{mod.__file__}" not inside "{EXAMPLES_DIR}", name may shadow another module?') lines = file_text.split('\n') to_json_line = '# output-json' if to_json_line in lines: lines = [line for line in lines if line != to_json_line] if len(mp.statements) != 1: error('should only have one print statement') new_files[file.stem + '.json'] = '\n'.join(mp.statements[0][1]) + '\n' else: for line_no, print_lines in reversed(mp.statements): if len(print_lines) > 2: text = '"""\n{}\n"""'.format('\n'.join(print_lines)) else: text = '\n'.join('#> ' + l for l in print_lines) lines.insert(line_no, text) try: ignore_above = lines.index('# ignore-above') except ValueError: pass else: lines = lines[ignore_above + 1 :] lines = '\n'.join(lines).split('\n') if any(len(l) > MAX_LINE_LENGTH for l in lines): error(f'lines longer than {MAX_LINE_LENGTH} characters') new_files[file.name] = '\n'.join(lines) if errors: print(f'\n{len(errors)} errors, not writing files\n') return 1 if TMP_EXAMPLES_DIR.exists(): shutil.rmtree(TMP_EXAMPLES_DIR) print(f'writing {len(new_files)} example files to {TMP_EXAMPLES_DIR}') TMP_EXAMPLES_DIR.mkdir() for file_name, content in new_files.items(): (TMP_EXAMPLES_DIR / file_name).write_text(content) gen_ansi_output() return 0 if __name__ == '__main__': sys.exit(exec_examples()) pydantic-1.2/docs/build/main.py000077500000000000000000000017731357000400300165170ustar00rootroot00000000000000#!/usr/bin/env python3 import re import sys from importlib.machinery import SourceFileLoader from pathlib import Path THIS_DIR = Path(__file__).parent PROJECT_ROOT = THIS_DIR / '..' / '..' 
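# main() below rewrites the plain issue ("#123") and user ("@name") references
# in HISTORY.md as GitHub links, writes the documentation version marker, then
# builds the schema-mapping table and executes the documentation examples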
def main(): history = (PROJECT_ROOT / 'HISTORY.md').read_text() history = re.sub(r'#(\d+)', r'[#\1](https://github.com/samuelcolvin/pydantic/issues/\1)', history) history = re.sub(r'( +)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I) history = re.sub('@@', '@', history) (PROJECT_ROOT / 'docs/.changelog.md').write_text(history) version = SourceFileLoader('version', str(PROJECT_ROOT / 'pydantic/version.py')).load_module() (PROJECT_ROOT / 'docs/.version.md').write_text(f'Documentation for version: **v{version.VERSION}**\n') sys.path.append(str(THIS_DIR.resolve())) from schema_mapping import build_schema_mappings from exec_examples import exec_examples build_schema_mappings() return exec_examples() if __name__ == '__main__': sys.exit(main()) pydantic-1.2/docs/build/schema_mapping.py000077500000000000000000000271641357000400300205500ustar00rootroot00000000000000#!/usr/bin/env python3 """ Build a table of Python / Pydantic to JSON Schema mappings. Done like this rather than as a raw rst table to make future edits easier. Please edit this file directly not .tmp_schema_mappings.html """ import re from pathlib import Path table = [ [ 'bool', 'boolean', '', 'JSON Schema Core', '' ], [ 'str', 'string', '', 'JSON Schema Core', '' ], [ 'float', 'number', '', 'JSON Schema Core', '' ], [ 'int', 'integer', '', 'JSON Schema Validation', '' ], [ 'dict', 'object', '', 'JSON Schema Core', '' ], [ 'list', 'array', '{"items": {}}', 'JSON Schema Core', '' ], [ 'tuple', 'array', '{"items": {}}', 'JSON Schema Core', '' ], [ 'set', 'array', '{"items": {}, {"uniqueItems": true}', 'JSON Schema Validation', '' ], [ 'List[str]', 'array', '{"items": {"type": "string"}}', 'JSON Schema Validation', 'And equivalently for any other sub type, e.g. `List[int]`.' ], [ 'Tuple[str, int]', 'array', '{"items": [{"type": "string"}, {"type": "integer"}]}', 'JSON Schema Validation', ( 'And equivalently for any other set of subtypes. Note: If using schemas for OpenAPI, ' 'you shouldn\'t use this declaration, as it would not be valid in OpenAPI (although it is ' 'valid in JSON Schema).' ) ], [ 'Dict[str, int]', 'object', '{"additionalProperties": {"type": "integer"}}', 'JSON Schema Validation', ( 'And equivalently for any other subfields for dicts. Have in mind that although you can use other types as ' 'keys for dicts with Pydantic, only strings are valid keys for JSON, and so, only str is valid as ' 'JSON Schema key types.' ) ], [ 'Union[str, int]', 'anyOf', '{"anyOf": [{"type": "string"}, {"type": "integer"}]}', 'JSON Schema Validation', 'And equivalently for any other subfields for unions.' ], [ 'Enum', 'enum', '{"enum": [...]}', 'JSON Schema Validation', 'All the literal values in the enum are included in the definition.' 
], [ 'SecretStr', 'string', '{"writeOnly": true}', 'JSON Schema Validation', '' ], [ 'SecretBytes', 'string', '{"writeOnly": true}', 'JSON Schema Validation', '' ], [ 'EmailStr', 'string', '{"format": "email"}', 'JSON Schema Validation', '' ], [ 'NameEmail', 'string', '{"format": "name-email"}', 'Pydantic standard "format" extension', '' ], [ 'AnyUrl', 'string', '{"format": "uri"}', 'JSON Schema Validation', '' ], [ 'DSN', 'string', '{"format": "dsn"}', 'Pydantic standard "format" extension', '' ], [ 'bytes', 'string', '{"format": "binary"}', 'OpenAPI', '' ], [ 'Decimal', 'number', '', 'JSON Schema Core', '' ], [ 'UUID1', 'string', '{"format": "uuid1"}', 'Pydantic standard "format" extension', '' ], [ 'UUID3', 'string', '{"format": "uuid3"}', 'Pydantic standard "format" extension', '' ], [ 'UUID4', 'string', '{"format": "uuid4"}', 'Pydantic standard "format" extension', '' ], [ 'UUID5', 'string', '{"format": "uuid5"}', 'Pydantic standard "format" extension', '' ], [ 'UUID', 'string', '{"format": "uuid"}', 'Pydantic standard "format" extension', 'Suggested in OpenAPI.' ], [ 'FilePath', 'string', '{"format": "file-path"}', 'Pydantic standard "format" extension', '' ], [ 'DirectoryPath', 'string', '{"format": "directory-path"}', 'Pydantic standard "format" extension', '' ], [ 'Path', 'string', '{"format": "path"}', 'Pydantic standard "format" extension', '' ], [ 'datetime', 'string', '{"format": "date-time"}', 'JSON Schema Validation', '' ], [ 'date', 'string', '{"format": "date"}', 'JSON Schema Validation', '' ], [ 'time', 'string', '{"format": "time"}', 'JSON Schema Validation', '' ], [ 'timedelta', 'number', '{"format": "time-delta"}', 'Difference in seconds (a `float`), with Pydantic standard "format" extension', 'Suggested in JSON Schema repository\'s issues by maintainer.' ], [ 'Json', 'string', '{"format": "json-string"}', 'Pydantic standard "format" extension', '' ], [ 'IPv4Address', 'string', '{"format": "ipv4"}', 'JSON Schema Validation', '' ], [ 'IPv6Address', 'string', '{"format": "ipv6"}', 'JSON Schema Validation', '' ], [ 'IPvAnyAddress', 'string', '{"format": "ipvanyaddress"}', 'Pydantic standard "format" extension', 'IPv4 or IPv6 address as used in `ipaddress` module', ], [ 'IPv4Interface', 'string', '{"format": "ipv4interface"}', 'Pydantic standard "format" extension', 'IPv4 interface as used in `ipaddress` module', ], [ 'IPv6Interface', 'string', '{"format": "ipv6interface"}', 'Pydantic standard "format" extension', 'IPv6 interface as used in `ipaddress` module', ], [ 'IPvAnyInterface', 'string', '{"format": "ipvanyinterface"}', 'Pydantic standard "format" extension', 'IPv4 or IPv6 interface as used in `ipaddress` module', ], [ 'IPv4Network', 'string', '{"format": "ipv4network"}', 'Pydantic standard "format" extension', 'IPv4 network as used in `ipaddress` module', ], [ 'IPv6Network', 'string', '{"format": "ipv6network"}', 'Pydantic standard "format" extension', 'IPv6 network as used in `ipaddress` module', ], [ 'IPvAnyNetwork', 'string', '{"format": "ipvanynetwork"}', 'Pydantic standard "format" extension', 'IPv4 or IPv6 network as used in `ipaddress` module', ], [ 'StrictBool', 'boolean', '', 'JSON Schema Core', '' ], [ 'StrictStr', 'string', '', 'JSON Schema Core', '' ], [ 'ConstrainedStr', 'string', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `constr` below.' 
) ], [ 'constr(regex=\'^text$\', min_length=2, max_length=10)', 'string', '{"pattern": "^text$", "minLength": 2, "maxLength": 10}', 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.' ], [ 'ConstrainedInt', 'integer', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `conint` below.' ) ], [ 'conint(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'integer', '{"maximum": 5, "exclusiveMaximum": 6, "minimum": 2, "exclusiveMinimum": 1, "multipleOf": 2}', '', 'Any argument not passed to the function (not defined) will not be included in the schema.' ], [ 'PositiveInt', 'integer', '{"exclusiveMinimum": 0}', 'JSON Schema Validation', '' ], [ 'NegativeInt', 'integer', '{"exclusiveMaximum": 0}', 'JSON Schema Validation', '' ], [ 'ConstrainedFloat', 'number', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `confloat` below.' ) ], [ 'confloat(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'number', '{"maximum": 5, "exclusiveMaximum": 6, "minimum": 2, "exclusiveMinimum": 1, "multipleOf": 2}', 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.' ], [ 'PositiveFloat', 'number', '{"exclusiveMinimum": 0}', 'JSON Schema Validation', '' ], [ 'NegativeFloat', 'number', '{"exclusiveMaximum": 0}', 'JSON Schema Validation', '' ], [ 'ConstrainedDecimal', 'number', '', 'JSON Schema Core', ( 'If the type has values declared for the constraints, they are included as validations. ' 'See the mapping for `condecimal` below.' ) ], [ 'condecimal(gt=1, ge=2, lt=6, le=5, multiple_of=2)', 'number', '{"maximum": 5, "exclusiveMaximum": 6, "minimum": 2, "exclusiveMinimum": 1, "multipleOf": 2}', 'JSON Schema Validation', 'Any argument not passed to the function (not defined) will not be included in the schema.' ], [ 'BaseModel', 'object', '', 'JSON Schema Core', 'All the properties defined will be defined with standard JSON Schema, including submodels.' ], [ 'Color', 'string', '{"format": "color"}', 'Pydantic standard "format" extension', '', ], ] headings = [ 'Python type', 'JSON Schema Type', 'Additional JSON Schema', 'Defined in', ] def md2html(s): return re.sub(r'`(.+?)`', r'\1', s) def build_schema_mappings(): rows = [] for py_type, json_type, additional, defined_in, notes in table: cols = [ f'{py_type}', f'{json_type}', f'{additional}' if additional else '', md2html(defined_in) ] rows.append('\n'.join(f' \n {c}\n ' for c in cols)) if notes: rows.append( f' \n' f' {md2html(notes)}\n' f' ' ) heading = '\n'.join(f' {h}' for h in headings) body = '\n\n\n'.join(rows) text = f"""\ {heading} {body}
""" (Path(__file__).parent / '..' / '.tmp_schema_mappings.html').write_text(text) if __name__ == '__main__': build_schema_mappings() pydantic-1.2/docs/changelog.md000066400000000000000000000000221357000400300163520ustar00rootroot00000000000000{!.changelog.md!} pydantic-1.2/docs/contributing.md000066400000000000000000000042221357000400300171400ustar00rootroot00000000000000We'd love you to contribute to *pydantic*! It should be extremely simple to get started and create a Pull Request. *pydantic* is released regularly so you should see your improvements release in a matter of days or weeks. !!! note Unless your change is trivial (typo, docs tweak etc.), please create an issue to discuss the change before creating a pull request. If you're looking for something to get your teeth into, check out the ["help wanted"](https://github.com/samuelcolvin/pydantic/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) label on github. To make contributing as easy and fast as possible, you'll want to run tests and linting locally. Luckily, *pydantic* has few dependencies, doesn't require compiling and tests don't need access to databases, etc. Because of this, setting up and running the tests should be very simple. You'll need to have **python 3.6**, **3.7**, or **3.8**, **virtualenv**, **git**, and **make** installed. ```bash # 1. clone your fork and cd into the repo directory git clone git@github.com:/pydantic.git cd pydantic # 2. Set up a virtualenv for running tests virtualenv -p `which python3.7` env source env/bin/activate # (or however you prefer to setup a python environment, 3.6 will work too) # 3. Install pydantic, dependencies and test dependencies make install # 4. Checkout a new branch and make your changes git checkout -b my-new-feature-branch # make your changes... # 5. Fix formatting and imports make format # Pydantic uses black to enforce formatting and isort to fix imports # (https://github.com/ambv/black, https://github.com/timothycrosley/isort) # 6. Run tests and linting make # there are a few sub-commands in Makefile like `test`, `testcov` and `lint` # which you might want to use, but generally just `make` should be all you need # 7. Build documentation make docs # if you have changed the documentation make sure it builds successfully # you can also use `make docs-serve` to serve the documentation at localhost:8000 # ... commit, push, and create your pull request ``` **tl;dr**: use `make format` to fix formatting, `make` to run tests and linting & `make docs` to build the docs. 
pydantic-1.2/docs/examples/000077500000000000000000000000001357000400300157255ustar00rootroot00000000000000pydantic-1.2/docs/examples/.editorconfig000066400000000000000000000001161357000400300204000ustar00rootroot00000000000000root = true [*.py] indent_style = space indent_size = 4 max_line_length = 80 pydantic-1.2/docs/examples/dataclasses_default_schema.py000066400000000000000000000004331357000400300236120ustar00rootroot00000000000000import dataclasses from typing import List from pydantic.dataclasses import dataclass @dataclass class User: id: int name: str = 'John Doe' friends: List[int] = dataclasses.field(default_factory=lambda: [0]) user = User(id='42') print(user.__pydantic_model__.schema()) pydantic-1.2/docs/examples/dataclasses_initvars.py000066400000000000000000000011241357000400300225030ustar00rootroot00000000000000from dataclasses import InitVar from pathlib import Path from typing import Optional from pydantic.dataclasses import dataclass @dataclass class PathData: path: Path base_path: InitVar[Optional[Path]] def __post_init__(self, base_path): print(f"Received path={self.path!r}, base_path={base_path!r}") def __post_init_post_parse__(self, base_path): if base_path is not None: self.path = base_path / self.path path_data = PathData('world', base_path="/hello") # Received path='world', base_path='/hello' assert path_data.path == Path('/hello/world') pydantic-1.2/docs/examples/dataclasses_main.py000066400000000000000000000003471357000400300215760ustar00rootroot00000000000000from datetime import datetime from pydantic.dataclasses import dataclass @dataclass class User: id: int name: str = 'John Doe' signup_ts: datetime = None user = User(id='42', signup_ts='2032-06-21T12:00') print(user) pydantic-1.2/docs/examples/dataclasses_nested.py000066400000000000000000000003531357000400300221310ustar00rootroot00000000000000from pydantic import AnyUrl from pydantic.dataclasses import dataclass @dataclass class NavbarButton: href: AnyUrl @dataclass class Navbar: button: NavbarButton navbar = Navbar(button=('https://example.com',)) print(navbar) pydantic-1.2/docs/examples/dataclasses_post_init_post_parse.py000066400000000000000000000005601357000400300251160ustar00rootroot00000000000000from datetime import datetime from pydantic.dataclasses import dataclass @dataclass class Birth: year: int month: int day: int @dataclass class User: birth: Birth def __post_init__(self): print(self.birth) def __post_init_post_parse__(self): print(self.birth) user = User(**{'birth': {'year': 1995, 'month': 3, 'day': 2}}) pydantic-1.2/docs/examples/devtools_main.py000066400000000000000000000011341357000400300211410ustar00rootroot00000000000000# no-print-intercept from datetime import datetime from typing import List from pydantic import BaseModel from devtools import debug class Address(BaseModel): street: str country: str lat: float lng: float class User(BaseModel): id: int name: str signup_ts: datetime friends: List[int] address: Address user = User( id='123', name='John Doe', signup_ts='2019-06-01 12:22', friends=[1234, 4567, 7890], address=dict(street='Testing', country='uk', lat=51.5, lng=0) ) debug(user) print('\nshould be much easier read than:\n') print('user:', user) pydantic-1.2/docs/examples/exporting_models_copy.py000066400000000000000000000010121357000400300227050ustar00rootroot00000000000000from pydantic import BaseModel class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): banana: float foo: str bar: BarModel m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) 
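# copy() accepts the same include/exclude sets as dict(), plus an update dict
# of values to change; it returns a shallow copy unless deep=True is passed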
print(m.copy(include={'foo', 'bar'})) print(m.copy(exclude={'foo', 'bar'})) print(m.copy(update={'banana': 0})) print(id(m.bar), id(m.copy().bar)) # normal copy gives the same object reference for `bar` print(id(m.bar), id(m.copy(deep=True).bar)) # deep copy gives a new object reference for `bar` pydantic-1.2/docs/examples/exporting_models_dict.py000066400000000000000000000005241357000400300226650ustar00rootroot00000000000000from pydantic import BaseModel class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): banana: float foo: str bar: BarModel m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) # returns a dictionary: print(m.dict()) print(m.dict(include={'foo', 'bar'})) print(m.dict(exclude={'foo', 'bar'})) pydantic-1.2/docs/examples/exporting_models_exclude1.py000066400000000000000000000010501357000400300234470ustar00rootroot00000000000000from pydantic import BaseModel, SecretStr class User(BaseModel): id: int username: str password: SecretStr class Transaction(BaseModel): id: str user: User value: int t = Transaction( id="1234567890", user=User( id=42, username="JohnDoe", password="hashedpassword" ), value=9876543210 ) # using a set: print(t.dict(exclude={'user', 'value'})) # using a dict: print(t.dict(exclude={'user': {'username', 'password'}, 'value': ...})) print(t.dict(include={'id': ..., 'user': {'id'}})) pydantic-1.2/docs/examples/exporting_models_exclude2.py000066400000000000000000000025521357000400300234600ustar00rootroot00000000000000import datetime from typing import List from pydantic import BaseModel, SecretStr class Country(BaseModel): name: str phone_code: int class Address(BaseModel): post_code: int country: Country class CardDetails(BaseModel): number: SecretStr expires: datetime.date class Hobby(BaseModel): name: str info: str class User(BaseModel): first_name: str second_name: str address: Address card_details: CardDetails hobbies: List[Hobby] user = User( first_name='John', second_name='Doe', address=Address( post_code=123456, country=Country( name='USA', phone_code=1 ) ), card_details=CardDetails( number=4212934504460000, expires=datetime.date(2020, 5, 1) ), hobbies=[ Hobby(name='Programming', info='Writing code and stuff'), Hobby(name='Gaming', info='Hell Yeah!!!') ] ) exclude_keys = { 'second_name': ..., 'address': {'post_code': ..., 'country': {'phone_code'}}, 'card_details': ..., # You can exclude values from tuples and lists by indexes 'hobbies': {-1: {'info'}}, } include_keys = { 'first_name': ..., 'address': {'country': {'name'}}, 'hobbies': {0: ..., -1: {'name'}} } # would be the same as user.dict(exclude=exclude_keys) in this case: print(user.dict(include=include_keys)) pydantic-1.2/docs/examples/exporting_models_iterate.py000066400000000000000000000004441357000400300234000ustar00rootroot00000000000000from pydantic import BaseModel class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): banana: float foo: str bar: BarModel m = FooBarModel(banana=3.14, foo='hello', bar={'whatever': 123}) print(dict(m)) for name, value in m: print(f'{name}: {value}') pydantic-1.2/docs/examples/exporting_models_json.py000066400000000000000000000012171357000400300227130ustar00rootroot00000000000000from datetime import datetime, timedelta from pydantic import BaseModel from pydantic.json import timedelta_isoformat class BarModel(BaseModel): whatever: int class FooBarModel(BaseModel): foo: datetime bar: BarModel m = FooBarModel(foo=datetime(2032, 6, 1, 12, 13, 14), bar={'whatever': 123}) print(m.json()) # (returns a str) class 
WithCustomEncoders(BaseModel): dt: datetime diff: timedelta class Config: json_encoders = { datetime: lambda v: v.timestamp(), timedelta: timedelta_isoformat, } m = WithCustomEncoders(dt=datetime(2032, 6, 1), diff=timedelta(hours=100)) print(m.json()) pydantic-1.2/docs/examples/exporting_models_orjson.py000066400000000000000000000010041357000400300232460ustar00rootroot00000000000000from datetime import datetime import orjson from pydantic import BaseModel def orjson_dumps(v, *, default): # orjson.dumps returns bytes, to match standard json.dumps we need to decode return orjson.dumps(v, default=default).decode() class User(BaseModel): id: int name = 'John Doe' signup_ts: datetime = None class Config: json_loads = orjson.loads json_dumps = orjson_dumps user = User.parse_raw('{"id":123,"signup_ts":1234567890,"name":"John Doe"}') print(user.json()) pydantic-1.2/docs/examples/exporting_models_pickle.py000066400000000000000000000003231357000400300232060ustar00rootroot00000000000000import pickle from pydantic import BaseModel class FooBarModel(BaseModel): a: str b: int m = FooBarModel(a='hello', b=123) print(m) data = pickle.dumps(m) print(data) m2 = pickle.loads(data) print(m2) pydantic-1.2/docs/examples/exporting_models_ujson.py000066400000000000000000000004621357000400300231010ustar00rootroot00000000000000from datetime import datetime import ujson from pydantic import BaseModel class User(BaseModel): id: int name = 'John Doe' signup_ts: datetime = None class Config: json_loads = ujson.loads user = User.parse_raw('{"id": 123,"signup_ts":1234567890,"name":"John Doe"}') print(user) pydantic-1.2/docs/examples/index_error.py000066400000000000000000000003201357000400300206120ustar00rootroot00000000000000# output-json from index_main import User # ignore-above from pydantic import ValidationError try: User(signup_ts='broken', friends=[1, 2, 'not number']) except ValidationError as e: print(e.json()) pydantic-1.2/docs/examples/index_main.py000066400000000000000000000006371357000400300204200ustar00rootroot00000000000000from datetime import datetime from typing import List from pydantic import BaseModel class User(BaseModel): id: int name = 'John Doe' signup_ts: datetime = None friends: List[int] = [] external_data = { 'id': '123', 'signup_ts': '2019-06-01 12:22', 'friends': [1, 2, '3'] } user = User(**external_data) print(user.id) print(repr(user.signup_ts)) print(user.friends) print(user.dict()) pydantic-1.2/docs/examples/model_config_alias_generator.py000066400000000000000000000005511357000400300241440ustar00rootroot00000000000000from pydantic import BaseModel def to_camel(string: str) -> str: return ''.join(word.capitalize() for word in string.split('_')) class Voice(BaseModel): name: str language_code: str class Config: alias_generator = to_camel voice = Voice(Name='Filiz', LanguageCode='tr-TR') print(voice.language_code) print(voice.dict(by_alias=True)) pydantic-1.2/docs/examples/model_config_alias_precedence.py000066400000000000000000000010401357000400300242450ustar00rootroot00000000000000from pydantic import BaseModel class Voice(BaseModel): name: str language_code: str class Config: @classmethod def alias_generator(cls, string: str) -> str: # this is the same as `alias_generator = to_camel` above return ''.join(word.capitalize() for word in string.split('_')) class Character(Voice): mood: str class Config: fields = {'mood': 'Mood', 'language_code': 'lang'} c = Character(Mood='happy', Name='Filiz', lang='tr-TR') print(c) print(c.dict(by_alias=True)) 
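# Illustration (not part of the original example): fields without an entry in
# Config.fields still use the inherited alias_generator, so 'name' keeps the
# generated alias 'Name' while 'mood' and 'language_code' use explicit ones
print(Character.__fields__['name'].alias)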
pydantic-1.2/docs/examples/model_config_dataclass.py000066400000000000000000000010151357000400300227400ustar00rootroot00000000000000from datetime import datetime from pydantic import ValidationError from pydantic.dataclasses import dataclass class MyConfig: max_anystr_length = 10 validate_assignment = True error_msg_templates = { 'value_error.any_str.max_length': 'max_length:{limit_value}', } @dataclass(config=MyConfig) class User: id: int name: str = 'John Doe' signup_ts: datetime = None user = User(id='42', signup_ts='2032-06-21T12:00') try: user.name = 'x' * 20 except ValidationError as e: print(e) pydantic-1.2/docs/examples/model_config_main.py000066400000000000000000000005001357000400300217230ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError class Model(BaseModel): v: str class Config: max_anystr_length = 10 error_msg_templates = { 'value_error.any_str.max_length': 'max_length:{limit_value}', } try: Model(v='x' * 20) except ValidationError as e: print(e) pydantic-1.2/docs/examples/models_abc.py000066400000000000000000000002601357000400300203650ustar00rootroot00000000000000import abc from pydantic import BaseModel class FooBarModel(BaseModel, abc.ABC): a: str b: int @abc.abstractmethod def my_abstract_method(self): pass pydantic-1.2/docs/examples/models_construct.py000066400000000000000000000012701357000400300216660ustar00rootroot00000000000000from pydantic import BaseModel class User(BaseModel): id: int age: int name: str = 'John Doe' original_user = User(id=123, age=32) user_data = original_user.dict() print(user_data) fields_set = original_user.__fields_set__ print(fields_set) # ... # pass user_data and fields_set to RPC or save to the database etc. # ... # you can then create a new instance of User without # re-running validation which would be unnecessary at this point: new_user = User.construct(_fields_set=fields_set, **user_data) print(repr(new_user)) print(new_user.__fields_set__) # construct can be dangerous, only use it with validated data!: bad_user = User.construct(id='dog') print(repr(bad_user)) pydantic-1.2/docs/examples/models_custom_root_field.py000066400000000000000000000005511357000400300233630ustar00rootroot00000000000000from typing import List import json from pydantic import BaseModel from pydantic.schema import schema class Pets(BaseModel): __root__: List[str] print(Pets(__root__=['dog', 'cat'])) print(Pets(__root__=['dog', 'cat']).json()) print(Pets.parse_obj(['dog', 'cat'])) print(Pets.schema()) pets_schema = schema([Pets]) print(json.dumps(pets_schema, indent=2)) pydantic-1.2/docs/examples/models_custom_root_field_parse_obj.py000066400000000000000000000007321357000400300254100ustar00rootroot00000000000000from typing import List, Dict from pydantic import BaseModel, ValidationError class Pets(BaseModel): __root__: List[str] print(Pets.parse_obj(['dog', 'cat'])) print(Pets.parse_obj({'__root__': ['dog', 'cat']})) # not recommended class PetsByName(BaseModel): __root__: Dict[str, str] print(PetsByName.parse_obj({'Otis': 'dog', 'Milo': 'cat'})) try: PetsByName.parse_obj({'__root__': {'Otis': 'dog', 'Milo': 'cat'}}) except ValidationError as e: print(e) pydantic-1.2/docs/examples/models_data_conversion.py000066400000000000000000000002151357000400300230160ustar00rootroot00000000000000from pydantic import BaseModel class Model(BaseModel): a: int b: float c: str print(Model(a=3.1415, b=' 2.72 ', c=123).dict()) 
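# Illustration (not part of the original example): this conversion can be
# lossy -- the float 3.1415 is truncated to the int 3 and the int 123 is
# turned into the string '123'; pydantic favours coercion over strict checks
m = Model(a=3.1415, b=' 2.72 ', c=123)
assert m.a == 3 and m.b == 2.72 and m.c == '123'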
pydantic-1.2/docs/examples/models_dynamic_creation.py000066400000000000000000000003041357000400300231470ustar00rootroot00000000000000from pydantic import BaseModel, create_model DynamicFoobarModel = create_model('DynamicFoobarModel', foo=(str, ...), bar=123) class StaticFoobarModel(BaseModel): foo: str bar: int = 123 pydantic-1.2/docs/examples/models_dynamic_inheritance.py000066400000000000000000000004071357000400300236400ustar00rootroot00000000000000from pydantic import BaseModel, create_model class FooModel(BaseModel): foo: str bar: int = 123 BarModel = create_model( 'BarModel', apple='russet', banana='yellow', __base__=FooModel, ) print(BarModel) print(BarModel.__fields__.keys()) pydantic-1.2/docs/examples/models_errors1.py000066400000000000000000000011211357000400300212320ustar00rootroot00000000000000from typing import List from pydantic import BaseModel, ValidationError, conint class Location(BaseModel): lat = 0.1 lng = 10.1 class Model(BaseModel): is_required: float gt_int: conint(gt=42) list_of_ints: List[int] = None a_float: float = None recursive_model: Location = None data = dict( list_of_ints=['1', 2, 'bad'], a_float='not a float', recursive_model={'lat': 4.2, 'lng': 'New York'}, gt_int=21, ) try: Model(**data) except ValidationError as e: print(e) try: Model(**data) except ValidationError as e: print(e.json()) pydantic-1.2/docs/examples/models_errors2.py000066400000000000000000000005151357000400300212410ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError, validator class Model(BaseModel): foo: str @validator('foo') def name_must_contain_space(cls, v): if v != 'bar': raise ValueError('value must be "bar"') return v try: Model(foo='ber') except ValidationError as e: print(e.errors()) pydantic-1.2/docs/examples/models_errors3.py000066400000000000000000000007241357000400300212440ustar00rootroot00000000000000from pydantic import BaseModel, PydanticValueError, ValidationError, validator class NotABarError(PydanticValueError): code = 'not_a_bar' msg_template = 'value is not "bar", got "{wrong_value}"' class Model(BaseModel): foo: str @validator('foo') def name_must_contain_space(cls, v): if v != 'bar': raise NotABarError(wrong_value=v) return v try: Model(foo='ber') except ValidationError as e: print(e.json()) pydantic-1.2/docs/examples/models_field_order.py000066400000000000000000000005441357000400300221230ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError class Model(BaseModel): a: int b = 2 c: int = 1 d = 0 e: float print(Model.__fields__.keys()) m = Model(e=2, a=1) print(m.dict()) try: Model(a='x', b='x', c='x', d='x', e='x') except ValidationError as e: error_locations = [e['loc'] for e in e.errors()] print(error_locations) pydantic-1.2/docs/examples/models_generics.py000066400000000000000000000021361357000400300214430ustar00rootroot00000000000000from typing import Generic, TypeVar, Optional, List from pydantic import BaseModel, validator, ValidationError from pydantic.generics import GenericModel DataT = TypeVar('DataT') class Error(BaseModel): code: int message: str class DataModel(BaseModel): numbers: List[int] people: List[str] class Response(GenericModel, Generic[DataT]): data: Optional[DataT] error: Optional[Error] @validator('error', always=True) def check_consistency(cls, v, values): if v is not None and values['data'] is not None: raise ValueError('must not provide both data and error') if v is None and values.get('data') is None: raise ValueError('must provide data or error') return v data = DataModel(numbers=[1, 2, 3], 
people=[]) error = Error(code=404, message='Not found') print(Response[int](data=1)) print(Response[str](data='value')) print(Response[str](data='value').dict()) print(Response[DataModel](data=data).dict()) print(Response[DataModel](error=error).dict()) try: Response[int](data='value') except ValidationError as e: print(e) pydantic-1.2/docs/examples/models_generics_naming.py000066400000000000000000000006221357000400300227720ustar00rootroot00000000000000from typing import Generic, TypeVar, Type, Any, Tuple from pydantic.generics import GenericModel DataT = TypeVar('DataT') class Response(GenericModel, Generic[DataT]): data: DataT @classmethod def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: return f'{params[0].__name__.title()}Response' print(Response[int](data=1)) print(Response[str](data='a')) pydantic-1.2/docs/examples/models_mutation.py000066400000000000000000000005171357000400300215050ustar00rootroot00000000000000from pydantic import BaseModel class FooBarModel(BaseModel): a: str b: dict class Config: allow_mutation = False foobar = FooBarModel(a='hello', b={'apple': 'pear'}) try: foobar.a = 'different' except TypeError as e: print(e) print(foobar.a) print(foobar.b) foobar.b['apple'] = 'grape' print(foobar.b) pydantic-1.2/docs/examples/models_orm_mode.py000066400000000000000000000016201357000400300214420ustar00rootroot00000000000000from typing import List from sqlalchemy import Column, Integer, String from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.ext.declarative import declarative_base from pydantic import BaseModel, constr Base = declarative_base() class CompanyOrm(Base): __tablename__ = 'companies' id = Column(Integer, primary_key=True, nullable=False) public_key = Column(String(20), index=True, nullable=False, unique=True) name = Column(String(63), unique=True) domains = Column(ARRAY(String(255))) class CompanyModel(BaseModel): id: int public_key: constr(max_length=20) name: constr(max_length=63) domains: List[constr(max_length=255)] class Config: orm_mode = True co_orm = CompanyOrm( id=123, public_key='foobar', name='Testing', domains=['example.com', 'foobar.com'] ) print(co_orm) co_model = CompanyModel.from_orm(co_orm) print(co_model) pydantic-1.2/docs/examples/models_orm_mode_recursive.py000066400000000000000000000014011357000400300235260ustar00rootroot00000000000000from typing import List from pydantic import BaseModel class PetCls: def __init__(self, *, name: str, species: str): self.name = name self.species = species class PersonCls: def __init__(self, *, name: str, age: float = None, pets: List[PetCls]): self.name = name self.age = age self.pets = pets class Pet(BaseModel): name: str species: str class Config: orm_mode = True class Person(BaseModel): name: str age: float = None pets: List[Pet] class Config: orm_mode = True bones = PetCls(name='Bones', species='dog') orion = PetCls(name='Orion', species='cat') anna = PersonCls(name='Anna', age=20, pets=[bones, orion]) anna_model = Person.from_orm(anna) print(anna_model) pydantic-1.2/docs/examples/models_parse.py000066400000000000000000000012151357000400300207530ustar00rootroot00000000000000import pickle from datetime import datetime from pydantic import BaseModel, ValidationError class User(BaseModel): id: int name = 'John Doe' signup_ts: datetime = None m = User.parse_obj({'id': 123, 'name': 'James'}) print(m) try: User.parse_obj(['not', 'a', 'dict']) except ValidationError as e: print(e) # assumes json as no content type passed m = User.parse_raw('{"id": 123, "name": "James"}') 
print(m) pickle_data = pickle.dumps({ 'id': 123, 'name': 'James', 'signup_ts': datetime(2017, 7, 14) }) m = User.parse_raw(pickle_data, content_type='application/pickle', allow_pickle=True) print(m) pydantic-1.2/docs/examples/models_recursive.py000066400000000000000000000005031357000400300216470ustar00rootroot00000000000000from typing import List from pydantic import BaseModel class Foo(BaseModel): count: int size: float = None class Bar(BaseModel): apple = 'x' banana = 'y' class Spam(BaseModel): foo: Foo bars: List[Bar] m = Spam(foo={'count': 4}, bars=[{'apple': 'x1'}, {'apple': 'x2'}]) print(m) print(m.dict()) pydantic-1.2/docs/examples/models_required_field_optional.py000066400000000000000000000004311357000400300245300ustar00rootroot00000000000000from typing import Optional from pydantic import BaseModel, Field, ValidationError class Model(BaseModel): a: Optional[int] b: Optional[int] = ... c: Optional[int] = Field(...) print(Model(b=1, c=2)) try: Model(a=1, b=2) except ValidationError as e: print(e) pydantic-1.2/docs/examples/mypy_main.py000066400000000000000000000006651357000400300203100ustar00rootroot00000000000000# dont-execute from datetime import datetime from typing import List, Optional from pydantic import BaseModel, NoneStr class Model(BaseModel): age: int first_name = 'John' last_name: NoneStr = None signup_ts: Optional[datetime] = None list_of_ints: List[int] m = Model(age=42, list_of_ints=[1, '2', b'3']) print(m.middle_name) # not a model field! Model() # will raise a validation error for age and list_of_ints pydantic-1.2/docs/examples/parse_obj_as.py000066400000000000000000000005401357000400300207250ustar00rootroot00000000000000from typing import List from pydantic import BaseModel, parse_obj_as class Item(BaseModel): id: int name: str # `item_data` could come from an API call, eg., via something like: # item_data = requests.get('https://my-api.com/items').json() item_data = [{'id': 1, 'name': 'My Item'}] items = parse_obj_as(List[Item], item_data) print(items) pydantic-1.2/docs/examples/postponed_annotations_broken.py000066400000000000000000000004311357000400300242650ustar00rootroot00000000000000from __future__ import annotations from pydantic import BaseModel def this_is_broken(): # List is defined inside the function so is not in the module's # global scope! 
from typing import List class Model(BaseModel): a: List[int] print(Model(a=(1, 2))) pydantic-1.2/docs/examples/postponed_annotations_forward_ref.py000066400000000000000000000003241357000400300253060ustar00rootroot00000000000000from typing import ForwardRef from pydantic import BaseModel Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: Foo = None Foo.update_forward_refs() print(Foo()) print(Foo(b={'a': '321'})) pydantic-1.2/docs/examples/postponed_annotations_main.py000066400000000000000000000002411357000400300237300ustar00rootroot00000000000000from __future__ import annotations from typing import List from pydantic import BaseModel class Model(BaseModel): a: List[int] print(Model(a=('1', 2, 3))) pydantic-1.2/docs/examples/postponed_annotations_self_referencing_annotations.py000066400000000000000000000004071357000400300307250ustar00rootroot00000000000000from __future__ import annotations from pydantic import BaseModel class Foo(BaseModel): a: int = 123 #: The sibling of `Foo` is referenced directly by type sibling: Foo = None Foo.update_forward_refs() print(Foo()) print(Foo(sibling={'a': '321'})) pydantic-1.2/docs/examples/postponed_annotations_self_referencing_string.py000066400000000000000000000003371357000400300277000ustar00rootroot00000000000000from pydantic import BaseModel class Foo(BaseModel): a: int = 123 #: The sibling of `Foo` is referenced by string sibling: 'Foo' = None Foo.update_forward_refs() print(Foo()) print(Foo(sibling={'a': '321'})) pydantic-1.2/docs/examples/postponed_annotations_works.py000066400000000000000000000003551357000400300241570ustar00rootroot00000000000000from __future__ import annotations from typing import List # <-- List is defined in the module's global scope from pydantic import BaseModel def this_works(): class Model(BaseModel): a: List[int] print(Model(a=(1, 2))) pydantic-1.2/docs/examples/schema_custom.py000066400000000000000000000004701357000400300211320ustar00rootroot00000000000000# output-json import json from pydantic import BaseModel from pydantic.schema import schema class Foo(BaseModel): a: int class Model(BaseModel): a: Foo # Default location for OpenAPI top_level_schema = schema([Model], ref_prefix='#/components/schemas/') print(json.dumps(top_level_schema, indent=2)) pydantic-1.2/docs/examples/schema_main.py000066400000000000000000000013171357000400300205450ustar00rootroot00000000000000# output-json from enum import Enum from pydantic import BaseModel, Field class FooBar(BaseModel): count: int size: float = None class Gender(str, Enum): male = 'male' female = 'female' other = 'other' not_given = 'not_given' class MainModel(BaseModel): """ This is the description of the main model """ foo_bar: FooBar = Field(...) 
gender: Gender = Field(None, alias='Gender') snap: int = Field( 42, title='The Snap', description='this is the value of snap', gt=30, lt=50, ) class Config: title = 'Main' # this is equivilant of json.dumps(MainModel.schema(), indent=2): print(MainModel.schema_json(indent=2)) pydantic-1.2/docs/examples/schema_top_level.py000066400000000000000000000004661357000400300216160ustar00rootroot00000000000000# output-json import json from pydantic import BaseModel from pydantic.schema import schema class Foo(BaseModel): a: str = None class Model(BaseModel): b: Foo class Bar(BaseModel): c: int top_level_schema = schema([Model, Bar], title='My Schema') print(json.dumps(top_level_schema, indent=2)) pydantic-1.2/docs/examples/schema_unenforced_constraints.py000066400000000000000000000015071357000400300244010ustar00rootroot00000000000000from pydantic import BaseModel, Field, PositiveInt try: # this won't work since PositiveInt takes precedence over the # constraints defined in Field meaning they're ignored class Model(BaseModel): foo: PositiveInt = Field(..., lt=10) except ValueError as e: print(e) # but you can set the schema attribute directly: # (Note: here exclusiveMaximum will not be enforce) class Model(BaseModel): foo: PositiveInt = Field(..., exclusiveMaximum=10) print(Model.schema()) # if you find yourself needing this, an alternative is to declare # the constraints in Field (or you could use conint()) # here both constraints will be enforced: class Model(BaseModel): # Here both constraints will be applied and the schema # will be generated correctly foo: int = Field(..., gt=0, lt=10) print(Model.schema()) pydantic-1.2/docs/examples/schema_with_example.py000066400000000000000000000005201357000400300223020ustar00rootroot00000000000000# output-json from pydantic import BaseModel class Person(BaseModel): name: str age: int class Config: schema_extra = { 'examples': [ { 'name': 'John Doe', 'age': 25, } ] } print(Person.schema_json(indent=2)) pydantic-1.2/docs/examples/settings_case_sensitive.py000066400000000000000000000002171357000400300232230ustar00rootroot00000000000000from pydantic import BaseSettings class Settings(BaseSettings): redis_host = 'localhost' class Config: case_sensitive = True pydantic-1.2/docs/examples/settings_main.py000066400000000000000000000020011357000400300211340ustar00rootroot00000000000000from typing import Set from pydantic import ( BaseModel, BaseSettings, PyObject, RedisDsn, PostgresDsn, Field ) class SubModel(BaseModel): foo = 'bar' apple = 1 class Settings(BaseSettings): auth_key: str api_key: str = Field(..., env='my_api_key') redis_dsn: RedisDsn = 'redis://user:pass@localhost:6379/1' pg_dsn: PostgresDsn = 'postgres://user:pass@localhost:5432/foobar' special_function: PyObject = 'math.cos' # to override domains: # export my_prefix_domains='["foo.com", "bar.com"]' domains: Set[str] = set() # to override more_settings: # export my_prefix_more_settings='{"foo": "x", "apple": 1}' more_settings: SubModel = SubModel() class Config: env_prefix = 'my_prefix_' # defaults to no prefix, i.e. 
"" fields = { 'auth_key': { 'env': 'my_auth_key', }, 'redis_dsn': { 'env': ['service_redis_dsn', 'redis_url'] } } print(Settings().dict()) pydantic-1.2/docs/examples/types_bare_type.py000066400000000000000000000005101357000400300214710ustar00rootroot00000000000000from typing import Type from pydantic import BaseModel, ValidationError class Foo: pass class LenientSimpleModel(BaseModel): any_class_goes: Type LenientSimpleModel(any_class_goes=int) LenientSimpleModel(any_class_goes=Foo) try: LenientSimpleModel(any_class_goes=Foo()) except ValidationError as e: print(e) pydantic-1.2/docs/examples/types_boolean.py000066400000000000000000000004101357000400300211350ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError class BooleanModel(BaseModel): bool_value: bool print(BooleanModel(bool_value=False)) print(BooleanModel(bool_value='False')) try: BooleanModel(bool_value=[]) except ValidationError as e: print(str(e)) pydantic-1.2/docs/examples/types_bytesize.py000066400000000000000000000004331357000400300213610ustar00rootroot00000000000000from pydantic import BaseModel, ByteSize class MyModel(BaseModel): size: ByteSize print(MyModel(size=52000).size) print(MyModel(size='3000 KiB').size) m = MyModel(size='50 PB') print(m.size.human_readable()) print(m.size.human_readable(decimal=True)) print(m.size.to('TiB')) pydantic-1.2/docs/examples/types_callable.py000066400000000000000000000002351357000400300212620ustar00rootroot00000000000000from typing import Callable from pydantic import BaseModel class Foo(BaseModel): callback: Callable[[int], int] m = Foo(callback=lambda x: x) print(m) pydantic-1.2/docs/examples/types_choices.py000066400000000000000000000007141357000400300211420ustar00rootroot00000000000000from enum import Enum, IntEnum from pydantic import BaseModel, ValidationError class FruitEnum(str, Enum): pear = 'pear' banana = 'banana' class ToolEnum(IntEnum): spanner = 1 wrench = 2 class CookingModel(BaseModel): fruit: FruitEnum = FruitEnum.pear tool: ToolEnum = ToolEnum.spanner print(CookingModel()) print(CookingModel(tool=2, fruit='banana')) try: CookingModel(fruit='other') except ValidationError as e: print(e) pydantic-1.2/docs/examples/types_color.py000066400000000000000000000006071357000400300206440ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError from pydantic.color import Color c = Color('ff00ff') print(c.as_named()) print(c.as_hex()) c2 = Color('green') print(c2.as_rgb_tuple()) print(c2.original()) print(repr(Color('hsl(180, 100%, 50%)'))) class Model(BaseModel): color: Color print(Model(color='purple')) try: Model(color='hello') except ValidationError as e: print(e) pydantic-1.2/docs/examples/types_constrained.py000066400000000000000000000021601357000400300220330ustar00rootroot00000000000000from decimal import Decimal from pydantic import ( BaseModel, NegativeFloat, NegativeInt, PositiveFloat, PositiveInt, conbytes, condecimal, confloat, conint, conlist, constr, Field, ) class Model(BaseModel): short_bytes: conbytes(min_length=2, max_length=10) strip_bytes: conbytes(strip_whitespace=True) short_str: constr(min_length=2, max_length=10) regex_str: constr(regex='apple (pie|tart|sandwich)') strip_str: constr(strip_whitespace=True) big_int: conint(gt=1000, lt=1024) mod_int: conint(multiple_of=5) pos_int: PositiveInt neg_int: NegativeInt big_float: confloat(gt=1000, lt=1024) unit_interval: confloat(ge=0, le=1) mod_float: confloat(multiple_of=0.5) pos_float: PositiveFloat neg_float: NegativeFloat short_list: conlist(int, min_items=1, max_items=4) 
decimal_positive: condecimal(gt=0) decimal_negative: condecimal(lt=0) decimal_max_digits_and_places: condecimal(max_digits=2, decimal_places=2) mod_decimal: condecimal(multiple_of=Decimal('0.25')) bigger_int: int = Field(..., gt=10000) pydantic-1.2/docs/examples/types_custom_type.py000066400000000000000000000007201357000400300220750ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError class StrictStr(str): @classmethod def __get_validators__(cls): yield cls.validate @classmethod def validate(cls, v): if not isinstance(v, str): raise ValueError(f'strict string: str expected not {type(v)}') return v class Model(BaseModel): s: StrictStr print(Model(s='hello')) try: print(Model(s=123)) except ValidationError as e: print(e.json()) pydantic-1.2/docs/examples/types_dt.py000066400000000000000000000005211357000400300201300ustar00rootroot00000000000000from datetime import date, datetime, time, timedelta from pydantic import BaseModel class Model(BaseModel): d: date = None dt: datetime = None t: time = None td: timedelta = None m = Model( d=1966280412345.6789, dt='2032-04-23T10:20:30.400+02:30', t=time(4, 8, 16), td='P3DT12H30M5S' ) print(m.dict()) pydantic-1.2/docs/examples/types_iterables.py000066400000000000000000000021701357000400300214750ustar00rootroot00000000000000from typing import Dict, FrozenSet, List, Optional, Sequence, Set, Tuple, Union from pydantic import BaseModel class Model(BaseModel): simple_list: list = None list_of_ints: List[int] = None simple_tuple: tuple = None tuple_of_different_types: Tuple[int, float, str, bool] = None simple_dict: dict = None dict_str_float: Dict[str, float] = None simple_set: set = None set_bytes: Set[bytes] = None frozen_set: FrozenSet[int] = None str_or_bytes: Union[str, bytes] = None none_or_str: Optional[str] = None sequence_of_ints: Sequence[int] = None compound: Dict[Union[str, bytes], List[Set[int]]] = None print(Model(simple_list=['1', '2', '3']).simple_list) print(Model(list_of_ints=['1', '2', '3']).list_of_ints) print(Model(simple_dict={'a': 1, b'b': 2}).simple_dict) print(Model(dict_str_float={'a': 1, b'b': 2}).dict_str_float) print(Model(simple_tuple=[1, 2, 3, 4]).simple_tuple) print(Model(tuple_of_different_types=[4, 3, 2, 1]).tuple_of_different_types) print(Model(sequence_of_ints=[1, 2, 3, 4]).sequence_of_ints) print(Model(sequence_of_ints=(1, 2, 3, 4)).sequence_of_ints) pydantic-1.2/docs/examples/types_json_type.py000066400000000000000000000010471357000400300215370ustar00rootroot00000000000000from typing import List from pydantic import BaseModel, Json, ValidationError class SimpleJsonModel(BaseModel): json_obj: Json class ComplexJsonModel(BaseModel): json_obj: Json[List[int]] print(SimpleJsonModel(json_obj='{"b": 1}')) print(ComplexJsonModel(json_obj='[1, 2, 3]')) try: ComplexJsonModel(json_obj=12) except ValidationError as e: print(e) try: ComplexJsonModel(json_obj='[a, b]') except ValidationError as e: print(e) try: ComplexJsonModel(json_obj='["a", "b"]') except ValidationError as e: print(e) pydantic-1.2/docs/examples/types_literal1.py000066400000000000000000000004161357000400300212410ustar00rootroot00000000000000from typing_extensions import Literal from pydantic import BaseModel, ValidationError class Pie(BaseModel): flavor: Literal['apple', 'pumpkin'] Pie(flavor='apple') Pie(flavor='pumpkin') try: Pie(flavor='cherry') except ValidationError as e: print(str(e)) pydantic-1.2/docs/examples/types_literal2.py000066400000000000000000000011631357000400300212420ustar00rootroot00000000000000from typing import ClassVar, 
List, Union from typing_extensions import Literal from pydantic import BaseModel, ValidationError class Cake(BaseModel): kind: Literal['cake'] required_utensils: ClassVar[List[str]] = ['fork', 'knife'] class IceCream(BaseModel): kind: Literal['icecream'] required_utensils: ClassVar[List[str]] = ['spoon'] class Meal(BaseModel): dessert: Union[Cake, IceCream] print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__) print(type(Meal(dessert={'kind': 'icecream'}).dessert).__name__) try: Meal(dessert={'kind': 'pie'}) except ValidationError as e: print(str(e)) pydantic-1.2/docs/examples/types_literal3.py000066400000000000000000000012561357000400300212460ustar00rootroot00000000000000from typing import Optional, Union from typing_extensions import Literal from pydantic import BaseModel class Dessert(BaseModel): kind: str class Pie(Dessert): kind: Literal['pie'] flavor: Optional[str] class ApplePie(Pie): flavor: Literal['apple'] class PumpkinPie(Pie): flavor: Literal['pumpkin'] class Meal(BaseModel): dessert: Union[ApplePie, PumpkinPie, Pie, Dessert] print(type(Meal(dessert={'kind': 'pie', 'flavor': 'apple'}).dessert).__name__) print(type(Meal(dessert={'kind': 'pie', 'flavor': 'pumpkin'}).dessert).__name__) print(type(Meal(dessert={'kind': 'pie'}).dessert).__name__) print(type(Meal(dessert={'kind': 'cake'}).dessert).__name__) pydantic-1.2/docs/examples/types_payment_card_number.py000066400000000000000000000013051357000400300235400ustar00rootroot00000000000000from datetime import date from pydantic import BaseModel from pydantic.types import PaymentCardBrand, PaymentCardNumber, constr class Card(BaseModel): name: constr(strip_whitespace=True, min_length=1) number: PaymentCardNumber exp: date @property def brand(self) -> PaymentCardBrand: return self.number.brand @property def expired(self) -> bool: return self.exp < date.today() card = Card( name='Georg Wilhelm Friedrich Hegel', number='4000000000000002', exp=date(2023, 9, 30) ) assert card.number.brand == PaymentCardBrand.visa assert card.number.bin == '400000' assert card.number.last4 == '0002' assert card.number.masked == '400000******0002' pydantic-1.2/docs/examples/types_secret_types.py000066400000000000000000000011201357000400300222260ustar00rootroot00000000000000from pydantic import BaseModel, SecretStr, SecretBytes, ValidationError class SimpleModel(BaseModel): password: SecretStr password_bytes: SecretBytes sm = SimpleModel(password='IAmSensitive', password_bytes=b'IAmSensitiveBytes') # Standard access methods will not display the secret print(sm) print(sm.password) print(sm.json()) # Use get_secret_value method to see the secret's content. 
print(sm.password.get_secret_value()) print(sm.password_bytes.get_secret_value()) try: SimpleModel(password=[1, 2, 3], password_bytes=[1, 2, 3]) except ValidationError as e: print(e) pydantic-1.2/docs/examples/types_strict.py000066400000000000000000000012521357000400300210330ustar00rootroot00000000000000from pydantic import ( BaseModel, confloat, StrictBool, StrictInt, ValidationError ) class StrictIntModel(BaseModel): strict_int: StrictInt try: StrictIntModel(strict_int=3.14159) except ValidationError as e: print(e) class ConstrainedFloatModel(BaseModel): constrained_float: confloat(strict=True, ge=0.0) try: ConstrainedFloatModel(constrained_float=3) except ValidationError as e: print(e) try: ConstrainedFloatModel(constrained_float=-1.23) except ValidationError as e: print(e) class StrictBoolModel(BaseModel): strict_bool: StrictBool try: StrictBoolModel(strict_bool='False') except ValidationError as e: print(str(e)) pydantic-1.2/docs/examples/types_type.py000066400000000000000000000005721357000400300205100ustar00rootroot00000000000000from typing import Type from pydantic import BaseModel from pydantic import ValidationError class Foo: pass class Bar(Foo): pass class Other: pass class SimpleModel(BaseModel): just_subclasses: Type[Foo] SimpleModel(just_subclasses=Foo) SimpleModel(just_subclasses=Bar) try: SimpleModel(just_subclasses=Other) except ValidationError as e: print(e) pydantic-1.2/docs/examples/types_typevar.py000066400000000000000000000006601357000400300212170ustar00rootroot00000000000000from typing import TypeVar from pydantic import BaseModel Foobar = TypeVar('Foobar') BoundFloat = TypeVar('BoundFloat', bound=float) IntStr = TypeVar('IntStr', int, str) class Model(BaseModel): a: Foobar # equivalent of ": Any" b: BoundFloat # equivalent of ": float" c: IntStr # equivalent of ": Union[int, str]" print(Model(a=[1], b=4.2, c='x')) # a may be None and is therefore optional print(Model(b=1, c=1)) pydantic-1.2/docs/examples/types_union_correct.py000066400000000000000000000004711357000400300223760ustar00rootroot00000000000000from uuid import UUID from typing import Union from pydantic import BaseModel class User(BaseModel): id: Union[UUID, int, str] name: str user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868') user_03 = User(id=user_03_uuid, name='John Doe') print(user_03) print(user_03.id) print(user_03_uuid.int) pydantic-1.2/docs/examples/types_union_incorrect.py000066400000000000000000000007161357000400300227270ustar00rootroot00000000000000from uuid import UUID from typing import Union from pydantic import BaseModel class User(BaseModel): id: Union[int, str, UUID] name: str user_01 = User(id=123, name='John Doe') print(user_01) print(user_01.id) user_02 = User(id='1234', name='John Doe') print(user_02) print(user_02.id) user_03_uuid = UUID('cf57432e-809e-4353-adbd-9d5c0d733868') user_03 = User(id=user_03_uuid, name='John Doe') print(user_03) print(user_03.id) print(user_03_uuid.int) pydantic-1.2/docs/examples/types_url_properties.py000066400000000000000000000013241357000400300226010ustar00rootroot00000000000000from pydantic import BaseModel, HttpUrl, PostgresDsn, ValidationError, validator class MyModel(BaseModel): url: HttpUrl m = MyModel(url='http://www.example.com') # the repr() method for a url will display all properties of the url print(repr(m.url)) print(m.url.scheme) print(m.url.host) print(m.url.host_type) print(m.url.port) class MyDatabaseModel(BaseModel): db: PostgresDsn @validator('db') def check_db_name(cls, v): assert v.path and len(v.path) > 1, 'database must 
be provided' return v m = MyDatabaseModel(db='postgres://user:pass@localhost:5432/foobar') print(m.db) try: MyDatabaseModel(db='postgres://user:pass@localhost:5432') except ValidationError as e: print(e) pydantic-1.2/docs/examples/types_url_punycode.py000066400000000000000000000003731357000400300222360ustar00rootroot00000000000000from pydantic import BaseModel, HttpUrl class MyModel(BaseModel): url: HttpUrl m1 = MyModel(url='http://puny£code.com') print(m1.url) print(m1.url.host_type) m2 = MyModel(url='https://www.аррӏе.com/') print(m2.url) print(m2.url.host_type) pydantic-1.2/docs/examples/types_urls.py000066400000000000000000000004761357000400300205170ustar00rootroot00000000000000from pydantic import BaseModel, HttpUrl, ValidationError class MyModel(BaseModel): url: HttpUrl m = MyModel(url='http://www.example.com') print(m.url) try: MyModel(url='ftp://invalid.url') except ValidationError as e: print(e) try: MyModel(url='not a url') except ValidationError as e: print(e) pydantic-1.2/docs/examples/validators_always.py000066400000000000000000000004461357000400300220330ustar00rootroot00000000000000from datetime import datetime from pydantic import BaseModel, validator class DemoModel(BaseModel): ts: datetime = None @validator('ts', pre=True, always=True) def set_ts_now(cls, v): return v or datetime.now() print(DemoModel()) print(DemoModel(ts='2017-11-08T14:00')) pydantic-1.2/docs/examples/validators_dataclass.py000066400000000000000000000005221357000400300224650ustar00rootroot00000000000000from datetime import datetime from pydantic import validator from pydantic.dataclasses import dataclass @dataclass class DemoDataclass: ts: datetime = None @validator('ts', pre=True, always=True) def set_ts_now(cls, v): return v or datetime.now() print(DemoDataclass()) print(DemoDataclass(ts='2017-11-08T14:00')) pydantic-1.2/docs/examples/validators_pre_item.py000066400000000000000000000024331357000400300223350ustar00rootroot00000000000000from typing import List from pydantic import BaseModel, ValidationError, validator class DemoModel(BaseModel): square_numbers: List[int] = [] cube_numbers: List[int] = [] # '*' is the same as 'cube_numbers', 'square_numbers' here: @validator('*', pre=True) def split_str(cls, v): if isinstance(v, str): return v.split('|') return v @validator('cube_numbers', 'square_numbers') def check_sum(cls, v): if sum(v) > 42: raise ValueError(f'sum of numbers greater than 42') return v @validator('square_numbers', each_item=True) def check_squares(cls, v): assert v ** 0.5 % 1 == 0, f'{v} is not a square number' return v @validator('cube_numbers', each_item=True) def check_cubes(cls, v): # 64 ** (1 / 3) == 3.9999999999999996 (!) 
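# so the assert below wrongly rejects 64; a more robust check would be round(v ** (1 / 3)) ** 3 == v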
# this is not a good way of checking cubes assert v ** (1 / 3) % 1 == 0, f'{v} is not a cubed number' return v print(DemoModel(square_numbers=[1, 4, 9])) print(DemoModel(square_numbers='1|4|16')) print(DemoModel(square_numbers=[16], cube_numbers=[8, 27])) try: DemoModel(square_numbers=[1, 4, 2]) except ValidationError as e: print(e) try: DemoModel(cube_numbers=[27, 27]) except ValidationError as e: print(e) pydantic-1.2/docs/examples/validators_root.py000066400000000000000000000017001357000400300215100ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError, root_validator class UserModel(BaseModel): username: str password1: str password2: str @root_validator(pre=True) def check_card_number_omitted(cls, values): assert 'card_number' not in values, 'card_number should not be included' return values @root_validator def check_passwords_match(cls, values): pw1, pw2 = values.get('password1'), values.get('password2') if pw1 is not None and pw2 is not None and pw1 != pw2: raise ValueError('passwords do not match') return values print(UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn')) try: UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn2') except ValidationError as e: print(e) try: UserModel(username='scolvin', password1='zxcvbn', password2='zxcvbn', card_number='1234') except ValidationError as e: print(e) pydantic-1.2/docs/examples/validators_simple.py000066400000000000000000000016601357000400300220230ustar00rootroot00000000000000from pydantic import BaseModel, ValidationError, validator class UserModel(BaseModel): name: str username: str password1: str password2: str @validator('name') def name_must_contain_space(cls, v): if ' ' not in v: raise ValueError('must contain a space') return v.title() @validator('password2') def passwords_match(cls, v, values, **kwargs): if 'password1' in values and v != values['password1']: raise ValueError('passwords do not match') return v @validator('username') def username_alphanumeric(cls, v): assert v.isalnum(), 'must be alphanumeric' return v print(UserModel(name='samuel colvin', username='scolvin', password1='zxcvbn', password2='zxcvbn')) try: UserModel(name='samuel', username='scolvin', password1='zxcvbn', password2='zxcvbn2') except ValidationError as e: print(e) pydantic-1.2/docs/extra/000077500000000000000000000000001357000400300152325ustar00rootroot00000000000000pydantic-1.2/docs/extra/ad.css000066400000000000000000000004551357000400300163340ustar00rootroot00000000000000#bsa-cpc { border-radius: .1rem; padding: 0.525rem 0.6rem; min-height: 80px; } @media screen and (max-width: 799px) { #bsa-cpc { display: none; } } #bsa-cpc.loaded { background: hsla(0, 0%, 92.5%, 0.5); } #_default_ a._default_, #_default_ .default-text { width: 100% !important; } pydantic-1.2/docs/extra/ad.js000066400000000000000000000012371357000400300161570ustar00rootroot00000000000000function main () { const ad_el = document.getElementById('bsa-cpc') if (!ad_el || innerWidth < 800) { // if no ad element or element hidden, don't load buysellads return } const script = document.createElement('script') script.onload = () => { if (_bsa.isMobile()) { // bsa doesn't show ads on mobile, hide the box ad_el.remove() return } _bsa.init('default', 'CK7ITKJU', 'placement:pydantic-docshelpmanualio', { target: '#bsa-cpc', align: 'horizontal', }) ad_el.classList.add('loaded') } script.src = 'https://m.servedby-buysellads.com/monetization.js' document.head.appendChild(script) } main()
pydantic-1.2/docs/extra/redirects.js000066400000000000000000000122111357000400300175510ustar00rootroot00000000000000// redirects from the old sphinx docs site to the new // redirects have to be done like this since anchor fragments aren't sent by the browser so server-side redirects // wouldn't work const lookup = { 'install': '/install', 'usage': '/usage/models/', 'pep-484-types': '/usage/types/#typing-iterables', 'id1': '/usage/dataclasses/', 'nested-dataclasses': '/usage/dataclasses/#nested-dataclasses', 'initialize-hooks': '/usage/dataclasses/#initialize-hooks', 'choices': '/usage/types/#enums-and-choices', 'validators': '/usage/validators/', 'pre-and-per-item-validators': '/usage/validators/#pre-and-per-item-validators', 'pre-and-whole-validators': '/usage/validators/#pre-and-per-item-validators', 'validate-always': '/usage/validators/#validate-always', 'root-validators': '/usage/validators/#root-validators', 'id3': '/usage/validators/#root-validators', 'dataclass-validators': '/usage/validators/#dataclass-validators', 'field-checks': '/usage/validators/#field-checks', 'recursive-models': '/usage/models/#recursive-models', 'id4': '/usage/models/#recursive-models', 'self-referencing-models': '/usage/postponed_annotations/#self-referencing-models', 'self-ref-models': '/usage/postponed_annotations/#self-referencing-models', 'generic-models': '/usage/models/#generic-models', 'id5': '/usage/models/#generic-models', 'orm-mode-aka-arbitrary-class-instances': '/usage/models/#orm-mode-aka-arbitrary-class-instances', 'orm-mode': '/usage/models/#orm-mode-aka-arbitrary-class-instances', 'schema-creation': '/usage/schema/', 'schema': '/usage/schema/', 'error-handling': '/usage/models/#error-handling', 'datetime-types': '/usage/types/#datetime-types', 'exotic-types': '/usage/types/', 'booleans': '/usage/types/#booleans', 'strictbool': '/usage/types/#booleans', 'callable': '/usage/types/#callable', 'urls': '/usage/types/#urls', 'url-properties': '/usage/types/#url-properties', 'international-domains': '/usage/types/#international-domains', 'int-domains': '/usage/types/#international-domains', 'underscores-in-hostnames': '/usage/types/#underscores-in-hostnames', 'color-type': '/usage/types/#color-type', 'secret-types': '/usage/types/#secret-types', 'strict-types': '/usage/types/#strict-types', 'json-type': '/usage/types/#json-type', 'literal-type': '/usage/types/#literal-type', 'payment-card-numbers': '/usage/types/#payment-card-numbers', 'type-type': '/usage/types/#type', 'custom-data-types': '/usage/types/#custom-data-types', 'custom-root-types': '/usage/models/#custom-root-types', 'custom-root': '/usage/models/#custom-root-types', 'helper-functions': '/usage/models/#helper-functions', 'model-config': '/usage/model_config/', 'config': '/usage/model_config/', 'alias-generator': '/usage/model_config/#alias-generator', 'settings': '/usage/settings/', 'id6': '/usage/settings/', 'dynamic-model-creation': '/usage/models/#dynamic-model-creation', 'usage-with-mypy': '/usage/mypy/', 'usage-mypy': '/usage/mypy/', 'strict-optional': '/usage/mypy/#strict-optional', 'required-fields-and-mypy': '/usage/models/#required-fields', 'usage-mypy-required': '/usage/models/#required-fields', 'faux-immutability': '/usage/models/#faux-immutability', 'exporting-models': '/usage/exporting_models/', 'copying': '/usage/exporting_models/', 'serialisation': '/usage/exporting_models/', 'model-dict': '/usage/exporting_models/#modeldict', 'dict-model-and-iteration': '/usage/exporting_models/#dictmodel-and-iteration', 'model-copy': 
'/usage/exporting_models/#modelcopy', 'model-json': '/usage/exporting_models/#modeljson', 'json-dump': '/usage/exporting_models/#modeljson', 'pickle-dumps-model': '/usage/exporting_models/#pickledumpsmodel', 'pickle-serialisation': '/usage/exporting_models/#pickledumpsmodel', 'advanced-include-and-exclude': '/usage/exporting_models/#advanced-include-and-exclude', 'include-exclude': '/usage/exporting_models/#advanced-include-and-exclude', 'custom-json-de-serialisation': '/usage/exporting_models/#custom-json-deserialisation', 'json-encode-decode': '/usage/exporting_models/#custom-json-deserialisation', 'abstract-base-classes': '/usage/models/#abstract-base-classes', 'postponed-annotations': '/usage/postponed_annotations/', 'id7': '/usage/postponed_annotations/', 'id8': '/usage/postponed_annotations/', 'usage-of-union-in-annotations-and-type-order': '/usage/types/#unions', 'benchmarks': '/benchmarks/', 'benchmarks-tag': '/benchmarks/', 'contributing-to-pydantic': '/contributing/', 'pycharm-plugin': '/pycharm_plugin/', 'id9': '/pycharm_plugin/', 'history': '/changelog/', } function main() { const fragment = location.hash.substr(1) if (fragment === '' || location.pathname !== '/') { // no fragment or not called from root return } let new_url = lookup[fragment] if (!new_url) { if (!fragment.startsWith('v')) { return } // change the fragments for versions - sphinx replaces dots with a dash while mkdocs removes dots new_url = '/changelog/#' + fragment .replace(/(v\d)-(\d+)-(\d+-\d{4})/, '$1$2$3') .replace(/(v\d)-(\d+-\d{4})/, '$1$2') } window.location = new_url } main() pydantic-1.2/docs/extra/terminal.css000066400000000000000000000010231357000400300175530ustar00rootroot00000000000000.terminal { background: #300a24; border-radius: 4px; padding: 5px 10px; } pre.terminal-content { display: inline-block; line-height: 1.3 !important; white-space: pre-wrap; word-wrap: break-word; background: #300a24 !important; color: #d0d0d0 !important; } .ansi2 { font-weight: lighter; } .ansi3 { font-style: italic; } .ansi32 { color: #00aa00; } .ansi34 { color: #5656fe; } .ansi35 { color: #E850A8; } .ansi38-1 { color: #cf0000; } .ansi38-5 { color: #E850A8; } .ansi38-68 { color: #2a54a8; } pydantic-1.2/docs/favicon.png000066400000000000000000000015731357000400300162500ustar00rootroot00000000000000[binary PNG image data omitted]pydantic-1.2/docs/index.md000066400000000000000000000140211357000400300155360ustar00rootroot00000000000000[![BuildStatus](https://travis-ci.org/samuelcolvin/pydantic.svg?branch=master)](https://travis-ci.org/samuelcolvin/pydantic) [![Coverage](https://codecov.io/gh/samuelcolvin/pydantic/branch/master/graph/badge.svg)](https://codecov.io/gh/samuelcolvin/pydantic) [![pypi](https://img.shields.io/pypi/v/pydantic.svg)](https://pypi.python.org/pypi/pydantic) [![CondaForge](https://img.shields.io/conda/v/conda-forge/pydantic.svg)](https://anaconda.org/conda-forge/pydantic) [![downloads](https://img.shields.io/pypi/dm/pydantic.svg)](https://pypistats.org/packages/pydantic)
[![license](https://img.shields.io/github/license/samuelcolvin/pydantic.svg)](https://github.com/samuelcolvin/pydantic/blob/master/LICENSE) {!.version.md!} Data validation and settings management using python type annotations. *pydantic* enforces type hints at runtime, and provides user-friendly errors when data is invalid. Define how data should be in pure, canonical python; validate it with *pydantic*. !!! note "Version 0.32 Documentation" This documentation refers to Version 1 of *pydantic*, which has just been released. Docs for **v0.32.2** (the previous release) are available [here](https://5d584fcca7c9b70007d1c997--pydantic-docs.netlify.com). ## Example ```py {!.tmp_examples/index_main.py!} ``` _(This script is complete, it should run "as is")_ What's going on here: * `id` is of type int; the annotation-only declaration tells *pydantic* that this field is required. Strings, bytes or floats will be coerced to ints if possible; otherwise an exception will be raised. * `name` is inferred as a string from the provided default; because it has a default, it is not required. * `signup_ts` is a datetime field which is not required (and takes the value ``None`` if it's not supplied). *pydantic* will process either a unix timestamp int (e.g. `1496498400`) or a string representing the date & time. * `friends` uses python's typing system, and requires a list of inputs. As with `id`, integer-like objects will be converted to integers. If validation fails, pydantic will raise an error with a breakdown of what was wrong: ```py {!.tmp_examples/index_error.py!} ``` outputs: ```json {!.tmp_examples/index_error.json!} ``` ## Rationale So *pydantic* uses some cool new language features, but why should I actually go and use it? **plays nicely with your IDE/linter/brain** : There's no new schema definition micro-language to learn. If you know how to use python type hints, you know how to use *pydantic*. Data structures are just instances of classes you define with type annotations, so auto-completion, linting, [mypy](usage/mypy.md), IDEs (especially [PyCharm](pycharm_plugin.md)), and your intuition should all work properly with your validated data. **dual use** : *pydantic's* [BaseSettings](usage/settings.md) class allows *pydantic* to be used in both a "validate this request data" context and in a "load my system settings" context. The main differences are that system settings can be read from environment variables, and more complex objects like DSNs and python objects are often required. **fast** : In [benchmarks](benchmarks.md) *pydantic* is faster than all other tested libraries. **validate complex structures** : use of [recursive *pydantic* models](usage/models.md#recursive-models), `typing`'s [standard types](usage/types.md#standard-library-types) (e.g. `List`, `Tuple`, `Dict` etc.) and [validators](usage/validators.md) allow complex data schemas to be clearly and easily defined, validated, and parsed. **extensible** : *pydantic* allows [custom data types](usage/types.md#custom-data-types) to be defined or you can extend validation with methods on a model decorated with the [`validator`](usage/validators.md) decorator. **dataclasses integration** : As well as `BaseModel`, *pydantic* provides a [`dataclass`](usage/dataclasses.md) decorator which creates (almost) vanilla python dataclasses with input data parsing and validation.
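For example, a minimal sketch of the decorator in action (the model and values here are purely illustrative):

```py
from datetime import datetime
from pydantic.dataclasses import dataclass


@dataclass
class User:
    id: int
    signup_ts: datetime = None


# input data is parsed and validated, much like with BaseModel
user = User(id='42', signup_ts='2019-06-01 12:22')
print(user)
```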
## Using Pydantic Hundreds of organisations and packages are using *pydantic*, including: [FastAPI](https://fastapi.tiangolo.com/) : a high performance API framework, easy to learn, fast to code and ready for production, based on *pydantic* and Starlette. [Project Jupyter](https://jupyter.org/) : developers of the Jupyter notebook are using *pydantic* [for subprojects](https://github.com/samuelcolvin/pydantic/issues/773). **Microsoft** : are using *pydantic* (via FastAPI) for [numerous services](https://github.com/tiangolo/fastapi/pull/26#issuecomment-463768795), some of which are "getting integrated into the core Windows product and some Office products." **Amazon Web Services** : are using *pydantic* in [gluon-ts](https://github.com/awslabs/gluon-ts), an open-source probabilistic time series modeling library. **The NSA** : are using *pydantic* in [WALKOFF](https://github.com/nsacyber/WALKOFF), an open-source automation framework. **Uber** : are using *pydantic* in [Ludwig](https://github.com/uber/ludwig), an open-source TensorFlow wrapper. **Cuenca** : are a Mexican neobank that uses *pydantic* for several internal tools (including API validation) and for open source projects like [stpmex](https://github.com/cuenca-mx/stpmex-python), which is used to process real-time, 24/7, inter-bank transfers in Mexico. [The Molecular Sciences Software Institute](https://molssi.org) : are using *pydantic* in [QCFractal](https://github.com/MolSSI/QCFractal), a massively distributed compute framework for quantum chemistry. For a more comprehensive list of open-source projects using *pydantic* see the [list of dependents on github](https://github.com/samuelcolvin/pydantic/network/dependents). ## Testimonials [Python Bytes Podcast](https://pythonbytes.fm/episodes/show/157/oh-hai-pandas-hold-my-hand) : "*This is a sweet simple framework that solves some really nice problems... Data validations and settings management using python type annotations, and it's the python type annotations that makes me really extra happy... It works automatically with all the IDE's you already have.*" --Michael Kennedypydantic-1.2/docs/install.md000066400000000000000000000035201357000400300160770ustar00rootroot00000000000000Installation is as simple as: ```bash pip install pydantic ``` *pydantic* has no required dependencies except python 3.6, 3.7, or 3.8 (and the dataclasses package in python 3.6). If you've got python 3.6+ and `pip` installed, you're good to go. Pydantic is also available on [conda](https://www.anaconda.com) under the [conda-forge](https://conda-forge.org) channel: ```bash conda install pydantic -c conda-forge ``` *pydantic* can optionally be compiled with [cython](https://cython.org/) which should give a 30-50% performance improvement. `manylinux` binaries exist for python 3.6, 3.7, and 3.8, so if you're installing from PyPI on linux, you should get a compiled version of *pydantic* with no extra work. If you're installing manually, install `cython` before installing *pydantic* and compilation should happen automatically. Compilation with cython [is not tested](https://github.com/samuelcolvin/pydantic/issues/555) on windows or mac. To test if *pydantic* is compiled run: ```py import pydantic print('compiled:', pydantic.compiled) ``` If you require email validation, you can add [email-validator](https://github.com/JoshData/python-email-validator) as an optional dependency.
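For example, a rough sketch of what the email extra enables (the model and address here are illustrative):

```py
from pydantic import BaseModel, EmailStr


class User(BaseModel):
    # EmailStr requires the email-validator package to be installed
    email: EmailStr


print(User(email='samuel@example.com'))
```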
Similarly, use of `Literal` prior to python 3.8 relies on [typing-extensions](https://pypi.org/project/typing-extensions/): ```bash pip install pydantic[email] # or pip install pydantic[typing_extensions] # or just pip install pydantic[email,typing_extensions] ``` Of course, you can also install these requirements manually with `pip install email-validator` and/or `pip install typing_extensions`. And if you prefer to install *pydantic* directly from the repository: ```bash pip install git+git://github.com/samuelcolvin/pydantic@master#egg=pydantic # or with extras pip install git+git://github.com/samuelcolvin/pydantic@master#egg=pydantic[email,typing_extensions] ``` pydantic-1.2/docs/logo-white.svg000066400000000000000000000011321357000400300167030ustar00rootroot00000000000000 pydantic-1.2/docs/mypy_plugin.md000066400000000000000000000142601357000400300170100ustar00rootroot00000000000000Pydantic works well with [mypy](http://mypy-lang.org/) right [out of the box](usage/mypy.md). However, Pydantic also ships with a mypy plugin that adds a number of important pydantic-specific features to mypy that improve its ability to type-check your code. For example, consider the following script: ```py {!.tmp_examples/mypy_main.py!} ``` Without any special configuration, mypy catches one of the errors (see [here](usage/mypy.md) for usage instructions): ``` 13: error: "Model" has no attribute "middle_name" ``` But [with the plugin enabled](#enabling-the-plugin), it catches both: ``` 13: error: "Model" has no attribute "middle_name" 16: error: Missing named argument "age" for "Model" 16: error: Missing named argument "list_of_ints" for "Model" ``` With the pydantic mypy plugin, you can fearlessly refactor your models knowing mypy will catch any mistakes if your field names or types change. There are other benefits too! See below for more details. ### Plugin Capabilities #### Generate a signature for `Model.__init__` * Any required fields that don't have dynamically-determined aliases will be included as required keyword arguments. * If `Config.allow_population_by_field_name=True`, the generated signature will use the field names, rather than aliases. * For subclasses of [`BaseSettings`](usage/settings.md), all fields are treated as optional since they may be read from the environment. * If `Config.extra="forbid"` and you don't make use of dynamically-determined aliases, the generated signature will not allow unexpected inputs. * **Optional:** If the [`init_forbid_extra` **plugin setting**](#plugin-settings) is set to `True`, unexpected inputs to `__init__` will raise errors even if `Config.extra` is not `"forbid"`. * **Optional:** If the [`init_typed` **plugin setting**](#plugin-settings) is set to `True`, the generated signature will use the types of the model fields (otherwise they will be annotated as `Any` to allow parsing). #### Generate a typed signature for `Model.construct` * The [`construct`](usage/models.md#creating-models-without-validation) method is a faster alternative to `__init__` when input data is known to be valid and does not need to be parsed. But because this method performs no runtime validation, static checking is important to detect errors. #### Respect `Config.allow_mutation` * If `Config.allow_mutation` is `False`, you'll get a mypy error if you try to change the value of a model field; cf. [faux immutability](usage/models.md#faux-immutability). #### Respect `Config.orm_mode` * If `Config.orm_mode` is `False`, you'll get a mypy error if you try to call `.from_orm()`; cf. 
[ORM mode](usage/models.md#orm-mode-aka-arbitrary-class-instances) #### Generate a signature for `dataclasses` * Classes decorated with [`@pydantic.dataclasses.dataclass`](usage/dataclasses.md) are type checked the same as standard python dataclasses. * The `@pydantic.dataclasses.dataclass` decorator accepts a `config` keyword argument which has the same meaning as [the `Config` sub-class](usage/model_config.md). ### Optional Capabilities #### Prevent the use of required dynamic aliases * If the [`warn_required_dynamic_aliases` **plugin setting**](#plugin-settings) is set to `True`, you'll get a mypy error any time you use a dynamically-determined alias or alias generator on a model with `Config.allow_population_by_field_name=False`. * This is important because if such aliases are present, mypy cannot properly type check calls to `__init__`. In this case, it will default to treating all arguments as optional. #### Prevent the use of untyped fields * If the [`warn_untyped_fields` **plugin setting**](#plugin-settings) is set to `True`, you'll get a mypy error any time you create a field on a model without annotating its type. * This is important because non-annotated fields may result in [**validators being applied in a surprising order**](usage/models.md#field-ordering). * In addition, mypy may not be able to correctly infer the type of the field, and may miss checks or raise spurious errors. ### Enabling the Plugin To enable the plugin, just add `pydantic.mypy` to the list of plugins in your [mypy config file](https://mypy.readthedocs.io/en/latest/config_file.html) (this could be `mypy.ini` or `setup.cfg`). To get started, all you need to do is create a `mypy.ini` file with the following contents: ```ini [mypy] plugins = pydantic.mypy ``` See the [mypy usage](usage/mypy.md) and [plugin configuration](#configuring-the-plugin) docs for more details. ### Plugin Settings The plugin offers a few optional strictness flags if you want even stronger checks: * `init_forbid_extra` If enabled, disallow extra arguments to the `__init__` call even when `Config.extra` is not `"forbid"`. * `init_typed` If enabled, include the field types as type hints in the generated signature for the `__init__` method. This means that you'll get mypy errors if you pass an argument that is not already the right type to `__init__`, even if parsing could safely convert the type. * `warn_required_dynamic_aliases` If enabled, raise a mypy error whenever a model is created for which calls to its `__init__` or `construct` methods require the use of aliases that cannot be statically determined. This is the case, for example, if `allow_population_by_field_name=False` and the model uses an alias generator. * `warn_untyped_fields` If enabled, raise a mypy error whenever a field is declared on a model without explicitly specifying its type. #### Configuring the Plugin To change the values of the plugin settings, create a section in your mypy config file called `[pydantic-mypy]`, and add any key-value pairs for settings you want to override.
A `mypy.ini` file with all plugin strictness flags enabled (and some other mypy strictness flags, too) might look like: ```ini [mypy] plugins = pydantic.mypy follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True [pydantic-mypy] init_forbid_extra = True init_typed = True warn_required_dynamic_aliases = True warn_untyped_fields = True ``` pydantic-1.2/docs/pycharm_plugin.md000066400000000000000000000016671357000400300174640ustar00rootroot00000000000000While pydantic will work well with any IDE out of the box, a [PyCharm plugin](https://plugins.jetbrains.com/plugin/12861-pydantic) offering improved pydantic integration is available on the JetBrains Plugins Repository for PyCharm. You can install the plugin for free from the plugin marketplace (PyCharm's Preferences -> Plugin -> Marketplace -> search "pydantic"). The plugin currently supports the following features: * For `pydantic.BaseModel.__init__`: * Inspection * Autocompletion * Type-checking * For fields of `pydantic.BaseModel`: * Refactor-renaming fields updates `__init__` calls, and affects sub- and super-classes * Refactor-renaming `__init__` keyword arguments updates field names, and affects sub- and super-classes More information can be found on the [official plugin page](https://plugins.jetbrains.com/plugin/12861-pydantic) and [Github repository](https://github.com/koxudaxi/pydantic-pycharm-plugin). pydantic-1.2/docs/requirements.txt000066400000000000000000000003101357000400300173650ustar00rootroot00000000000000ansi2html==1.5.2 mkdocs==1.0.4 pygments==2.5.1 mkdocs-exclude==1.0.2 mkdocs-material==4.5.0 markdown-include==0.5.1 sqlalchemy # pyup: ignore orjson # pyup: ignore ujson # pyup: ignore pydantic-1.2/docs/theme/000077500000000000000000000000001357000400300152115ustar00rootroot00000000000000pydantic-1.2/docs/theme/main.html000066400000000000000000000002651357000400300170260ustar00rootroot00000000000000{% extends "base.html" %} {% block content %} {# no ad on the home page #} {% if not page.is_index %}
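<div id="bsa-cpc"></div>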
{% endif %} {{ super() }} {% endblock %} pydantic-1.2/docs/usage/000077500000000000000000000000001357000400300152135ustar00rootroot00000000000000pydantic-1.2/docs/usage/dataclasses.md000066400000000000000000000050451357000400300200300ustar00rootroot00000000000000If you don't want to use pydantic's `BaseModel` you can instead get the same data validation on standard [dataclasses](https://docs.python.org/3/library/dataclasses.html) (introduced in python 3.7). Dataclasses work in python 3.6 using the [dataclasses backport package](https://github.com/ericvsmith/dataclasses). ```py {!.tmp_examples/dataclasses_main.py!} ``` _(This script is complete, it should run "as is")_ !!! note Keep in mind that `pydantic.dataclasses.dataclass` is a drop-in replacement for `dataclasses.dataclass` with validation, **not** a replacement for `pydantic.BaseModel`. There are cases where subclassing `pydantic.BaseModel` is the better choice. For more information and discussion see [samuelcolvin/pydantic#710](https://github.com/samuelcolvin/pydantic/issues/710). You can use all the standard pydantic field types, and the resulting dataclass will be identical to the one created by the standard library `dataclass` decorator. The underlying model and its schema can be accessed through `__pydantic_model__`. Also, fields that require a `default_factory` can be specified by a `dataclasses.field`. ```py {!.tmp_examples/dataclasses_default_schema.py!} ``` _(This script is complete, it should run "as is")_ `pydantic.dataclasses.dataclass`'s arguments are the same as the standard decorator's, except for one extra keyword argument, `config`, which has the same meaning as [Config](model_config.md). !!! warning After v1.2, [The Mypy plugin](/mypy_plugin.md) must be installed to type check pydantic dataclasses. For more information about combining validators with dataclasses, see [dataclass validators](validators.md#dataclass-validators). ## Nested dataclasses Nested dataclasses are supported both in dataclasses and normal models. ```py {!.tmp_examples/dataclasses_nested.py!} ``` _(This script is complete, it should run "as is")_ Dataclass attributes can be populated by tuples, dictionaries or instances of the dataclass itself. ## Initialize hooks When you initialize a dataclass, it is possible to execute code *after* validation with the help of `__post_init_post_parse__`. This is not the same as `__post_init__`, which executes code *before* validation. ```py {!.tmp_examples/dataclasses_post_init_post_parse.py!} ``` _(This script is complete, it should run "as is")_ Since version **v1.0**, any fields annotated with `dataclasses.InitVar` are passed to both `__post_init__` *and* `__post_init_post_parse__`. ```py {!.tmp_examples/dataclasses_initvars.py!} ``` _(This script is complete, it should run "as is")_ pydantic-1.2/docs/usage/devtools.md000066400000000000000000000013741357000400300174010ustar00rootroot00000000000000!!! note **Admission:** I (the primary developer of *pydantic*) also develop python-devtools. [python-devtools](https://python-devtools.helpmanual.io/) (`pip install devtools`) provides a number of tools which are useful during python development, including `debug()`, an alternative to `print()` which formats output in a way that should be easier to read than `print`, as well as giving information about which file/line the print statement is on and what value was printed. *pydantic* integrates with *devtools* by implementing the `__pretty__` method on most public classes.
In particular `debug()` is useful when inspecting models: ```py {!.tmp_examples/devtools_main.py!} ``` Will output in your terminal: {!.tmp_examples/devtools_main.html!} pydantic-1.2/docs/usage/exporting_models.md000066400000000000000000000136401357000400300211230ustar00rootroot00000000000000As well as accessing model attributes directly via their names (e.g. `model.foobar`), models can be converted and exported in a number of ways: ## `model.dict(...)` This is the primary way of converting a model to a dictionary. Sub-models will be recursively converted to dictionaries. Arguments: * `include`: fields to include in the returned dictionary; see [below](#advanced-include-and-exclude) * `exclude`: fields to exclude from the returned dictionary; see [below](#advanced-include-and-exclude) * `by_alias`: whether field aliases should be used as keys in the returned dictionary; default `False` * `exclude_unset`: whether fields which were not explicitly set when creating the model should be excluded from the returned dictionary; default `False`. Prior to **v1.0**, `exclude_unset` was known as `skip_defaults`; use of `skip_defaults` is now deprecated * `exclude_defaults`: whether fields which are equal to their default values (whether set or otherwise) should be excluded from the returned dictionary; default `False` * `exclude_none`: whether fields which are equal to `None` should be excluded from the returned dictionary; default `False` Example: ```py {!.tmp_examples/exporting_models_dict.py!} ``` _(This script is complete, it should run "as is")_ ## `dict(model)` and iteration *pydantic* models can also be converted to dictionaries using `dict(model)`, and you can also iterate over a model's fields using `for field_name, value in model:`. With this approach the raw field values are returned, so sub-models will not be converted to dictionaries. Example: ```py {!.tmp_examples/exporting_models_iterate.py!} ``` _(This script is complete, it should run "as is")_ ## `model.copy(...)` `copy()` allows models to be duplicated, which is particularly useful for immutable models. Arguments: * `include`: fields to include in the copied model; see [below](#advanced-include-and-exclude) * `exclude`: fields to exclude from the copied model; see [below](#advanced-include-and-exclude) * `update`: a dictionary of values to change when creating the copied model * `deep`: whether to make a deep copy of the new model; default `False` Example: ```py {!.tmp_examples/exporting_models_copy.py!} ``` _(This script is complete, it should run "as is")_ ## `model.json(...)` The `.json()` method will serialise a model to JSON. Typically, `.json()` in turn calls `.dict()` and serialises its result. (For models with a [custom root type](models.md#custom-root-types), after calling `.dict()`, only the value for the `__root__` key is serialised.) Serialisation can be customised on a model using the `json_encoders` config property; the keys should be types, and the values should be functions which serialise that type (see the example below). Arguments: * `include`: fields to include in the returned dictionary; see [below](#advanced-include-and-exclude) * `exclude`: fields to exclude from the returned dictionary; see [below](#advanced-include-and-exclude) * `by_alias`: whether field aliases should be used as keys in the returned dictionary; default `False` * `exclude_unset`: whether fields which were not set when creating the model and have their default values should be excluded from the returned dictionary; default `False`.
Prior to **v1.0**, `exclude_unset` was known as `skip_defaults`; use of `skip_defaults` is now deprecated * `exclude_defaults`: whether fields which are equal to their default values (whether set or otherwise) should be excluded from the returned dictionary; default `False` * `exclude_none`: whether fields which are equal to `None` should be excluded from the returned dictionary; default `False` * `encoder`: a custom encoder function passed to the `default` argument of `json.dumps()`; defaults to a custom encoder designed to take care of all common types * `**dumps_kwargs`: any other keyword arguments are passed to `json.dumps()`, e.g. `indent`. Example: ```py {!.tmp_examples/exporting_models_json.py!} ``` _(This script is complete, it should run "as is")_ By default, `timedelta` is encoded as a simple float of total seconds. The `timedelta_isoformat` is provided as an optional alternative which implements ISO 8601 time diff encoding. See [below](#custom-json-deserialisation) for details on how to use other libraries for more performant JSON encoding and decoding. ## `pickle.dumps(model)` Using the same plumbing as `copy()`, *pydantic* models support efficient pickling and unpickling. ```py {!.tmp_examples/exporting_models_pickle.py!} ``` _(This script is complete, it should run "as is")_ ## Advanced include and exclude The `dict`, `json`, and `copy` methods support `include` and `exclude` arguments which can either be sets or dictionaries. This allows nested selection of which fields to export: ```py {!.tmp_examples/exporting_models_exclude1.py!} ``` The ellipsis (``...``) indicates that we want to exclude or include an entire key, just as if we included it in a set. Of course, the same can be done at any depth level: ```py {!.tmp_examples/exporting_models_exclude2.py!} ``` The same holds for the `json` and `copy` methods. ## Custom JSON (de)serialisation To improve the performance of encoding and decoding JSON, alternative JSON implementations (e.g. [ujson](https://pypi.python.org/pypi/ujson)) can be used via the `json_loads` and `json_dumps` properties of `Config`. ```py {!.tmp_examples/exporting_models_ujson.py!} ``` _(This script is complete, it should run "as is")_ `ujson` generally cannot be used to dump JSON since it doesn't support encoding of objects like datetimes and does not accept a `default` fallback function argument. To do this, you may use another library like [orjson](https://github.com/ijl/orjson). ```py {!.tmp_examples/exporting_models_orjson.py!} ``` _(This script is complete, it should run "as is")_ Note that `orjson` takes care of `datetime` encoding natively, making it faster than `json.dumps` but meaning you cannot always customise the encoding using `Config.json_encoders`. pydantic-1.2/docs/usage/model_config.md000066400000000000000000000100411357000400300201560ustar00rootroot00000000000000Behaviour of pydantic can be controlled via the `Config` class on a model. Options: **`title`** : the title for the generated JSON Schema **`anystr_strip_whitespace`** : whether to strip leading and trailing whitespace for str & byte types (default: `False`) **`min_anystr_length`** : the min length for str & byte types (default: `0`) **`max_anystr_length`** : the max length for str & byte types (default: `2 ** 16`) **`validate_all`** : whether to validate field defaults (default: `False`) **`extra`** : whether to ignore, allow, or forbid extra attributes during model initialization. 
Accepts the string values of `'ignore'`, `'allow'`, or `'forbid'`, or values of the `Extra` enum (default: `Extra.ignore`) **`allow_mutation`** : whether or not models are faux-immutable, i.e. whether `__setattr__` is allowed (default: `True`) **`use_enum_values`** : whether to populate models with the `value` property of enums, rather than the raw enum. This may be useful if you want to serialise `model.dict()` later (default: `False`) **`fields`** : a `dict` containing schema information for each field; this is equivalent to using [the schema](schema.md) class (default: `None`) **`validate_assignment`** : whether to perform validation on *assignment* to attributes (default: `False`) **`allow_population_by_field_name`** : whether an aliased field may be populated by its name as given by the model attribute, as well as the alias (default: `False`) !!! note The name of this configuration setting was changed in **v1.0** from `allow_population_by_alias` to `allow_population_by_field_name`. **`error_msg_templates`** : a `dict` used to override the default error message templates. Pass in a dictionary with keys matching the error messages you want to override (default: `{}`) **`arbitrary_types_allowed`** : whether to allow arbitrary user types for fields (they are validated simply by checking if the value is an instance of the type). If `False`, `RuntimeError` will be raised on model declaration (default: `False`) **`orm_mode`** : whether to allow usage of [ORM mode](models.md#orm-mode-aka-arbitrary-class-instances) **`getter_dict`** : a custom class (which should inherit from `GetterDict`) to use when decomposing ORM classes for validation, for use with `orm_mode` **`alias_generator`** : a callable that takes a field name and returns an alias for it **`keep_untouched`** : a tuple of types (e.g. descriptors) for a model's default values that should not be changed during model creation and will not be included in the model schemas. **Note**: this means that attributes on the model with *defaults of this type*, not *annotations of this type*, will be left alone. **`schema_extra`** : a `dict` used to extend/update the generated JSON Schema **`json_loads`** : a custom function for decoding JSON; see [custom JSON (de)serialisation](exporting_models.md#custom-json-deserialisation) **`json_dumps`** : a custom function for encoding JSON; see [custom JSON (de)serialisation](exporting_models.md#custom-json-deserialisation) **`json_encoders`** : a `dict` used to customise the way types are encoded to JSON; see [JSON Serialisation](exporting_models.md#modeljson) ```py {!.tmp_examples/model_config_main.py!} ``` _(This script is complete, it should run "as is")_ Similarly, if using the `@dataclass` decorator: ```py {!.tmp_examples/model_config_dataclass.py!} ``` _(This script is complete, it should run "as is")_ ## Alias Generator If data source field names do not match your code style (e.g. CamelCase fields), you can automatically generate aliases using `alias_generator`: ```py {!.tmp_examples/model_config_alias_generator.py!} ``` _(This script is complete, it should run "as is")_ ## Alias Precedence Aliases defined on the `Config` class of child models will take priority over any aliases defined on `Config` of a parent model: ```py {!.tmp_examples/model_config_alias_precedence.py!} ``` _(This script is complete, it should run "as is")_ This includes when a child model uses `alias_generator` where the aliases of all parent model fields will be updated.
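For instance, a minimal sketch of this precedence rule (the class and field names here are invented for illustration):

```py
from pydantic import BaseModel


class Voice(BaseModel):
    name: str

    class Config:
        fields = {'name': 'nm'}


class Character(Voice):
    act: int = 1

    class Config:
        fields = {'name': 'full_name'}


# the child alias 'full_name' takes precedence over the parent alias 'nm'
print(Character(full_name='Leela', act=2))
```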
pydantic-1.2/docs/usage/models.md000066400000000000000000000430351357000400300170250ustar00rootroot00000000000000The primary means of defining objects in *pydantic* is via models (models are simply classes which inherit from `BaseModel`). You can think of models as similar to types in strictly typed languages, or as the requirements of a single endpoint in an API. Untrusted data can be passed to a model, and after parsing and validation *pydantic* guarantees that the fields of the resultant model instance will conform to the field types defined on the model. !!! note *pydantic* is primarily a parsing library, **not a validation library**. Validation is a means to an end: building a model which conforms to the types and constraints provided. In other words, *pydantic* guarantees the types and constraints of the output model, not the input data. This might sound like an esoteric distinction, but it is not. If you're unsure what this means or how it might affect your usage you should read the section about [Data Conversion](#data-conversion) below. ## Basic model usage ```py from pydantic import BaseModel class User(BaseModel): id: int name = 'Jane Doe' ``` `User` here is a model with two fields: `id`, which is an integer and is required, and `name`, which is a string and is not required (it has a default value). The type of `name` is inferred from the default value, and so a type annotation is not required (however note [this](#field-ordering) warning about field order when some fields do not have type annotations). ```py user = User(id='123') ``` `user` here is an instance of `User`. Initialisation of the object will perform all parsing and validation; if no `ValidationError` is raised, you know the resulting model instance is valid. ```py assert user.id == 123 ``` Fields of a model can be accessed as normal attributes of the user object; the string '123' has been cast to an int as per the field type ```py assert user.name == 'Jane Doe' ``` name wasn't set when user was initialised, so it has the default value ```py assert user.__fields_set__ == {'id'} ``` the fields which were supplied when user was initialised: ```py assert user.dict() == dict(user) == {'id': 123, 'name': 'Jane Doe'} ``` either `.dict()` or `dict(user)` will provide a dict of fields, but `.dict()` can take numerous other arguments. ```py user.id = 321 assert user.id == 321 ``` This model is mutable so field values can be changed. ### Model properties The example above only shows the tip of the iceberg of what models can do. Models possess the following methods and attributes: `dict()` : returns a dictionary of the model's fields and values; cf. [exporting models](exporting_models.md#modeldict) `json()` : returns a JSON string representation of `dict()`; cf. [exporting models](exporting_models.md#modeljson) `copy()` : returns a copy (by default, a shallow copy) of the model; cf. [exporting models](exporting_models.md#modelcopy) `parse_obj()` : a utility for loading any object into a model with error handling if the object is not a dictionary; cf. [helper functions](#helper-functions) `parse_raw()` : a utility for loading strings of numerous formats; cf. [helper functions](#helper-functions) `parse_file()` : like `parse_raw()` but for files; cf. [helper functions](#helper-functions) `from_orm()` : loads data into a model from an arbitrary class; cf. [ORM mode](#orm-mode-aka-arbitrary-class-instances) `schema()` : returns a dictionary representing the model as JSON Schema; cf.
[Schema](schema.md) `schema_json()` : returns a JSON string representation of `schema()`; cf. [Schema](schema.md) `construct()` : a class method for creating models without running validation; cf. [Creating models without validation](#creating-models-without-validation) `__fields_set__` : the set of names of fields which were set when the model instance was initialised `__fields__` : a dictionary of the model's fields `__config__` : the configuration class for the model, cf. [model config](model_config.md) ## Recursive Models More complex hierarchical data structures can be defined using models themselves as types in annotations. ```py {!.tmp_examples/models_recursive.py!} ``` _(This script is complete, it should run "as is")_ For self-referencing models, see [postponed annotations](postponed_annotations.md#self-referencing-models). ## ORM Mode (aka Arbitrary Class Instances) Pydantic models can be created from arbitrary class instances to support models that map to ORM objects. To do this: 1. The [Config](model_config.md) property `orm_mode` must be set to `True`. 2. The special constructor `from_orm` must be used to create the model instance. The example here uses SQLAlchemy, but the same approach should work for any ORM. ```py {!.tmp_examples/models_orm_mode.py!} ``` _(This script is complete, it should run "as is")_ ORM instances will be parsed with `from_orm` recursively as well as at the top level. Here a vanilla class is used to demonstrate the principle, but any ORM class could be used instead. ```py {!.tmp_examples/models_orm_mode_recursive.py!} ``` _(This script is complete, it should run "as is")_ Arbitrary classes are processed by *pydantic* using the `GetterDict` class (see [utils.py](https://github.com/samuelcolvin/pydantic/blob/master/pydantic/utils.py)), which attempts to provide a dictionary-like interface to any class. You can customise how this works by setting your own sub-class of `GetterDict` as the value of `Config.getter_dict` (see [config](model_config.md)). You can also customise class validation using [root_validators](validators.md#root-validators) with `pre=True`. In this case your validator function will be passed a `GetterDict` instance which you may copy and modify. ## Error Handling *pydantic* will raise `ValidationError` whenever it finds an error in the data it's validating. !!! note Validation code should not raise `ValidationError` itself, but rather raise `ValueError`, `TypeError` or `AssertionError` (or subclasses of `ValueError` or `TypeError`) which will be caught and used to populate `ValidationError`. One exception will be raised regardless of the number of errors found; that `ValidationError` will contain information about all the errors and how they happened. You can access these errors in several ways: `e.errors()` : method will return a list of errors found in the input data. `e.json()` : method will return a JSON representation of `errors`. `str(e)` : method will return a human-readable representation of the errors. Each error object contains: `loc` : the error's location as a list. The first item in the list will be the field where the error occurred, and if the field is a [sub-model](models.md#recursive-models), subsequent items will be present to indicate the nested location of the error. `type` : a computer-readable identifier of the error type. `msg` : a human-readable explanation of the error. `ctx` : an optional object which contains values required to render the error message.
As a demonstration: ```py {!.tmp_examples/models_errors1.py!} ``` _(This script is complete, it should run "as is". `json()` has `indent=2` set by default, but I've tweaked the JSON here and below to make it slightly more concise.)_ ### Custom Errors In your custom data types or validators you should use `ValueError`, `TypeError` or `AssertionError` to raise errors. See [validators](validators.md) for more details on use of the `@validator` decorator. ```py {!.tmp_examples/models_errors2.py!} ``` _(This script is complete, it should run "as is")_ You can also define your own error classes, which can specify a custom error code, message template, and context: ```py {!.tmp_examples/models_errors3.py!} ``` _(This script is complete, it should run "as is")_ ## Helper Functions *Pydantic* provides three `classmethod` helper functions on models for parsing data: * **`parse_obj`**: this is very similar to the `__init__` method of the model, except it takes a dict rather than keyword arguments. If the object passed is not a dict, a `ValidationError` will be raised. * **`parse_raw`**: this takes a *str* or *bytes* and parses it as *json*, then passes the result to `parse_obj`. Parsing *pickle* data is also supported by setting the `content_type` argument appropriately. * **`parse_file`**: this reads a file and passes the contents to `parse_raw`. If `content_type` is omitted, it is inferred from the file's extension. ```py {!.tmp_examples/models_parse.py!} ``` _(This script is complete, it should run "as is")_ !!! warning To quote the [official `pickle` docs](https://docs.python.org/3/library/pickle.html), "The pickle module is not secure against erroneous or maliciously constructed data. Never unpickle data received from an untrusted or unauthenticated source." !!! info Because it can result in arbitrary code execution, as a security measure, you need to explicitly pass `allow_pickle` to the parsing function in order to load `pickle` data. ### Creating models without validation *pydantic* also provides the `construct()` method which allows models to be created **without validation**; this can be useful when data has already been validated or comes from a trusted source and you want to create a model as efficiently as possible (`construct()` is generally around 30x faster than creating a model with full validation). !!! warning `construct()` does not do any validation, meaning it can create models which are invalid. **You should only ever use the `construct()` method with data which has already been validated, or which you trust.** ```py {!.tmp_examples/models_construct.py!} ``` _(This script is complete, it should run "as is")_ The `_fields_set` keyword argument to `construct()` is optional, but allows you to be more precise about which fields were originally set and which weren't. If it's omitted, `__fields_set__` will just be the keys of the data provided. For example, in the example above, if `_fields_set` was not provided, `new_user.__fields_set__` would be `{'id', 'age', 'name'}`. ## Generic Models !!! note New in version **v0.29**. This feature requires Python 3.7+. Pydantic supports the creation of generic models to make it easier to reuse a common model structure. In order to declare a generic model, you perform the following steps: * Declare one or more `typing.TypeVar` instances to use to parameterize your model. * Declare a pydantic model that inherits from `pydantic.generics.GenericModel` and `typing.Generic`, where you pass the `TypeVar` instances as parameters to `typing.Generic`.
* Use the `TypeVar` instances as annotations where you will want to replace them with other types or pydantic models. Here is an example using `GenericModel` to create an easily-reused HTTP response payload wrapper: ```py {!.tmp_examples/models_generics.py!} ``` _(This script is complete, it should run "as is")_ If you set `Config` or make use of `validator` in your generic model definition, it is applied to concrete subclasses in the same way as when inheriting from `BaseModel`. Any methods defined on your generic class will also be inherited. Pydantic's generics also integrate properly with mypy, so you get all the type checking you would expect mypy to provide if you were to declare the type without using `GenericModel`. !!! note Internally, pydantic uses `create_model` to generate a (cached) concrete `BaseModel` at runtime, so there is essentially zero overhead introduced by making use of `GenericModel`. If the names of the concrete subclasses are important, you can also override the default behavior: ```py {!.tmp_examples/models_generics_naming.py!} ``` _(This script is complete, it should run "as is")_ ## Dynamic model creation There are some occasions where the shape of a model is not known until runtime. For this *pydantic* provides the `create_model` method to allow models to be created on the fly. ```py {!.tmp_examples/models_dynamic_creation.py!} ``` Here `StaticFoobarModel` and `DynamicFoobarModel` are identical. Fields are defined by either a tuple of the form `(<type>, <default value>)` or just a default value. The special keyword arguments `__config__` and `__base__` can be used to customise the new model. This includes extending a base model with extra fields. ```py {!.tmp_examples/models_dynamic_inheritance.py!} ``` ## Custom Root Types Pydantic models can be defined with a custom root type by declaring the `__root__` field. The root type can be any type supported by pydantic, and is specified by the type hint on the `__root__` field. The root value can be passed to the model `__init__` via the `__root__` keyword argument, or as the first and only argument to `parse_obj`. ```py {!.tmp_examples/models_custom_root_field.py!} ``` If you call the `parse_obj` method for a model with a custom root type with a *dict* as the first argument, the following logic is used: * If the custom root type is a mapping type (e.g. `Dict` or `Mapping`), the argument itself is always validated against the custom root type. * For other custom root types, if the dict has precisely one key, named `__root__`, the corresponding value will be validated against the custom root type. * Otherwise, the dict itself is validated against the custom root type. This is demonstrated in the following example: ```py {!.tmp_examples/models_custom_root_field_parse_obj.py!} ``` !!! warning Calling the `parse_obj` method on a dict with the single key `"__root__"` for non-mapping custom root types is currently supported for backwards compatibility, but is not recommended and may be dropped in a future version. ## Faux Immutability Models can be configured to be immutable via `allow_mutation = False`. When this is set, attempting to change the values of instance attributes will raise errors. See [model config](model_config.md) for more details on `Config`. !!! warning Immutability in python is never strict. If developers are determined/stupid they can always modify a so-called "immutable" object. ```py {!.tmp_examples/models_mutation.py!} ``` Trying to change `a` caused an error, and `a` remains unchanged.
However, the dict `b` is mutable, and the immutability of `foobar` doesn't stop `b` from being changed. ## Abstract Base Classes Pydantic models can be used alongside Python's [Abstract Base Classes](https://docs.python.org/3/library/abc.html) (ABCs). ```py {!.tmp_examples/models_abc.py!} ``` _(This script is complete, it should run "as is")_ ## Field Ordering Field order is important in models for the following reasons: * validation is performed in the order fields are defined; [field validators](validators.md) can access the values of earlier fields, but not later ones * field order is preserved in the model [schema](schema.md) * field order is preserved in [validation errors](#error-handling) * field order is preserved by [`.dict()` and `.json()` etc.](exporting_models.md#modeldict) As of **v1.0** all fields with annotations (whether annotation-only or with a default value) will precede all fields without an annotation. Within their respective groups, fields remain in the order they were defined. ```py {!.tmp_examples/models_field_order.py!} ``` _(This script is complete, it should run "as is")_ !!! warning As demonstrated by the example above, combining the use of annotated and non-annotated fields in the same model can result in surprising field orderings. (This is due to limitations of python) Therefore, **we recommend adding type annotations to all fields**, even when a default value would determine the type by itself, to guarantee field order is preserved. ## Required fields To declare a field as required, you may declare it using just an annotation, or you may use an ellipsis (`...`) as the value: ```py from pydantic import BaseModel, Field class Model(BaseModel): a: int b: int = ... c: int = Field(...) ``` _(This script is complete, it should run "as is")_ Where `Field` refers to the [field function](schema.md#field-customisation). Here `a`, `b` and `c` are all required. However, use of the ellipsis in `b` will not work well with [mypy](mypy.md), and as of **v1.0** should be avoided in most cases. If you want to specify a field that can take a `None` value while still being required, you can use `Optional` with `...`: ```py {!.tmp_examples/models_required_field_optional.py!} ``` _(This script is complete, it should run "as is")_ In this model, `a`, `b`, and `c` can take `None` as a value. But `a` is optional, while `b` and `c` are required. `b` and `c` require a value, even if the value is `None`. ## Parsing data into a specified type Pydantic includes a standalone utility function `parse_obj_as` that can be used to apply the parsing logic used to populate pydantic models in a more ad-hoc way. This function behaves similarly to `BaseModel.parse_obj`, but works with arbitrary pydantic-compatible types. This is especially useful when you want to parse results into a type that is not a direct subclass of `BaseModel`. For example: ```py {!.tmp_examples/parse_obj_as.py!} ``` _(This script is complete, it should run "as is")_ This function is capable of parsing data into any of the types pydantic can handle as fields of a `BaseModel`. Pydantic also includes a similar standalone function called `parse_file_as`, which is analogous to `BaseModel.parse_file`. ## Data Conversion *pydantic* may cast input data to force it to conform to model field types, and in some cases this may result in a loss of information.
This is a deliberate decision of *pydantic*, and in general it's the most useful approach. See [here](https://github.com/samuelcolvin/pydantic/issues/578) for a longer discussion on the subject.
pydantic-1.2/docs/usage/mypy.md000066400000000000000000000022551357000400300165370ustar00rootroot00000000000000Pydantic works with [mypy](http://mypy-lang.org/) provided you use the annotation-only version of required fields:

```py
{!.tmp_examples/mypy_main.py!}
```

You can run your code through mypy with:

```bash
mypy \
  --ignore-missing-imports \
  --follow-imports=skip \
  --strict-optional \
  pydantic_mypy_test.py
```

If you call mypy on the example code above, you should see mypy detect the attribute access error:

```
13: error: "Model" has no attribute "middle_name"
```

## Strict Optional

For your code to pass with `--strict-optional`, you need to use `Optional[<type>]` or an alias of `Optional[<type>]` for all fields with `None` as the default. (This is standard with mypy.)

Pydantic provides a few useful optional or union types:

* `NoneStr` aka. `Optional[str]`
* `NoneBytes` aka. `Optional[bytes]`
* `StrBytes` aka. `Union[str, bytes]`
* `NoneStrBytes` aka. `Optional[StrBytes]`

If these aren't sufficient you can of course define your own.

## Mypy Plugin

Pydantic ships with a mypy plugin that adds a number of important pydantic-specific features to mypy that improve its ability to type-check your code. See the [pydantic mypy plugin docs](../mypy_plugin.md) for more details.
pydantic-1.2/docs/usage/postponed_annotations.md000066400000000000000000000045621357000400300221740ustar00rootroot00000000000000!!! note
    Both postponed annotations via the future import and `ForwardRef` require python 3.7+.

Postponed annotations (as described in [PEP563](https://www.python.org/dev/peps/pep-0563/)) "just work".

```py
{!.tmp_examples/postponed_annotations_main.py!}
```
_(This script is complete, it should run "as is")_

Internally, *pydantic* will call a method similar to `typing.get_type_hints` to resolve annotations.

In cases where the referenced type is not yet defined, `ForwardRef` can be used (although referencing the type directly or by its string is a simpler solution in the case of [self-referencing models](#self-referencing-models)).

In some cases, a `ForwardRef` won't be able to be resolved during model creation. For example, this happens whenever a model references itself as a field type. When this happens, you'll need to call `update_forward_refs` after the model has been created before it can be used:

```py
{!.tmp_examples/postponed_annotations_forward_ref.py!}
```
_(This script is complete, it should run "as is")_

!!! warning
    To resolve strings (type names) into annotations (types), *pydantic* needs a namespace dict in which to perform the lookup. For this it uses `module.__dict__`, just like `get_type_hints`. This means *pydantic* may not play well with types not defined in the global scope of a module.

For example, this works fine:

```py
{!.tmp_examples/postponed_annotations_works.py!}
```

While this will break:

```py
{!.tmp_examples/postponed_annotations_broken.py!}
```

Resolving this is beyond the scope of *pydantic*: either remove the future import or declare the types globally.
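Returning to the `update_forward_refs` step above, a minimal self-referencing sketch looks like this (the model and field names are illustrative):

```py
from typing import List

from pydantic import BaseModel

class Node(BaseModel):
    value: int
    children: List['Node'] = []

# 'Node' is still an unresolved forward reference at class-creation time,
# so it must be resolved before the model is used:
Node.update_forward_refs()

print(Node(value=1, children=[{'value': 2}]))
```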
## Self-referencing Models

Data structures with self-referencing models are also supported, provided the function `update_forward_refs()` is called once the model is created (you will be reminded with a friendly error message if you forget).

Within the model, you can refer to the not-yet-constructed model using a string:

```py
{!.tmp_examples/postponed_annotations_self_referencing_string.py!}
```
_(This script is complete, it should run "as is")_

Since `python 3.7`, you can also refer to it by its type, provided you import `annotations` (see [above](postponed_annotations.md) for support depending on Python and *pydantic* versions).

```py
{!.tmp_examples/postponed_annotations_self_referencing_annotations.py!}
```
_(This script is complete, it should run "as is")_
pydantic-1.2/docs/usage/schema.md000066400000000000000000000134231357000400300170000ustar00rootroot00000000000000*Pydantic* allows auto creation of JSON Schemas from models:

```py
{!.tmp_examples/schema_main.py!}
```
_(This script is complete, it should run "as is")_

Outputs:

```json
{!.tmp_examples/schema_main.json!}
```

The generated schemas are compliant with the specifications: [JSON Schema Core](https://json-schema.org/latest/json-schema-core.html), [JSON Schema Validation](https://json-schema.org/latest/json-schema-validation.html) and [OpenAPI](https://github.com/OAI/OpenAPI-Specification).

`BaseModel.schema` will return a dict of the schema, while `BaseModel.schema_json` will return a JSON string representation of that dict.

Sub-models used are added to the `definitions` JSON attribute and referenced, as per the spec.

All sub-models' (and their sub-models') schemas are put directly in a top-level `definitions` JSON key for easy re-use and reference.

"Sub-models" with modifications (via the `Field` class) like a custom title, description or default value, are recursively included instead of referenced.

The `description` for models is taken from either the docstring of the class or the argument `description` to the `Field` class.

The schema is generated by default using aliases as keys, but it can be generated using model property names instead by calling `MainModel.schema/schema_json(by_alias=False)`.

## Field customisation

Optionally, the `Field` function can be used to provide extra information about the field and validations. It has the following arguments (a brief sketch follows the list):

* `default`: (a positional argument) the default value of the field. Since the `Field` replaces the field's default, this first argument can be used to set the default. Use ellipsis (`...`) to indicate the field is required.
* `alias`: the public name of the field
* `title`: if omitted, `field_name.title()` is used
* `description`: if omitted and the annotation is a sub-model, the docstring of the sub-model will be used
* `const`: this argument *must* be the same as the field's default value if present
* `gt`: for numeric values (`int`, `float`, `Decimal`), adds a validation of "greater than" and an annotation of `exclusiveMinimum` to the JSON Schema
* `ge`: for numeric values, this adds a validation of "greater than or equal" and an annotation of `minimum` to the JSON Schema
* `lt`: for numeric values, this adds a validation of "less than" and an annotation of `exclusiveMaximum` to the JSON Schema
* `le`: for numeric values, this adds a validation of "less than or equal" and an annotation of `maximum` to the JSON Schema
* `multiple_of`: for numeric values, this adds a validation of "a multiple of" and an annotation of `multipleOf` to the JSON Schema
* `min_items`: for list values, this adds a corresponding validation and an annotation of `minItems` to the JSON Schema
* `max_items`: for list values, this adds a corresponding validation and an annotation of `maxItems` to the JSON Schema
* `min_length`: for string values, this adds a corresponding validation and an annotation of `minLength` to the JSON Schema
* `max_length`: for string values, this adds a corresponding validation and an annotation of `maxLength` to the JSON Schema
* `regex`: for string values, this adds a Regular Expression validation generated from the passed string and an annotation of `pattern` to the JSON Schema
* `**` any other keyword arguments (e.g. `examples`) will be added verbatim to the field's schema
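As a brief sketch of how a few of these arguments combine (the model and field names are illustrative):

```py
from pydantic import BaseModel, Field

class Product(BaseModel):
    name: str = Field(..., title='Product name', max_length=50)
    price: float = Field(0.0, ge=0, description='Price in USD')

# the constraints and metadata appear in the generated JSON Schema
print(Product.schema_json(indent=2))
```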
Instead of using `Field`, the `fields` property of [the Config class](model_config.md) can be used to set all of the arguments above except `default`.

### Unenforced Field constraints

If *pydantic* finds constraints which are not being enforced, an error will be raised. If you want to force the constraint to appear in the schema, even though it's not being checked upon parsing, you can use variadic arguments to `Field()` with the raw schema attribute name:

```py
{!.tmp_examples/schema_unenforced_constraints.py!}
```
_(This script is complete, it should run "as is")_

## JSON Schema Types

Types, custom field types, and constraints (like `max_length`) are mapped to the corresponding spec formats in the following priority order (when there is an equivalent available):

1. [JSON Schema Core](http://json-schema.org/latest/json-schema-core.html#rfc.section.4.3.1)
2. [JSON Schema Validation](http://json-schema.org/latest/json-schema-validation.html)
3. [OpenAPI Data Types](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#data-types)
4. The standard `format` JSON field is used to define *pydantic* extensions for more complex `string` sub-types.

The field schema mapping from Python / *pydantic* to JSON Schema is done as follows:

{!.tmp_schema_mappings.html!}

## Top-level schema generation

You can also generate a top-level JSON Schema that only includes a list of models and related sub-models in its `definitions`:

```py
{!.tmp_examples/schema_top_level.py!}
```
_(This script is complete, it should run "as is")_

Outputs:

```json
{!.tmp_examples/schema_top_level.json!}
```

## Schema customization

You can customize the generated `$ref` JSON location: the definitions are always stored under the key `definitions`, but a specified prefix can be used for the references.
This is useful if you need to extend or modify the JSON Schema default definitions location. E.g. with OpenAPI: ```py {!.tmp_examples/schema_custom.py!} ``` _(This script is complete, it should run "as is")_ Outputs: ```json {!.tmp_examples/schema_custom.json!} ``` It's also possible to extend/override the generated JSON schema in a model. To do it, use the `Config` sub-class attribute `schema_extra`. For example, you could add `examples` to the JSON Schema: ```py {!.tmp_examples/schema_with_example.py!} ``` _(This script is complete, it should run "as is")_ Outputs: ```json {!.tmp_examples/schema_with_example.json!} ``` pydantic-1.2/docs/usage/settings.md000066400000000000000000000065331357000400300174040ustar00rootroot00000000000000One of pydantic's most useful applications is settings management. If you create a model that inherits from `BaseSettings`, the model initialiser will attempt to determine the values of any fields not passed as keyword arguments by reading from the environment. (Default values will still be used if the matching environment variable is not set.) This makes it easy to: * Create a clearly-defined, type-hinted application configuration class * Automatically read modifications to the configuration from environment variables * Manually override specific settings in the initialiser where desired (e.g. in unit tests) For example: ```py {!.tmp_examples/settings_main.py!} ``` _(This script is complete, it should run "as is")_ ## Environment variable names The following rules are used to determine which environment variable(s) are read for a given field: * By default, the environment variable name is built by concatenating the prefix and field name. * For example, to override `special_function` above, you could use: export my_prefix_special_function='foo.bar' * Note 1: The default prefix is an empty string. * Note 2: Field aliases are ignored when building the environment variable name. * Custom environment variable names can be set in two ways: * `Config.fields['field_name']['env']` (see `auth_key` and `redis_dsn` above) * `Field(..., env=...)` (see `api_key` above) * When specifying custom environment variable names, either a string or a list of strings may be provided. * When specifying a list of strings, order matters: the first detected value is used. * For example, for `redis_dsn` above, `service_redis_dsn` would take precedence over `redis_url`. !!! warning Since **v1.0** *pydantic* does not consider field aliases when finding environment variables to populate settings models, use `env` instead as described above. To aid the transition from aliases to `env`, a warning will be raised when aliases are used on settings models without a custom env var name. If you really mean to use aliases, either ignore the warning or set `env` to suppress it. Case-sensitivity can be turned on through the `Config`: ```py {!.tmp_examples/settings_case_sensitive.py!} ``` When `case_sensitive` is `True`, the environment variable must be in all-caps, so in this example `redis_host` could only be modified via `export REDIS_HOST`. !!! note On Windows, python's `os` module always treats environment variables as case-insensitive, so the `case_sensitive` config setting will have no effect - settings will always be updated ignoring case. ## Parsing environment variable values For most simple field types (such as `int`, `float`, `str`, etc.), the environment variable value is parsed the same way it would be if passed directly to the initialiser (as a string). 
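For instance (a minimal sketch; the field and variable names are illustrative):

```py
import os

from pydantic import BaseSettings

class Settings(BaseSettings):
    timeout: float = 5.0

os.environ['timeout'] = '2.5'
# parsed exactly as if Settings(timeout='2.5') had been called
print(Settings().timeout)  # 2.5
```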
Complex types like `list`, `set`, `dict`, and sub-models are populated from the environment by treating the environment variable's value as a JSON-encoded string.

## Field value priority

In the case where a value is specified for the same `Settings` field in multiple ways, the selected value is determined as follows (in descending order of priority):

1. Arguments passed to the `Settings` class initialiser.
2. Environment variables, e.g. `my_prefix_special_function` as described above.
3. The default field values for the `Settings` model.
pydantic-1.2/docs/usage/types.md000066400000000000000000000576261357000400300167140ustar00rootroot00000000000000Where possible *pydantic* uses [standard library types](#standard-library-types) to define fields, thus smoothing the learning curve. For many useful applications, however, no standard library type exists, so *pydantic* implements [many commonly used types](#pydantic-types).

If no existing type suits your purpose you can also implement your [own pydantic-compatible types](#custom-data-types) with custom properties and validation.

## Standard Library Types

*pydantic* supports many common types from the python standard library. If you need stricter processing see [Strict Types](#strict-types); if you need to constrain the values allowed (e.g. to require a positive int) see [Constrained Types](#constrained-types).

`bool`
: see [Booleans](#booleans) below for details on how bools are validated and what values are permitted

`int`
: *pydantic* uses `int(v)` to coerce types to an `int`; see [this](models.md#data-conversion) warning on loss of information during data conversion

`float`
: similarly, `float(v)` is used to coerce values to floats

`str`
: strings are accepted as-is, `int`, `float` and `Decimal` are coerced using `str(v)`, `bytes` and `bytearray` are converted using `v.decode()`, enums inheriting from `str` are converted using `v.value`, and all other types cause an error

`bytes`
: `bytes` are accepted as-is, `bytearray` is converted using `bytes(v)`, `str` are converted using `v.encode()`, and `int`, `float`, and `Decimal` are coerced using `str(v).encode()`

`list`
: allows `list`, `tuple`, `set`, `frozenset`, or generators and casts to a list; see `typing.List` below for sub-type constraints

`tuple`
: allows `list`, `tuple`, `set`, `frozenset`, or generators and casts to a tuple; see `typing.Tuple` below for sub-type constraints

`dict`
: `dict(v)` is used to attempt to convert a dictionary; see `typing.Dict` below for sub-type constraints

`set`
: allows `list`, `tuple`, `set`, `frozenset`, or generators and casts to a set; see `typing.Set` below for sub-type constraints

`frozenset`
: allows `list`, `tuple`, `set`, `frozenset`, or generators and casts to a frozen set; see `typing.FrozenSet` below for sub-type constraints

`datetime.date`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`datetime.time`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`datetime.datetime`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`datetime.timedelta`
: see [Datetime Types](#datetime-types) below for more detail on parsing and validation

`typing.Any`
: allows any value, including `None`; thus an `Any` field is optional

`typing.TypeVar`
: constrains the values allowed based on `constraints` or `bound`, see [TypeVar](#typevar)

`typing.Union`
: see [Unions](#unions) below for more detail on parsing and validation

`typing.Optional`
: `Optional[x]`
is simply shorthand for `Union[x, None]`; see [Unions](#unions) below for more detail on parsing and validation and [Required Fields](models.md#required-fields) for details about required fields that can receive `None` as a value.

`typing.List`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Tuple`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Dict`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Set`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.FrozenSet`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Sequence`
: see [Typing Iterables](#typing-iterables) below for more detail on parsing and validation

`typing.Type`
: see [Type](#type) below for more detail on parsing and validation

`typing.Callable`
: see [Callable](#callable) below for more detail on parsing and validation

`typing.Pattern`
: will cause the input value to be passed to `re.compile(v)` to create a regex pattern

`ipaddress.IPv4Address`
: simply uses the type itself for validation by passing the value to `IPv4Address(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv4Interface`
: simply uses the type itself for validation by passing the value to `IPv4Interface(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv4Network`
: simply uses the type itself for validation by passing the value to `IPv4Network(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv6Address`
: simply uses the type itself for validation by passing the value to `IPv6Address(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv6Interface`
: simply uses the type itself for validation by passing the value to `IPv6Interface(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`ipaddress.IPv6Network`
: simply uses the type itself for validation by passing the value to `IPv6Network(v)`; see [Pydantic Types](#pydantic-types) for other custom IP address types

`enum.Enum`
: checks that the value is a valid member of the enum; see [Enums and Choices](#enums-and-choices) for more details

`enum.IntEnum`
: checks that the value is a valid member of the integer enum; see [Enums and Choices](#enums-and-choices) for more details

`decimal.Decimal`
: *pydantic* attempts to convert the value to a string, then passes the string to `Decimal(v)`

`pathlib.Path`
: simply uses the type itself for validation by passing the value to `Path(v)`; see [Pydantic Types](#pydantic-types) for other more strict path types

`uuid.UUID`
: strings and bytes (converted to strings) are passed to `UUID(v)`; see [Pydantic Types](#pydantic-types) for other stricter UUID types

`ByteSize`
: converts a bytes string with units to bytes

### Typing Iterables

*pydantic* uses standard library `typing` types as defined in PEP 484 to define complex objects.

```py
{!.tmp_examples/types_iterables.py!}
```
_(This script is complete, it should run "as is")_

### Unions

The `Union` type allows a model attribute to accept different types, e.g.:

!!! warning
    This script is complete, it should run "as is". However, it may not reflect the desired behavior; see below.
```py
{!.tmp_examples/types_union_incorrect.py!}
```

However, as can be seen above, *pydantic* will attempt to 'match' any of the types defined under `Union` and will use the first one that matches. In the above example the `id` of `user_03` was set to a `uuid.UUID` instance (a type included in the attribute's `Union` annotation), but because a `uuid.UUID` can be marshalled into an `int`, *pydantic* matched the `int` type and disregarded the other types.

As such, it is recommended that, when defining `Union` annotations, the most specific type is included first and followed by less specific types. In the above example, the `UUID` class should precede the `int` and `str` classes to preclude the unexpected representation:

```py
{!.tmp_examples/types_union_correct.py!}
```
_(This script is complete, it should run "as is")_

!!! tip
    The type `Optional[x]` is a shorthand for `Union[x, None]`.

    `Optional[x]` can also be used to specify a required field that can take `None` as a value. See more details in [Required Fields](models.md#required-fields).

### Enums and Choices

*pydantic* uses python's standard `enum` classes to define choices.

```py
{!.tmp_examples/types_choices.py!}
```
_(This script is complete, it should run "as is")_

### Datetime Types

*Pydantic* supports the following [datetime](https://docs.python.org/library/datetime.html#available-types) types:

* `datetime` fields can be:
    * `datetime`, existing `datetime` object
    * `int` or `float`, assumed as Unix time, i.e. seconds (if <= `2e10`) or milliseconds (if > `2e10`) since 1 January 1970
    * `str`, following formats work:
        * `YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z[±]HH[:]MM]]]`
        * `int` or `float` as a string (assumed as Unix time)
* `date` fields can be:
    * `date`, existing `date` object
    * `int` or `float`, see `datetime`
    * `str`, following formats work:
        * `YYYY-MM-DD`
        * `int` or `float`, see `datetime`
* `time` fields can be:
    * `time`, existing `time` object
    * `str`, following formats work:
        * `HH:MM[:SS[.ffffff]]`
* `timedelta` fields can be:
    * `timedelta`, existing `timedelta` object
    * `int` or `float`, assumed as seconds
    * `str`, following formats work:
        * `[-][DD ][HH:MM]SS[.ffffff]`
        * `[±]P[DD]DT[HH]H[MM]M[SS]S` (ISO 8601 format for timedelta)

```py
{!.tmp_examples/types_dt.py!}
```

### Booleans

!!! warning
    The logic for parsing `bool` fields has changed as of version **v1.0**.

    Prior to **v1.0**, `bool` parsing never failed, leading to some unexpected results. The new logic is described below.

A standard `bool` field will raise a `ValidationError` if the value is not one of the following:

* A valid boolean (i.e. `True` or `False`),
* The integers `0` or `1`,
* a `str` which when converted to lower case is one of `'0', 'off', 'f', 'false', 'n', 'no', '1', 'on', 't', 'true', 'y', 'yes'`
* a `bytes` which is valid (per the previous rule) when decoded to `str`

!!! note
    If you want stricter boolean logic (e.g. a field which only permits `True` and `False`) you can use [`StrictBool`](#strict-types).

Here is a script demonstrating some of these behaviors:

```py
{!.tmp_examples/types_boolean.py!}
```
_(This script is complete, it should run "as is")_

### Callable

Fields can also be of type `Callable`:

```py
{!.tmp_examples/types_callable.py!}
```
_(This script is complete, it should run "as is")_

!!! warning
    Callable fields only perform a simple check that the argument is callable; no validation of arguments, their types, or the return type is performed.
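A minimal sketch of that caveat (the model name is illustrative):

```py
from typing import Callable

from pydantic import BaseModel

class Foo(BaseModel):
    callback: Callable[[int], int]

m = Foo(callback=lambda x: x)
print(m.callback(1))
# the signature is *not* checked, so a mismatching callable also validates:
m = Foo(callback=lambda: 'no arguments at all')
```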
### Type

*pydantic* supports the use of `Type[T]` to specify that a field may only accept classes (not instances) that are subclasses of `T`.

```py
{!.tmp_examples/types_type.py!}
```
_(This script is complete, it should run "as is")_

You may also use `Type` to specify that any class is allowed.

```py
{!.tmp_examples/types_bare_type.py!}
```
_(This script is complete, it should run "as is")_

### TypeVar

`TypeVar` is supported, either unconstrained, constrained or with a bound.

```py
{!.tmp_examples/types_typevar.py!}
```
_(This script is complete, it should run "as is")_

## Literal Type

!!! note
    This is a new feature of the python standard library as of python 3.8; prior to python 3.8, it requires the [typing-extensions](https://pypi.org/project/typing-extensions/) package.

*pydantic* supports the use of `typing.Literal` (or `typing_extensions.Literal` prior to python 3.8) as a lightweight way to specify that a field may accept only specific literal values:

```py
{!.tmp_examples/types_literal1.py!}
```
_(This script is complete, it should run "as is")_

One benefit of this field type is that it can be used to check for equality with one or more specific values without needing to declare custom validators:

```py
{!.tmp_examples/types_literal2.py!}
```
_(This script is complete, it should run "as is")_

With proper ordering in an annotated `Union`, you can use this to parse types of decreasing specificity:

```py
{!.tmp_examples/types_literal3.py!}
```
_(This script is complete, it should run "as is")_

## Pydantic Types

*pydantic* also provides a variety of other useful types:

`FilePath`
: like `Path`, but the path must exist and be a file

`DirectoryPath`
: like `Path`, but the path must exist and be a directory

`EmailStr`
: requires [email-validator](https://github.com/JoshData/python-email-validator) to be installed; the input string must be a valid email address, and the output is a simple string

`NameEmail`
: requires [email-validator](https://github.com/JoshData/python-email-validator) to be installed; the input string must be either a valid email address or in the format `Fred Bloggs <fred.bloggs@example.com>`, and the output is a `NameEmail` object which has two properties: `name` and `email`. For `Fred Bloggs <fred.bloggs@example.com>` the name would be `"Fred Bloggs"`; for `fred.bloggs@example.com` it would be `"fred.bloggs"`.

`PyObject`
: expects a string and loads the python object importable at that dotted path; e.g.
if `'math.cos'` was provided, the resulting field value would be the function `cos` `Color` : for parsing HTML and CSS colors; see [Color Type](#color-type) `Json` : a special type wrapper which loads JSON before parsing; see [JSON Type](#json-type) `PaymentCardNumber` : for parsing and validating payment cards; see [payment cards](#payment-card-numbers) `AnyUrl` : any URL; see [URLs](#urls) `AnyHttpUrl` : an HTTP URL; see [URLs](#urls) `HttpUrl` : a stricter HTTP URL; see [URLs](#urls) `PostgresDsn` : a postgres DSN style URL; see [URLs](#urls) `RedisDsn` : a redis DSN style URL; see [URLs](#urls) `stricturl` : a type method for arbitrary URL constraints; see [URLs](#urls) `UUID1` : requires a valid UUID of type 1; see `UUID` [above](#standard-library-types) `UUID3` : requires a valid UUID of type 3; see `UUID` [above](#standard-library-types) `UUID4` : requires a valid UUID of type 4; see `UUID` [above](#standard-library-types) `UUID5` : requires a valid UUID of type 5; see `UUID` [above](#standard-library-types) `SecretBytes` : bytes where the value is kept partially secret; see [Secrets](#secret-types) `SecretStr` : string where the value is kept partially secret; see [Secrets](#secret-types) `IPvAnyAddress` : allows either an `IPv4Address` or an `IPv6Address` `IPvAnyInterface` : allows either an `IPv4Interface` or an `IPv6Interface` `IPvAnyNetwork` : allows either an `IPv4Network` or an `IPv6Network` `NegativeFloat` : allows a float which is negative; uses standard `float` parsing then checks the value is less than 0; see [Constrained Types](#constrained-types) `NegativeInt` : allows an int which is negative; uses standard `int` parsing then checks the value is less than 0; see [Constrained Types](#constrained-types) `PositiveFloat` : allows a float which is positive; uses standard `float` parsing then checks the value is greater than 0; see [Constrained Types](#constrained-types) `PositiveInt` : allows an int which is positive; uses standard `int` parsing then checks the value is greater than 0; see [Constrained Types](#constrained-types) `conbytes` : type method for constraining bytes; see [Constrained Types](#constrained-types) `condecimal` : type method for constraining Decimals; see [Constrained Types](#constrained-types) `confloat` : type method for constraining floats; see [Constrained Types](#constrained-types) `conint` : type method for constraining ints; see [Constrained Types](#constrained-types) `conlist` : type method for constraining lists; see [Constrained Types](#constrained-types) `constr` : type method for constraining strs; see [Constrained Types](#constrained-types) ### URLs For URI/URL validation the following types are available: - `AnyUrl`: any scheme allowed, TLD not required - `AnyHttpUrl`: schema `http` or `https`, TLD not required - `HttpUrl`: schema `http` or `https`, TLD required, max length 2083 - `PostgresDsn`: schema `postgres` or `postgresql`, userinfo required, TLD not required - `RedisDsn`: schema `redis`, userinfo required, tld not required - `stricturl`, method with the following keyword arguments: - `strip_whitespace: bool = True` - `min_length: int = 1` - `max_length: int = 2 ** 16` - `tld_required: bool = True` - `allowed_schemes: Optional[Set[str]] = None` The above types (which all inherit from `AnyUrl`) will attempt to give descriptive errors when invalid URLs are provided: ```py {!.tmp_examples/types_urls.py!} ``` _(This script is complete, it should run "as is")_ If you require a custom URI/URL type, it can be created in a similar way to 
the types defined above.

#### URL Properties

Assuming an input URL of `http://samuel:pass@example.com:8000/the/path/?query=here#fragment=is;this=bit`, the above types export the following properties:

- `scheme`: always set - the url schema (`http` above)
- `host`: always set - the url host (`example.com` above)
- `host_type`: always set - describes the type of host, either:
    - `domain`: e.g. `example.com`,
    - `int_domain`: international domain, see [below](#international-domains), e.g. `exampl£e.org`,
    - `ipv4`: an IP V4 address, e.g. `127.0.0.1`, or
    - `ipv6`: an IP V6 address, e.g. `2001:db8:ff00:42`
- `user`: optional - the username if included (`samuel` above)
- `password`: optional - the password if included (`pass` above)
- `tld`: optional - the top level domain (`com` above), **Note: this will be wrong for any two-level domain, e.g. "co.uk".** You'll need to implement your own list of TLDs if you require full TLD validation
- `port`: optional - the port (`8000` above)
- `path`: optional - the path (`/the/path/` above)
- `query`: optional - the URL query (aka GET arguments or "search string") (`query=here` above)
- `fragment`: optional - the fragment (`fragment=is;this=bit` above)

If further validation is required, these properties can be used by validators to enforce specific behaviour:

```py
{!.tmp_examples/types_url_properties.py!}
```
_(This script is complete, it should run "as is")_

#### International Domains

"International domains" (e.g. a URL where the host includes non-ascii characters) will be encoded via [punycode](https://en.wikipedia.org/wiki/Punycode) (see [this article](https://www.xudongz.com/blog/2017/idn-phishing/) for a good description of why this is important):

```py
{!.tmp_examples/types_url_punycode.py!}
```
_(This script is complete, it should run "as is")_

!!! warning
    #### Underscores in Hostnames

    In *pydantic* underscores are allowed in all parts of a domain except the TLD. Technically this might be wrong - in theory the hostname cannot have underscores, but subdomains can.

    To explain this, consider the following two cases:

    - `exam_ple.co.uk`: the hostname is `exam_ple`, which should not be allowed since it contains an underscore
    - `foo_bar.example.com`: the hostname is `example`, which should be allowed since the underscore is in the subdomain

    Without having an exhaustive list of TLDs, it would be impossible to differentiate between these two. Therefore underscores are allowed, but you can always do further validation in a validator if desired.

    Also, Chrome, Firefox, and Safari all currently accept `http://exam_ple.com` as a URL, so we're in good (or at least big) company.

### Color Type

You can use the `Color` data type for storing colors as per [CSS3 specification](http://www.w3.org/TR/css3-color/#svg-color). Colors can be defined via:

- [name](http://www.w3.org/TR/SVG11/types.html#ColorKeywords) (e.g. `"Black"`, `"azure"`)
- [hexadecimal value](https://en.wikipedia.org/wiki/Web_colors#Hex_triplet) (e.g. `"0x000"`, `"#FFFFFF"`, `"7fffd4"`)
- RGB/RGBA tuples (e.g. `(255, 255, 255)`, `(255, 255, 255, 0.5)`)
- [RGB/RGBA strings](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#RGB_colors) (e.g. `"rgb(255, 255, 255)"`, `"rgba(255, 255, 255, 0.5)"`)
- [HSL strings](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#HSL_colors) (e.g. `"hsl(270, 60%, 70%)"`, `"hsl(270, 60%, 70%, .5)"`)
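For example (a minimal sketch; `Color` lives in the `pydantic.color` module):

```py
from pydantic import BaseModel
from pydantic.color import Color

class Theme(BaseModel):
    accent: Color

theme = Theme(accent='#7fffd4')
print(theme.accent.as_named())      # aquamarine
print(theme.accent.as_rgb_tuple())  # (127, 255, 212)
```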
```py
{!.tmp_examples/types_color.py!}
```
_(This script is complete, it should run "as is")_

`Color` has the following methods:

**`original`**
: the original string or tuple passed to `Color`

**`as_named`**
: returns a named CSS3 color; fails if the alpha channel is set or no such color exists unless `fallback=True` is supplied, in which case it falls back to `as_hex`

**`as_hex`**
: returns a string in the format `#fff` or `#ffffff`; will contain 4 (or 8) hex values if the alpha channel is set, e.g. `#7f33cc26`

**`as_rgb`**
: returns a string in the format `rgb(<red>, <green>, <blue>)`, or `rgba(<red>, <green>, <blue>, <alpha>)` if the alpha channel is set

**`as_rgb_tuple`**
: returns a 3- or 4-tuple in RGB(a) format. The `alpha` keyword argument can be used to define whether the alpha channel should be included; options: `True` - always include, `False` - never include, `None` (default) - include if set

**`as_hsl`**
: string in the format `hsl(<hue deg>, <saturation %>, <lightness %>)` or `hsl(<hue deg>, <saturation %>, <lightness %>, <alpha>)` if the alpha channel is set

**`as_hsl_tuple`**
: returns a 3- or 4-tuple in HSL(a) format. The `alpha` keyword argument can be used to define whether the alpha channel should be included; options: `True` - always include, `False` - never include, `None` (the default) - include if set

The `__str__` method for `Color` returns `self.as_named(fallback=True)`.

!!! note
    the `as_hsl*` methods refer to hue, saturation, lightness "HSL" as used in html and most of the world, **not** "HLS" as used in python's `colorsys`.

### Secret Types

You can use the `SecretStr` and the `SecretBytes` data types for storing sensitive information that you do not want to be visible in logging or tracebacks. The `SecretStr` and `SecretBytes` will be formatted as either `'**********'` or `''` (if empty) on conversion to json.

```py
{!.tmp_examples/types_secret_types.py!}
```
_(This script is complete, it should run "as is")_

### Json Type

You can use the `Json` data type to make *pydantic* first load a raw JSON string. It can also optionally be used to parse the loaded object into another type based on the type `Json` is parameterised with:

```py
{!.tmp_examples/types_json_type.py!}
```
_(This script is complete, it should run "as is")_

### Payment Card Numbers

The `PaymentCardNumber` type validates [payment cards](https://en.wikipedia.org/wiki/Payment_card) (such as a debit or credit card).

```py
{!.tmp_examples/types_payment_card_number.py!}
```
_(This script is complete, it should run "as is")_

`PaymentCardBrand` can be one of the following based on the BIN:

* `PaymentCardBrand.amex`
* `PaymentCardBrand.mastercard`
* `PaymentCardBrand.visa`
* `PaymentCardBrand.other`

The actual validation verifies the card number is:

* a `str` of only digits
* [luhn](https://en.wikipedia.org/wiki/Luhn_algorithm) valid
* the correct length based on the BIN, if Amex, Mastercard or Visa, and between 12 and 19 digits for all other brands

## Constrained Types

The value of numerous common types can be restricted using `con*` type functions:

```py
{!.tmp_examples/types_constrained.py!}
```
_(This script is complete, it should run "as is")_

Where `Field` refers to the [field function](schema.md#field-customisation).

## Strict Types

You can use the `StrictStr`, `StrictInt`, `StrictFloat`, and `StrictBool` types to prevent coercion from compatible types. These types will only pass validation when the validated value is of the respective type or is a subtype of that type.
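For example (a minimal sketch):

```py
from pydantic import BaseModel, StrictInt, ValidationError

class M(BaseModel):
    n: StrictInt

print(M(n=1).n)  # 1
try:
    M(n='1')  # a str is not coerced, even though int('1') would succeed
except ValidationError as e:
    print(e)
```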
This behavior is also exposed via the `strict` field of the `ConstrainedStr`, `ConstrainedFloat` and `ConstrainedInt` classes and can be combined with a multitude of complex validation rules. The following caveats apply: - `StrictInt` (and the `strict` option of `ConstrainedInt`) will not accept `bool` types, even though `bool` is a subclass of `int` in Python. Other subclasses will work. - `StrictFloat` (and the `strict` option of `ConstrainedFloat`) will not accept `int`. ```py {!.tmp_examples/types_strict.py!} ``` _(This script is complete, it should run "as is")_ ## ByteSize You can use the `ByteSize` data type to convert byte string representation to raw bytes and print out human readable versions of the bytes as well. !!! info Note that `1b` will be parsed as "1 byte" and not "1 bit". ```py {!.tmp_examples/types_bytesize.py!} ``` _(This script is complete, it should run "as is")_ ## Custom Data Types You can also define your own custom data types. The classmethod `__get_validators__` will be called to get validators to parse and validate the input data. ```py {!.tmp_examples/types_custom_type.py!} ``` _(This script is complete, it should run "as is")_ pydantic-1.2/docs/usage/validators.md000066400000000000000000000101701357000400300177040ustar00rootroot00000000000000Custom validation and complex relationships between objects can be achieved using the `validator` decorator. ```py {!.tmp_examples/validators_simple.py!} ``` _(This script is complete, it should run "as is")_ A few things to note on validators: * validators are "class methods", so the first argument value they receive is the `UserModel` class, not an instance of `UserModel`. * the second argument is always the field value to validate; it can be named as you please * you can also add any subset of the following arguments to the signature (the names **must** match): * `values`: a dict containing the name-to-value mapping of any previously-validated fields * `config`: the model config * `field`: the field being validated * `**kwargs`: if provided, this will include the arguments above not explicitly listed in the signature * validators should either return the parsed value or raise a `ValueError`, `TypeError`, or `AssertionError` (``assert`` statements may be used). !!! warning If you make use of `assert` statements, keep in mind that running Python with the [`-O` optimization flag](https://docs.python.org/3/using/cmdline.html#cmdoption-o) disables `assert` statements, and **validators will stop working**. * where validators rely on other values, you should be aware that: - Validation is done in the order fields are defined. E.g. in the example above, `password2` has access to `password1` (and `name`), but `password1` does not have access to `password2`. See [Field Ordering](models.md#field-ordering) for more information on how fields are ordered - If validation fails on another field (or that field is missing) it will not be included in `values`, hence `if 'password1' in values and ...` in this example. 
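Condensed into a single sketch, the pattern described in the notes above looks like this (the model and field names are illustrative):

```py
from pydantic import BaseModel, ValidationError, validator

class UserModel(BaseModel):
    password1: str
    password2: str

    @validator('password2')
    def passwords_match(cls, v, values):
        # 'password1' is only present in values if it passed validation itself
        if 'password1' in values and v != values['password1']:
            raise ValueError('passwords do not match')
        return v

try:
    UserModel(password1='zxcvbn', password2='zxcvbn2')
except ValidationError as e:
    print(e)
```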
## Pre and per-item validators Validators can do a few more complex things: ```py {!.tmp_examples/validators_pre_item.py!} ``` _(This script is complete, it should run "as is")_ A few more things to note: * a single validator can be applied to multiple fields by passing it multiple field names * a single validator can also be called on *all* fields by passing the special value `'*'` * the keyword argument `pre` will cause the validator to be called prior to other validation * passing `each_item=True` will result in the validator being applied to individual values (e.g. of `List`, `Dict`, `Set`, etc.), rather than the whole object ## Validate Always For performance reasons, by default validators are not called for fields when a value is not supplied. However there are situations where it may be useful or required to always call the validator, e.g. to set a dynamic default value. ```py {!.tmp_examples/validators_always.py!} ``` _(This script is complete, it should run "as is")_ You'll often want to use this together with `pre`, since otherwise with `always=True` *pydantic* would try to validate the default `None` which would cause an error. ## Root Validators Validation can also be performed on the entire model's data. ```py {!.tmp_examples/validators_root.py!} ``` _(This script is complete, it should run "as is")_ As with field validators, root validators can have `pre=True`, in which case they're called before field validation occurs (and are provided with the raw input data), or `pre=False` (the default), in which case they're called after field validation. Field validation will not occur if `pre=True` root validators raise an error. As with field validators, "post" (i.e. `pre=False`) root validators will be called even if field validation fails; the `values` argument will be a dict containing the values which passed field validation and field defaults where applicable. ## Field Checks On class creation, validators are checked to confirm that the fields they specify actually exist on the model. Occasionally however this is undesirable: e.g. if you define a validator to validate fields on inheriting models. In this case you should set `check_fields=False` on the validator. ## Dataclass Validators Validators also work with *pydantic* dataclasses. ```py {!.tmp_examples/validators_dataclass.py!} ``` _(This script is complete, it should run "as is")_ pydantic-1.2/docs/version_1_release_notes.md000066400000000000000000000067421357000400300212570ustar00rootroot00000000000000After 2.5 years of development with contributions from over 80 people and 62 releases, *pydantic* has reached version 1! While the fundamentals of *pydantic* have remained unchanged since the previous release [v0.32.2](changelog.md#v0322-2019-08-17) (indeed, since *pydantic* began in early 2017); a number of things have changed which you may wish to be aware of while migrating to Version 1. Below is a list of significant changes, for a full list of changes see release notes for [v1.0b1](changelog.md#v10b1-2019-10-01), [v1.0b2](changelog.md#v10b2-2019-10-07), and [v1.0](changelog.md#v10-2019-10-23). ## What's new in pydantic v1 ### Root validators A new decorator [`root_validator`](usage/validators.md#root-validators) has been added to allow validation of entire models. ### Custom JSON encoding/decoding There are new `Config` settings to allow [Custom JSON (de)serialisation](usage/exporting_models.md#custom-json-deserialisation). 
This can allow alternative JSON implementations to be used with significantly improved performance. ### Boolean parsing The logic for [parsing and validating boolean values](usage/types.md#booleans) has been overhauled to only allow a defined set of values rather than allowing any value as it used to. ### URL parsing The logic for parsing URLs (and related objects like DSNs) has been completely re-written to provide more useful error messages, greater simplicity and more flexibility. ### Performance improvements Some less "clever" error handling and cleanup of how errors are wrapped (together with many other small changes) has improved the performance of *pydantic* by ~25%, see [samuelcolvin/pydantic#819](https://github.com/samuelcolvin/pydantic/pull/819). ### ORM mode improvements There are improvements to [`GetterDict`](usage/models.md#orm-mode-aka-arbitrary-class-instances) to make ORM mode easier to use and work with root validators, see [samuelcolvin/pydantic#822](https://github.com/samuelcolvin/pydantic/pull/822). ### Settings improvements There are a number of changes to how [`BaseSettings`](usage/settings.md) works: * `case_insensitive` has been renamed to `case_sensitive` and the default has changed to `case_sensitive = False` * the default for `env_prefix` has changed to an empty string, i.e. by default there's no prefix for environment variable lookups * aliases are no longer used when looking up environment variables, instead there's a new `env` setting for `Field()` or in `Config.fields`. ### Improvements to field ordering There are some subtle changes to the ordering of fields, see [Model field ordering](usage/models.md#field-ordering) for more details. ### Schema renamed to Field The function used for providing extra information about fields has been renamed from `Schema` to `Field`. The new name makes more sense since the method can be used to provide any sort of information and change the behaviour of the field, as well as add attributes which are used while [generating a model schema](usage/schema.md). ### Improved repr methods and devtools integration The `__repr__` and `__str__` method of models as well as most other public classes in *pydantic* have been altered to be consistent and informative. There's also new [integration with python-devtools](usage/devtools.md). ### Field constraints checks Constraints added to `Field()` which are not enforced now cause an error when a model is created, see [Unenforced Field constraints](usage/schema.md#unenforced-field-constraints) for more details and work-arounds. 
pydantic-1.2/mkdocs.yml000066400000000000000000000026641357000400300151720ustar00rootroot00000000000000site_name: pydantic site_description: Data validation and settings management using python 3.6 type hinting strict: true site_url: https://pydantic-docs.helpmanual.io/ theme: name: 'material' custom_dir: 'docs/theme' palette: primary: pink accent: pink logo: 'logo-white.svg' favicon: 'favicon.png' repo_name: samuelcolvin/pydantic repo_url: https://github.com/samuelcolvin/pydantic google_analytics: - 'UA-62733018-4' - 'auto' extra_css: - 'extra/terminal.css' - 'extra/ad.css' extra_javascript: - 'extra/redirects.js' - 'extra/ad.js' nav: - Overview: index.md - install.md - 'Version 1 release notes': version_1_release_notes.md - Usage: - usage/models.md - 'Field Types': usage/types.md - usage/validators.md - 'Model Config': usage/model_config.md - usage/schema.md - usage/exporting_models.md - usage/dataclasses.md - 'Settings management': usage/settings.md - usage/postponed_annotations.md - 'Usage with mypy': usage/mypy.md - 'Usage with devtools': usage/devtools.md - Contributing to pydantic: contributing.md - benchmarks.md - 'Mypy plugin': mypy_plugin.md - 'PyCharm plugin': pycharm_plugin.md - changelog.md markdown_extensions: - markdown.extensions.codehilite: guess_lang: false - markdown_include.include: base_path: docs - toc: permalink: 🔗 - admonition - codehilite - extra plugins: - search - exclude: glob: - _build/* - build/* - examples/* - requirements.txt pydantic-1.2/pydantic/000077500000000000000000000000001357000400300147725ustar00rootroot00000000000000pydantic-1.2/pydantic/__init__.py000066400000000000000000000006051357000400300171040ustar00rootroot00000000000000# flake8: noqa from . import dataclasses from .class_validators import root_validator, validator from .env_settings import BaseSettings from .error_wrappers import ValidationError from .errors import * from .fields import Field, Required, Schema from .main import * from .networks import * from .parse import Protocol from .tools import * from .types import * from .version import VERSION pydantic-1.2/pydantic/class_validators.py000066400000000000000000000313251357000400300207050ustar00rootroot00000000000000import warnings from collections import ChainMap from functools import wraps from inspect import Signature, signature from itertools import chain from types import FunctionType from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload from .errors import ConfigError from .typing import AnyCallable from .utils import in_ipython class Validator: __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields' def __init__( self, func: AnyCallable, pre: bool = False, each_item: bool = False, always: bool = False, check_fields: bool = False, ): self.func = func self.pre = pre self.each_item = each_item self.always = always self.check_fields = check_fields if TYPE_CHECKING: from .main import BaseConfig from .fields import ModelField from .types import ModelOrDc ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any] ValidatorsList = List[ValidatorCallable] ValidatorListDict = Dict[str, List[Validator]] _FUNCS: Set[str] = set() ROOT_KEY = '__root__' VALIDATOR_CONFIG_KEY = '__validator_config__' ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__' def validator( *fields: str, pre: bool = False, each_item: bool = False, always: bool = False, check_fields: bool = True, whole: bool = None, allow_reuse: bool = False, ) -> 
Callable[[AnyCallable], classmethod]: """ Decorate methods on the class indicating that they should be used to validate fields :param fields: which field(s) the method should be called on :param pre: whether or not this validator should be called before the standard validators (else after) :param each_item: for complex objects (sets, lists etc.) whether to validate individual elements rather than the whole object :param always: whether this method and other validators should be called even if the value is missing :param check_fields: whether to check that the fields actually exist on the model :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function """ if not fields: raise ConfigError('validator with no fields specified') elif isinstance(fields[0], FunctionType): raise ConfigError( "validators should be used with fields and keyword arguments, not bare. " # noqa: Q000 "E.g. usage should be `@validator('', ...)`" ) if whole is not None: warnings.warn( 'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead', DeprecationWarning, ) assert each_item is False, '"each_item" and "whole" conflict, remove "whole"' each_item = not whole def dec(f: AnyCallable) -> classmethod: f_cls = _prepare_validator(f, allow_reuse) setattr( f_cls, VALIDATOR_CONFIG_KEY, ( fields, Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields), ), ) return f_cls return dec @overload def root_validator(_func: AnyCallable) -> classmethod: ... @overload def root_validator(*, pre: bool = False) -> Callable[[AnyCallable], classmethod]: ... def root_validator( _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False ) -> Union[classmethod, Callable[[AnyCallable], classmethod]]: """ Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either before or after standard model parsing/validation is performed. """ if _func: f_cls = _prepare_validator(_func, allow_reuse) setattr(f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre)) return f_cls def dec(f: AnyCallable) -> classmethod: f_cls = _prepare_validator(f, allow_reuse) setattr(f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre)) return f_cls return dec def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> classmethod: """ Avoid validators with duplicated names since without this, validators can be overwritten silently which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False. """ f_cls = function if isinstance(function, classmethod) else classmethod(function) if not in_ipython() and not allow_reuse: ref = f_cls.__func__.__module__ + '.' 
+ f_cls.__func__.__qualname__ if ref in _FUNCS: raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`') _FUNCS.add(ref) return f_cls class ValidatorGroup: def __init__(self, validators: 'ValidatorListDict') -> None: self.validators = validators self.used_validators = {'*'} def get_validators(self, name: str) -> Optional[Dict[str, Validator]]: self.used_validators.add(name) validators = self.validators.get(name, []) if name != ROOT_KEY: validators += self.validators.get('*', []) if validators: return {v.func.__name__: v for v in validators} else: return None def check_for_unused(self) -> None: unused_validators = set( chain( *[ (v.func.__name__ for v in self.validators[f] if v.check_fields) for f in (self.validators.keys() - self.used_validators) ] ) ) if unused_validators: fn = ', '.join(unused_validators) raise ConfigError( f"Validators defined with incorrect fields: {fn} " # noqa: Q000 f"(use check_fields=False if you're inheriting from the model and intended this)" ) def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]: validators: Dict[str, List[Validator]] = {} for var_name, value in namespace.items(): validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None) if validator_config: fields, v = validator_config for field in fields: if field in validators: validators[field].append(v) else: validators[field] = [v] return validators def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[AnyCallable]]: pre_validators: List[AnyCallable] = [] post_validators: List[AnyCallable] = [] for name, value in namespace.items(): validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None) if validator_config: sig = signature(validator_config.func) args = list(sig.parameters.keys()) if args[0] == 'self': raise ConfigError( f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, ' f'should be: (cls, values).' ) if len(args) != 2: raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).') # check function signature if validator_config.pre: pre_validators.append(validator_config.func) else: post_validators.append(validator_config.func) return pre_validators, post_validators def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict': for field, field_validators in base_validators.items(): if field not in validators: validators[field] = [] validators[field] += field_validators return validators def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable': """ Make a generic function which calls a validator with the right arguments. Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow, hence this laborious way of doing things. It's done like this so validators don't all need **kwargs in their signature, eg. any combination of the arguments "values", "fields" and/or "config" are permitted. """ sig = signature(validator) args = list(sig.parameters.keys()) first_arg = args.pop(0) if first_arg == 'self': raise ConfigError( f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, ' f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.' 
) elif first_arg == 'cls': # assume the second argument is value return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) else: # assume the first argument was value which has already been removed return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList': return [make_generic_validator(f) for f in v_funcs if f] all_kwargs = {'values', 'field', 'config'} def _generic_validator_cls(validator: AnyCallable, sig: Signature, args: Set[str]) -> 'ValidatorCallable': # assume the first argument is value has_kwargs = False if 'kwargs' in args: has_kwargs = True args -= {'kwargs'} if not args.issubset(all_kwargs): raise ConfigError( f'Invalid signature for validator {validator}: {sig}, should be: ' f'(cls, value, values, config, field), "values", "config" and "field" are all optional.' ) if has_kwargs: return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) elif args == set(): return lambda cls, v, values, field, config: validator(cls, v) elif args == {'values'}: return lambda cls, v, values, field, config: validator(cls, v, values=values) elif args == {'field'}: return lambda cls, v, values, field, config: validator(cls, v, field=field) elif args == {'config'}: return lambda cls, v, values, field, config: validator(cls, v, config=config) elif args == {'values', 'field'}: return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field) elif args == {'values', 'config'}: return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config) elif args == {'field', 'config'}: return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config) else: # args == {'values', 'field', 'config'} return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) def _generic_validator_basic(validator: AnyCallable, sig: Signature, args: Set[str]) -> 'ValidatorCallable': has_kwargs = False if 'kwargs' in args: has_kwargs = True args -= {'kwargs'} if not args.issubset(all_kwargs): raise ConfigError( f'Invalid signature for validator {validator}: {sig}, should be: ' f'(value, values, config, field), "values", "config" and "field" are all optional.' 
) if has_kwargs: return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) elif args == set(): return lambda cls, v, values, field, config: validator(v) elif args == {'values'}: return lambda cls, v, values, field, config: validator(v, values=values) elif args == {'field'}: return lambda cls, v, values, field, config: validator(v, field=field) elif args == {'config'}: return lambda cls, v, values, field, config: validator(v, config=config) elif args == {'values', 'field'}: return lambda cls, v, values, field, config: validator(v, values=values, field=field) elif args == {'values', 'config'}: return lambda cls, v, values, field, config: validator(v, values=values, config=config) elif args == {'field', 'config'}: return lambda cls, v, values, field, config: validator(v, field=field, config=config) else: # args == {'values', 'field', 'config'} return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, classmethod]: all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__]) return { k: v for k, v in all_attributes.items() if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY) } pydantic-1.2/pydantic/color.py000066400000000000000000000376411357000400300164730ustar00rootroot00000000000000""" Color definitions are used as per CSS3 specification: http://www.w3.org/TR/css3-color/#svg-color A few colors have multiple names referring to the same colors, eg. `grey` and `gray` or `aqua` and `cyan`. In these cases the LAST color when sorted alphabetically takes preference, eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua". """ import math import re from colorsys import hls_to_rgb, rgb_to_hls from typing import TYPE_CHECKING, Any, Optional, Tuple, Union, cast from .errors import ColorError from .utils import Representation, almost_equal_floats if TYPE_CHECKING: from .typing import CallableGenerator, ReprArgs ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]] ColorType = Union[ColorTuple, str] HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]] class RGBA: """ Internal use only as a representation of a color. """ __slots__ = 'r', 'g', 'b', 'alpha', '_tuple' def __init__(self, r: float, g: float, b: float, alpha: Optional[float]): self.r = r self.g = g self.b = b self.alpha = alpha self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha) def __getitem__(self, item: Any) -> Any: return self._tuple[item] r_hex_short = re.compile(r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*') r_hex_long = re.compile(r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*') _r_255 = r'(\d{1,3}(?:\.\d+)?)' _r_comma = r'\s*,\s*' r_rgb = re.compile(fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*') _r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)' r_rgba = re.compile(fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*') _r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
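# Illustrative examples of strings the patterns above and below are written to
# accept (an informal sketch derived from reading the regexes, not an
# exhaustive specification; kept as comments so nothing here runs at import time):
#
#     r_hex_short  'fff', '#fff', '0xfffa'        (3 or 4 hex digits)
#     r_hex_long   'ffffff', '#ffffffaa'          (6 or 8 hex digits)
#     r_rgb        'rgb(255, 0, 0)'
#     r_rgba       'rgba(255, 0, 0, 0.5)'
#     r_hsl        'hsl(120, 100%, 50%)'
#     r_hsla       'hsl(120, 100%, 50%, 0.5)'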
_r_sl = r'(\d{1,3}(?:\.\d+)?)%' r_hsl = re.compile(fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*') r_hsla = re.compile(fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*') # colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'} rads = 2 * math.pi class Color(Representation): __slots__ = '_original', '_rgba' def __init__(self, value: ColorType) -> None: self._rgba: RGBA self._original: ColorType if isinstance(value, (tuple, list)): self._rgba = parse_tuple(value) elif isinstance(value, str): self._rgba = parse_str(value) else: raise ColorError(reason='value must be a tuple, list or string') # if we've got here value must be a valid color self._original = value def original(self) -> ColorType: """ Original value passed to Color """ return self._original def as_named(self, *, fallback: bool = False) -> str: if self._rgba.alpha is None: rgb = cast(Tuple[int, int, int], self.as_rgb_tuple()) try: return COLORS_BY_VALUE[rgb] except KeyError as e: if fallback: return self.as_hex() else: raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e else: return self.as_hex() def as_hex(self) -> str: """ Hex string representing the color; can be 3, 4, 6 or 8 characters depending on whether a "short" representation of the color is possible and whether there's an alpha channel. """ values = [float_to_255(c) for c in self._rgba[:3]] if self._rgba.alpha is not None: values.append(float_to_255(self._rgba.alpha)) as_hex = ''.join(f'{v:02x}' for v in values) if all(c in repeat_colors for c in values): as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2)) return '#' + as_hex def as_rgb(self) -> str: """ Color as an rgb(<r>, <g>, <b>) or rgba(<r>, <g>, <b>, <a>) string. """ if self._rgba.alpha is None: return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})' else: return ( f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, ' f'{round(self._alpha_float(), 2)})' ) def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple: """ Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is in the range 0 to 1. :param alpha: whether to include the alpha channel, options are None - (default) include alpha only if it's set (i.e. not None) True - always include alpha, False - always omit alpha, """ r, g, b = [float_to_255(c) for c in self._rgba[:3]] if alpha is None: if self._rgba.alpha is None: return r, g, b else: return r, g, b, self._alpha_float() elif alpha: return r, g, b, self._alpha_float() else: # alpha is False return r, g, b def as_hsl(self) -> str: """ Color as an hsl(<h>, <s>, <l>) or hsl(<h>, <s>, <l>, <a>) string. """ if self._rgba.alpha is None: h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore return f'hsl({h * 360:0.0f}, {s * 100:0.0f}%, {li * 100:0.0f}%)' else: h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore return f'hsl({h * 360:0.0f}, {s * 100:0.0f}%, {li * 100:0.0f}%, {round(a, 2)})' def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple: """ Color as an HSL or HSLA tuple, i.e. hue, saturation, lightness and optionally alpha; all elements are in the range 0 to 1. NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys.
:param alpha: whether to include the alpha channel, options are None - (default) include alpha only if it's set (i.e. not None) True - always include alpha, False - always omit alpha, """ h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) if alpha is None: if self._rgba.alpha is None: return h, s, l else: return h, s, l, self._alpha_float() if alpha: return h, s, l, self._alpha_float() else: # alpha is False return h, s, l def _alpha_float(self) -> float: return 1 if self._rgba.alpha is None else self._rgba.alpha @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls def __str__(self) -> str: return self.as_named(fallback=True) def __repr_args__(self) -> 'ReprArgs': return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] # type: ignore def parse_tuple(value: Tuple[Any, ...]) -> RGBA: """ Parse a tuple or list as a color. """ if len(value) == 3: r, g, b = [parse_color_value(v) for v in value] return RGBA(r, g, b, None) elif len(value) == 4: r, g, b = [parse_color_value(v) for v in value[:3]] return RGBA(r, g, b, parse_float_alpha(value[3])) else: raise ColorError(reason='tuples must have length 3 or 4') def parse_str(value: str) -> RGBA: """ Parse a string to an RGBA tuple, trying the following formats (in this order): * named color, see COLORS_BY_NAME below * hex short eg. `fff` (prefix can be `#`, `0x` or nothing) * hex long eg. `ffffff` (prefix can be `#`, `0x` or nothing) * `rgb(<r>, <g>, <b>)` * `rgba(<r>, <g>, <b>, <a>)` """ value_lower = value.lower() try: r, g, b = COLORS_BY_NAME[value_lower] except KeyError: pass else: return ints_to_rgba(r, g, b, None) m = r_hex_short.fullmatch(value_lower) if m: *rgb, a = m.groups() r, g, b = [int(v * 2, 16) for v in rgb] if a: alpha: Optional[float] = int(a * 2, 16) / 255 else: alpha = None return ints_to_rgba(r, g, b, alpha) m = r_hex_long.fullmatch(value_lower) if m: *rgb, a = m.groups() r, g, b = [int(v, 16) for v in rgb] if a: alpha = int(a, 16) / 255 else: alpha = None return ints_to_rgba(r, g, b, alpha) m = r_rgb.fullmatch(value_lower) if m: return ints_to_rgba(*m.groups(), None) # type: ignore m = r_rgba.fullmatch(value_lower) if m: return ints_to_rgba(*m.groups()) # type: ignore m = r_hsl.fullmatch(value_lower) if m: h, h_units, s, l_ = m.groups() return parse_hsl(h, h_units, s, l_) m = r_hsla.fullmatch(value_lower) if m: h, h_units, s, l_, a = m.groups() return parse_hsl(h, h_units, s, l_, parse_float_alpha(a)) raise ColorError(reason='string not recognised as a valid color') def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA: return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha)) def parse_color_value(value: Union[int, str], max_val: int = 255) -> float: """ Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number in the range 0 to 1 """ try: color = float(value) except ValueError: raise ColorError(reason='color values must be a valid number') if 0 <= color <= max_val: return color / max_val else: raise ColorError(reason=f'color values must be in the range 0 to {max_val}') def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]: """ Parse a value checking it's a valid float in the range 0 to 1 """ if value is None: return None try: if isinstance(value, str) and value.endswith('%'): alpha = float(value[:-1]) / 100 else: alpha = float(value) except ValueError: raise ColorError(reason='alpha values must be a valid float') if
almost_equal_floats(alpha, 1): return None elif 0 <= alpha <= 1: return alpha else: raise ColorError(reason='alpha values must be in the range 0 to 1') def parse_hsl(h: str, h_units: str, s: str, l: str, alpha: Optional[float] = None) -> RGBA: """ Parse raw hue, saturation, lightness and alpha values and convert to RGBA. """ s_value, l_value = parse_color_value(s, 100), parse_color_value(l, 100) h_value = float(h) if h_units in {None, 'deg'}: h_value = h_value % 360 / 360 elif h_units == 'rad': h_value = h_value % rads / rads else: # turns h_value = h_value % 1 r, g, b = hls_to_rgb(h_value, l_value, s_value) return RGBA(r, g, b, alpha) def float_to_255(c: float) -> int: return int(round(c * 255)) COLORS_BY_NAME = { 'aliceblue': (240, 248, 255), 'antiquewhite': (250, 235, 215), 'aqua': (0, 255, 255), 'aquamarine': (127, 255, 212), 'azure': (240, 255, 255), 'beige': (245, 245, 220), 'bisque': (255, 228, 196), 'black': (0, 0, 0), 'blanchedalmond': (255, 235, 205), 'blue': (0, 0, 255), 'blueviolet': (138, 43, 226), 'brown': (165, 42, 42), 'burlywood': (222, 184, 135), 'cadetblue': (95, 158, 160), 'chartreuse': (127, 255, 0), 'chocolate': (210, 105, 30), 'coral': (255, 127, 80), 'cornflowerblue': (100, 149, 237), 'cornsilk': (255, 248, 220), 'crimson': (220, 20, 60), 'cyan': (0, 255, 255), 'darkblue': (0, 0, 139), 'darkcyan': (0, 139, 139), 'darkgoldenrod': (184, 134, 11), 'darkgray': (169, 169, 169), 'darkgreen': (0, 100, 0), 'darkgrey': (169, 169, 169), 'darkkhaki': (189, 183, 107), 'darkmagenta': (139, 0, 139), 'darkolivegreen': (85, 107, 47), 'darkorange': (255, 140, 0), 'darkorchid': (153, 50, 204), 'darkred': (139, 0, 0), 'darksalmon': (233, 150, 122), 'darkseagreen': (143, 188, 143), 'darkslateblue': (72, 61, 139), 'darkslategray': (47, 79, 79), 'darkslategrey': (47, 79, 79), 'darkturquoise': (0, 206, 209), 'darkviolet': (148, 0, 211), 'deeppink': (255, 20, 147), 'deepskyblue': (0, 191, 255), 'dimgray': (105, 105, 105), 'dimgrey': (105, 105, 105), 'dodgerblue': (30, 144, 255), 'firebrick': (178, 34, 34), 'floralwhite': (255, 250, 240), 'forestgreen': (34, 139, 34), 'fuchsia': (255, 0, 255), 'gainsboro': (220, 220, 220), 'ghostwhite': (248, 248, 255), 'gold': (255, 215, 0), 'goldenrod': (218, 165, 32), 'gray': (128, 128, 128), 'green': (0, 128, 0), 'greenyellow': (173, 255, 47), 'grey': (128, 128, 128), 'honeydew': (240, 255, 240), 'hotpink': (255, 105, 180), 'indianred': (205, 92, 92), 'indigo': (75, 0, 130), 'ivory': (255, 255, 240), 'khaki': (240, 230, 140), 'lavender': (230, 230, 250), 'lavenderblush': (255, 240, 245), 'lawngreen': (124, 252, 0), 'lemonchiffon': (255, 250, 205), 'lightblue': (173, 216, 230), 'lightcoral': (240, 128, 128), 'lightcyan': (224, 255, 255), 'lightgoldenrodyellow': (250, 250, 210), 'lightgray': (211, 211, 211), 'lightgreen': (144, 238, 144), 'lightgrey': (211, 211, 211), 'lightpink': (255, 182, 193), 'lightsalmon': (255, 160, 122), 'lightseagreen': (32, 178, 170), 'lightskyblue': (135, 206, 250), 'lightslategray': (119, 136, 153), 'lightslategrey': (119, 136, 153), 'lightsteelblue': (176, 196, 222), 'lightyellow': (255, 255, 224), 'lime': (0, 255, 0), 'limegreen': (50, 205, 50), 'linen': (250, 240, 230), 'magenta': (255, 0, 255), 'maroon': (128, 0, 0), 'mediumaquamarine': (102, 205, 170), 'mediumblue': (0, 0, 205), 'mediumorchid': (186, 85, 211), 'mediumpurple': (147, 112, 219), 'mediumseagreen': (60, 179, 113), 'mediumslateblue': (123, 104, 238), 'mediumspringgreen': (0, 250, 154), 'mediumturquoise': (72, 209, 204), 'mediumvioletred': (199, 21, 133), 
'midnightblue': (25, 25, 112), 'mintcream': (245, 255, 250), 'mistyrose': (255, 228, 225), 'moccasin': (255, 228, 181), 'navajowhite': (255, 222, 173), 'navy': (0, 0, 128), 'oldlace': (253, 245, 230), 'olive': (128, 128, 0), 'olivedrab': (107, 142, 35), 'orange': (255, 165, 0), 'orangered': (255, 69, 0), 'orchid': (218, 112, 214), 'palegoldenrod': (238, 232, 170), 'palegreen': (152, 251, 152), 'paleturquoise': (175, 238, 238), 'palevioletred': (219, 112, 147), 'papayawhip': (255, 239, 213), 'peachpuff': (255, 218, 185), 'peru': (205, 133, 63), 'pink': (255, 192, 203), 'plum': (221, 160, 221), 'powderblue': (176, 224, 230), 'purple': (128, 0, 128), 'red': (255, 0, 0), 'rosybrown': (188, 143, 143), 'royalblue': (65, 105, 225), 'saddlebrown': (139, 69, 19), 'salmon': (250, 128, 114), 'sandybrown': (244, 164, 96), 'seagreen': (46, 139, 87), 'seashell': (255, 245, 238), 'sienna': (160, 82, 45), 'silver': (192, 192, 192), 'skyblue': (135, 206, 235), 'slateblue': (106, 90, 205), 'slategray': (112, 128, 144), 'slategrey': (112, 128, 144), 'snow': (255, 250, 250), 'springgreen': (0, 255, 127), 'steelblue': (70, 130, 180), 'tan': (210, 180, 140), 'teal': (0, 128, 128), 'thistle': (216, 191, 216), 'tomato': (255, 99, 71), 'turquoise': (64, 224, 208), 'violet': (238, 130, 238), 'wheat': (245, 222, 179), 'white': (255, 255, 255), 'whitesmoke': (245, 245, 245), 'yellow': (255, 255, 0), 'yellowgreen': (154, 205, 50), } COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()} pydantic-1.2/pydantic/dataclasses.py000066400000000000000000000112741357000400300176400ustar00rootroot00000000000000import dataclasses from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Optional, Type, TypeVar, Union from .class_validators import gather_all_validators from .error_wrappers import ValidationError from .errors import DataclassTypeError from .fields import Required from .main import create_model, validate_model from .typing import AnyType if TYPE_CHECKING: from .main import BaseModel # noqa: F401 DataclassT = TypeVar('DataclassT', bound='DataclassType') class DataclassType: __pydantic_model__: Type[BaseModel] __initialised__: bool def __init__(self, *args: Any, **kwargs: Any) -> None: pass @classmethod def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT': pass def __call__(self: 'DataclassT', *args: Any, **kwargs: Any) -> 'DataclassT': pass def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT': if isinstance(v, cls): return v elif isinstance(v, (list, tuple)): return cls(*v) elif isinstance(v, dict): return cls(**v) else: raise DataclassTypeError(class_name=cls.__name__) def _get_validators(cls: Type['DataclassT']) -> Generator[Any, None, None]: yield cls.__validate__ def setattr_validate_assignment(self: 'DataclassType', name: str, value: Any) -> None: if self.__initialised__: d = dict(self.__dict__) d.pop(name, None) known_field = self.__pydantic_model__.__fields__.get(name, None) if known_field: value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__) if error_: raise ValidationError([error_], type(self)) object.__setattr__(self, name, value) def _process_class( _cls: AnyType, init: bool, repr: bool, eq: bool, order: bool, unsafe_hash: bool, frozen: bool, config: Optional[Type[Any]], ) -> 'DataclassType': post_init_original = getattr(_cls, '__post_init__', None) if post_init_original and post_init_original.__name__ == '_pydantic_post_init': post_init_original = None if not post_init_original: post_init_original = getattr(_cls, 
'__post_init_original__', None) post_init_post_parse = getattr(_cls, '__post_init_post_parse__', None) def _pydantic_post_init(self: 'DataclassType', *initvars: Any) -> None: if post_init_original is not None: post_init_original(self, *initvars) d, _, validation_error = validate_model(self.__pydantic_model__, self.__dict__, cls=self.__class__) if validation_error: raise validation_error object.__setattr__(self, '__dict__', d) object.__setattr__(self, '__initialised__', True) if post_init_post_parse is not None: post_init_post_parse(self, *initvars) _cls.__post_init__ = _pydantic_post_init cls = dataclasses._process_class(_cls, init, repr, eq, order, unsafe_hash, frozen) # type: ignore fields: Dict[str, Any] = {} for field in dataclasses.fields(cls): if field.default != dataclasses.MISSING: field_value = field.default # mypy issue 7020 and 708 elif field.default_factory != dataclasses.MISSING: # type: ignore field_value = field.default_factory() # type: ignore else: field_value = Required fields[field.name] = (field.type, field_value) validators = gather_all_validators(cls) cls.__pydantic_model__ = create_model( cls.__name__, __config__=config, __module__=_cls.__module__, __validators__=validators, **fields ) cls.__initialised__ = False cls.__validate__ = classmethod(_validate_dataclass) cls.__get_validators__ = classmethod(_get_validators) if post_init_original: cls.__post_init_original__ = post_init_original if cls.__pydantic_model__.__config__.validate_assignment and not frozen: cls.__setattr__ = setattr_validate_assignment return cls def dataclass( _cls: Optional[AnyType] = None, *, init: bool = True, repr: bool = True, eq: bool = True, order: bool = False, unsafe_hash: bool = False, frozen: bool = False, config: Type[Any] = None, ) -> Union[Callable[[AnyType], 'DataclassType'], 'DataclassType']: """ Like the python standard lib dataclasses but with type validation. Arguments are the same as for standard dataclasses, except for the additional `config` argument, which has the same meaning as a model's `Config` class. """ def wrap(cls: AnyType) -> 'DataclassType': return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, config) if _cls is None: return wrap return wrap(_cls) pydantic-1.2/pydantic/datetime_parse.py000066400000000000000000000162411357000400300203360ustar00rootroot00000000000000""" Functions to parse datetime objects. We're using regular expressions rather than time.strptime because: - They provide both validation and parsing. - They're more flexible for datetimes. - The date/datetime/time constructors produce friendlier error messages. Stolen from https://raw.githubusercontent.com/django/django/master/django/utils/dateparse.py at 9718fa2e8abe430c3526a9278dd976443d4ae3c6 Changed to: * use standard python datetime types not django.utils.timezone * raise ValueError when regex doesn't match rather than returning None * support parsing unix timestamps for dates and datetimes """ import re from datetime import date, datetime, time, timedelta, timezone from typing import Dict, Union from . import errors date_re = re.compile(r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})$') time_re = re.compile( r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})' r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?' ) datetime_re = re.compile( r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})' r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})' r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?' r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$' ) standard_duration_re = re.compile( r'^' r'(?:(?P<days>-?\d+) (days?, )?)?' r'((?:(?P<hours>-?\d+):)(?=\d+:\d+))?' r'(?:(?P<minutes>-?\d+):)?'
r'(?P<seconds>-?\d+)' r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?' r'$' ) # Support the sections of ISO 8601 date representation that are accepted by timedelta iso8601_duration_re = re.compile( r'^(?P<sign>[-+]?)' r'P' r'(?:(?P<days>\d+(.\d+)?)D)?' r'(?:T' r'(?:(?P<hours>\d+(.\d+)?)H)?' r'(?:(?P<minutes>\d+(.\d+)?)M)?' r'(?:(?P<seconds>\d+(.\d+)?)S)?' r')?' r'$' ) EPOCH = datetime(1970, 1, 1) # if greater than this, the number is in ms, if less than or equal it's in seconds # (in seconds this is 11th October 2603, in ms it's 20th August 1970) MS_WATERSHED = int(2e10) StrBytesIntFloat = Union[str, bytes, int, float] def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: if isinstance(value, (int, float)): return value try: return float(value) except ValueError: return None except TypeError: raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float') def from_unix_seconds(seconds: Union[int, float]) -> datetime: while seconds > MS_WATERSHED: seconds /= 1000 dt = EPOCH + timedelta(seconds=seconds) return dt.replace(tzinfo=timezone.utc) def parse_date(value: Union[date, StrBytesIntFloat]) -> date: """ Parse a date/int/float/string and return a datetime.date. Raise ValueError if the input is well formatted but not a valid date. Raise ValueError if the input isn't well formatted. """ if isinstance(value, date): if isinstance(value, datetime): return value.date() else: return value number = get_numeric(value, 'date') if number is not None: return from_unix_seconds(number).date() if isinstance(value, bytes): value = value.decode() match = date_re.match(value) # type: ignore if match is None: raise errors.DateError() kw = {k: int(v) for k, v in match.groupdict().items()} try: return date(**kw) except ValueError: raise errors.DateError() def parse_time(value: Union[time, StrBytesIntFloat]) -> time: """ Parse a time/string and return a datetime.time. This function doesn't support time zone offsets. Raise ValueError if the input is well formatted but not a valid time. Raise ValueError if the input isn't well formatted, in particular if it contains an offset. """ if isinstance(value, time): return value number = get_numeric(value, 'time') if number is not None: if number >= 86400: # doesn't make sense since the time would loop back around to 0 raise errors.TimeError() return (datetime.min + timedelta(seconds=number)).time() if isinstance(value, bytes): value = value.decode() match = time_re.match(value) # type: ignore if match is None: raise errors.TimeError() kw = match.groupdict() if kw['microsecond']: kw['microsecond'] = kw['microsecond'].ljust(6, '0') kw_ = {k: int(v) for k, v in kw.items() if v is not None} try: return time(**kw_) # type: ignore except ValueError: raise errors.TimeError() def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: """ Parse a datetime/int/float/string and return a datetime.datetime. This function supports time zone offsets. When the input contains one, the output uses a timezone with a fixed offset from UTC. Raise ValueError if the input is well formatted but not a valid datetime. Raise ValueError if the input isn't well formatted.
""" if isinstance(value, datetime): return value number = get_numeric(value, 'datetime') if number is not None: return from_unix_seconds(number) if isinstance(value, bytes): value = value.decode() match = datetime_re.match(value) # type: ignore if match is None: raise errors.DateTimeError() kw = match.groupdict() if kw['microsecond']: kw['microsecond'] = kw['microsecond'].ljust(6, '0') tzinfo_str = kw.pop('tzinfo') if tzinfo_str == 'Z': tzinfo = timezone.utc elif tzinfo_str is not None: offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0 offset = 60 * int(tzinfo_str[1:3]) + offset_mins if tzinfo_str[0] == '-': offset = -offset tzinfo = timezone(timedelta(minutes=offset)) else: tzinfo = None kw_: Dict[str, Union[int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} kw_['tzinfo'] = tzinfo try: return datetime(**kw_) # type: ignore except ValueError: raise errors.DateTimeError() def parse_duration(value: StrBytesIntFloat) -> timedelta: """ Parse a duration int/float/string and return a datetime.timedelta. The preferred format for durations in Django is '%d %H:%M:%S.%f'. Also supports ISO 8601 representation. """ if isinstance(value, timedelta): return value if isinstance(value, (int, float)): # the code below requires a string value = str(value) elif isinstance(value, bytes): value = value.decode() try: match = standard_duration_re.match(value) or iso8601_duration_re.match(value) except TypeError: raise TypeError('invalid type; expected timedelta, string, bytes, int or float') if not match: raise errors.DurationError() kw = match.groupdict() sign = -1 if kw.pop('sign', '+') == '-' else 1 if kw.get('microseconds'): kw['microseconds'] = kw['microseconds'].ljust(6, '0') if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'): kw['microseconds'] = '-' + kw['microseconds'] kw_ = {k: float(v) for k, v in kw.items() if v is not None} return sign * timedelta(**kw_) # type: ignore pydantic-1.2/pydantic/env_settings.py000066400000000000000000000063601357000400300200610ustar00rootroot00000000000000import os import warnings from typing import Any, Dict, Iterable, Mapping, Optional from .fields import ModelField from .main import BaseModel, Extra from .typing import display_as_type from .utils import deep_update class SettingsError(ValueError): pass class BaseSettings(BaseModel): """ Base class for settings, allowing values to be overridden by environment variables. This is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose), Heroku and any 12 factor app design. """ def __init__(__pydantic_self__, **values: Any) -> None: # Uses something other than `self` as the first arg to allow "self" as a settable attribute super().__init__(**__pydantic_self__._build_values(values)) def _build_values(self, init_kwargs: Dict[str, Any]) -> Dict[str, Any]: return deep_update(self._build_environ(), init_kwargs) def _build_environ(self) -> Dict[str, Optional[str]]: """ Build environment variables suitable for passing to the Model.
""" d: Dict[str, Optional[str]] = {} if self.__config__.case_sensitive: env_vars: Mapping[str, str] = os.environ else: env_vars = {k.lower(): v for k, v in os.environ.items()} for field in self.__fields__.values(): env_val: Optional[str] = None for env_name in field.field_info.extra['env_names']: env_val = env_vars.get(env_name) if env_val is not None: break if env_val is None: continue if field.is_complex(): try: env_val = self.__config__.json_loads(env_val) # type: ignore except ValueError as e: raise SettingsError(f'error parsing JSON for "{env_name}"') from e d[field.alias] = env_val return d class Config: env_prefix = '' validate_all = True extra = Extra.forbid arbitrary_types_allowed = True case_sensitive = False @classmethod def prepare_field(cls, field: ModelField) -> None: env_names: Iterable[str] env = field.field_info.extra.pop('env', None) if env is None: if field.has_alias: warnings.warn( 'aliases are no longer used by BaseSettings to define which environment variables to read. ' 'Instead use the "env" field setting. ' 'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names', FutureWarning, ) env_names = [cls.env_prefix + field.name] elif isinstance(env, str): env_names = {env} elif isinstance(env, (list, set, tuple)): env_names = env else: raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set') if not cls.case_sensitive: env_names = type(env_names)(n.lower() for n in env_names) field.field_info.extra['env_names'] = env_names __config__: Config # type: ignore pydantic-1.2/pydantic/error_wrappers.py000066400000000000000000000115451357000400300204260ustar00rootroot00000000000000import json from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union from .json import pydantic_encoder from .utils import Representation if TYPE_CHECKING: from .main import BaseConfig # noqa: F401 from .types import ModelOrDc # noqa: F401 from .typing import ReprArgs Loc = Tuple[Union[int, str], ...] 
__all__ = 'ErrorWrapper', 'ValidationError' class ErrorWrapper(Representation): __slots__ = 'exc', '_loc' def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: self.exc = exc self._loc = loc def loc_tuple(self) -> 'Loc': if isinstance(self._loc, tuple): return self._loc else: return (self._loc,) def __repr_args__(self) -> 'ReprArgs': return [('exc', self.exc), ('loc', self.loc_tuple())] # ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] # but recursive, therefore just use: ErrorList = Union[Sequence[Any], ErrorWrapper] class ValidationError(Representation, ValueError): __slots__ = 'raw_errors', 'model', '_error_cache' def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None: self.raw_errors = errors self.model = model self._error_cache: Optional[List[Dict[str, Any]]] = None def errors(self) -> List[Dict[str, Any]]: if self._error_cache is None: try: config = self.model.__config__ # type: ignore except AttributeError: config = self.model.__pydantic_model__.__config__ # type: ignore self._error_cache = list(flatten_errors(self.raw_errors, config)) return self._error_cache def json(self, *, indent: Union[None, int, str] = 2) -> str: return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) def __str__(self) -> str: errors = self.errors() no_errors = len(errors) return ( f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n' f'{display_errors(errors)}' ) def __repr_args__(self) -> 'ReprArgs': return [('model', self.model.__name__), ('errors', self.errors())] def display_errors(errors: List[Dict[str, Any]]) -> str: return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) def _display_error_loc(error: Dict[str, Any]) -> str: return ' -> '.join(str(l) for l in error['loc']) def _display_error_type_and_ctx(error: Dict[str, Any]) -> str: t = 'type=' + error['type'] ctx = error.get('ctx') if ctx: return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) else: return t def flatten_errors( errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None ) -> Generator[Dict[str, Any], None, None]: for error in errors: if isinstance(error, ErrorWrapper): if loc: error_loc = loc + error.loc_tuple() else: error_loc = error.loc_tuple() if isinstance(error.exc, ValidationError): yield from flatten_errors(error.exc.raw_errors, config, error_loc) else: yield error_dict(error.exc, config, error_loc) elif isinstance(error, list): yield from flatten_errors(error, config, loc=loc) else: raise RuntimeError(f'Unknown error object: {error}') def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> Dict[str, Any]: type_ = get_exc_type(type(exc)) msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None) ctx = exc.__dict__ if msg_template: msg = msg_template.format(**ctx) else: msg = str(exc) d: Dict[str, Any] = {'loc': loc, 'msg': msg, 'type': type_} if ctx: d['ctx'] = ctx return d _EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} def get_exc_type(cls: Type[Exception]) -> str: # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up try: return _EXC_TYPE_CACHE[cls] except KeyError: r = _get_exc_type(cls) _EXC_TYPE_CACHE[cls] = r return r def _get_exc_type(cls: Type[Exception]) -> str: if issubclass(cls, AssertionError): return 'assertion_error' base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' if cls in 
(TypeError, ValueError): # just TypeError or ValueError, no extra code return base_name # if it's not a TypeError or ValueError, we just take the lowercase of the exception name # no chaining or snake case logic, use "code" for more complex error types. code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() return base_name + '.' + code pydantic-1.2/pydantic/errors.py000066400000000000000000000322311357000400300166610ustar00rootroot00000000000000from decimal import Decimal from pathlib import Path from typing import Any, Set, Union from .typing import AnyType, display_as_type # explicitly state exports to avoid "from .errors import *" also importing Decimal, Path etc. __all__ = ( 'PydanticTypeError', 'PydanticValueError', 'ConfigError', 'MissingError', 'ExtraError', 'NoneIsNotAllowedError', 'NoneIsAllowedError', 'WrongConstantError', 'BoolError', 'BytesError', 'DictError', 'EmailError', 'UrlError', 'UrlSchemeError', 'UrlSchemePermittedError', 'UrlUserInfoError', 'UrlHostError', 'UrlHostTldError', 'UrlExtraError', 'EnumError', 'IntegerError', 'FloatError', 'PathError', '_PathValueError', 'PathNotExistsError', 'PathNotAFileError', 'PathNotADirectoryError', 'PyObjectError', 'SequenceError', 'ListError', 'SetError', 'FrozenSetError', 'TupleError', 'TupleLengthError', 'ListMinLengthError', 'ListMaxLengthError', 'AnyStrMinLengthError', 'AnyStrMaxLengthError', 'StrError', 'StrRegexError', '_NumberBoundError', 'NumberNotGtError', 'NumberNotGeError', 'NumberNotLtError', 'NumberNotLeError', 'NumberNotMultipleError', 'DecimalError', 'DecimalIsNotFiniteError', 'DecimalMaxDigitsError', 'DecimalMaxPlacesError', 'DecimalWholeDigitsError', 'DateTimeError', 'DateError', 'TimeError', 'DurationError', 'UUIDError', 'UUIDVersionError', 'ArbitraryTypeError', 'ClassError', 'SubclassError', 'JsonError', 'JsonTypeError', 'PatternError', 'DataclassTypeError', 'CallableError', 'IPvAnyAddressError', 'IPvAnyInterfaceError', 'IPvAnyNetworkError', 'IPv4AddressError', 'IPv6AddressError', 'IPv4NetworkError', 'IPv6NetworkError', 'IPv4InterfaceError', 'IPv6InterfaceError', 'ColorError', 'StrictBoolError', 'NotDigitError', 'LuhnValidationError', 'InvalidLengthForBrand', 'InvalidByteSize', 'InvalidByteSizeUnit', ) class PydanticErrorMixin: code: str msg_template: str def __init__(self, **ctx: Any) -> None: self.__dict__ = ctx def __str__(self) -> str: return self.msg_template.format(**self.__dict__) class PydanticTypeError(PydanticErrorMixin, TypeError): pass class PydanticValueError(PydanticErrorMixin, ValueError): pass class ConfigError(RuntimeError): pass class MissingError(PydanticValueError): msg_template = 'field required' class ExtraError(PydanticValueError): msg_template = 'extra fields not permitted' class NoneIsNotAllowedError(PydanticTypeError): code = 'none.not_allowed' msg_template = 'none is not an allowed value' class NoneIsAllowedError(PydanticTypeError): code = 'none.allowed' msg_template = 'value is not none' class WrongConstantError(PydanticValueError): code = 'const' def __str__(self) -> str: permitted = ', '.join(repr(v) for v in self.permitted) # type: ignore return f'unexpected value; permitted: {permitted}' class BoolError(PydanticTypeError): msg_template = 'value could not be parsed to a boolean' class BytesError(PydanticTypeError): msg_template = 'byte type expected' class DictError(PydanticTypeError): msg_template = 'value is not a valid dict' class EmailError(PydanticValueError): msg_template = 'value is not a valid email address' class UrlError(PydanticValueError): code = 
'url' class UrlSchemeError(UrlError): code = 'url.scheme' msg_template = 'invalid or missing URL scheme' class UrlSchemePermittedError(UrlError): code = 'url.scheme' msg_template = 'URL scheme not permitted' def __init__(self, allowed_schemes: Set[str]): super().__init__(allowed_schemes=allowed_schemes) class UrlUserInfoError(UrlError): code = 'url.userinfo' msg_template = 'userinfo required in URL but missing' class UrlHostError(UrlError): code = 'url.host' msg_template = 'URL host invalid' class UrlHostTldError(UrlError): code = 'url.host' msg_template = 'URL host invalid, top level domain required' class UrlExtraError(UrlError): code = 'url.extra' msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}' class EnumError(PydanticTypeError): def __str__(self) -> str: permitted = ', '.join(repr(v.value) for v in self.enum_values) # type: ignore return f'value is not a valid enumeration member; permitted: {permitted}' class IntegerError(PydanticTypeError): msg_template = 'value is not a valid integer' class FloatError(PydanticTypeError): msg_template = 'value is not a valid float' class PathError(PydanticTypeError): msg_template = 'value is not a valid path' class _PathValueError(PydanticValueError): def __init__(self, *, path: Path) -> None: super().__init__(path=str(path)) class PathNotExistsError(_PathValueError): code = 'path.not_exists' msg_template = 'file or directory at path "{path}" does not exist' class PathNotAFileError(_PathValueError): code = 'path.not_a_file' msg_template = 'path "{path}" does not point to a file' class PathNotADirectoryError(_PathValueError): code = 'path.not_a_directory' msg_template = 'path "{path}" does not point to a directory' class PyObjectError(PydanticTypeError): msg_template = 'ensure this value contains valid import path or valid callable: {error_message}' class SequenceError(PydanticTypeError): msg_template = 'value is not a valid sequence' class ListError(PydanticTypeError): msg_template = 'value is not a valid list' class SetError(PydanticTypeError): msg_template = 'value is not a valid set' class FrozenSetError(PydanticTypeError): msg_template = 'value is not a valid frozenset' class TupleError(PydanticTypeError): msg_template = 'value is not a valid tuple' class TupleLengthError(PydanticValueError): code = 'tuple.length' msg_template = 'wrong tuple length {actual_length}, expected {expected_length}' def __init__(self, *, actual_length: int, expected_length: int) -> None: super().__init__(actual_length=actual_length, expected_length=expected_length) class ListMinLengthError(PydanticValueError): code = 'list.min_items' msg_template = 'ensure this value has at least {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class ListMaxLengthError(PydanticValueError): code = 'list.max_items' msg_template = 'ensure this value has at most {limit_value} items' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class AnyStrMinLengthError(PydanticValueError): code = 'any_str.min_length' msg_template = 'ensure this value has at least {limit_value} characters' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class AnyStrMaxLengthError(PydanticValueError): code = 'any_str.max_length' msg_template = 'ensure this value has at most {limit_value} characters' def __init__(self, *, limit_value: int) -> None: super().__init__(limit_value=limit_value) class StrError(PydanticTypeError): msg_template = 'str 
type expected' class StrRegexError(PydanticValueError): code = 'str.regex' msg_template = 'string does not match regex "{pattern}"' def __init__(self, *, pattern: str) -> None: super().__init__(pattern=pattern) class _NumberBoundError(PydanticValueError): def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None: super().__init__(limit_value=limit_value) class NumberNotGtError(_NumberBoundError): code = 'number.not_gt' msg_template = 'ensure this value is greater than {limit_value}' class NumberNotGeError(_NumberBoundError): code = 'number.not_ge' msg_template = 'ensure this value is greater than or equal to {limit_value}' class NumberNotLtError(_NumberBoundError): code = 'number.not_lt' msg_template = 'ensure this value is less than {limit_value}' class NumberNotLeError(_NumberBoundError): code = 'number.not_le' msg_template = 'ensure this value is less than or equal to {limit_value}' class NumberNotMultipleError(PydanticValueError): code = 'number.not_multiple' msg_template = 'ensure this value is a multiple of {multiple_of}' def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None: super().__init__(multiple_of=multiple_of) class DecimalError(PydanticTypeError): msg_template = 'value is not a valid decimal' class DecimalIsNotFiniteError(PydanticValueError): code = 'decimal.not_finite' msg_template = 'value is not a valid decimal' class DecimalMaxDigitsError(PydanticValueError): code = 'decimal.max_digits' msg_template = 'ensure that there are no more than {max_digits} digits in total' def __init__(self, *, max_digits: int) -> None: super().__init__(max_digits=max_digits) class DecimalMaxPlacesError(PydanticValueError): code = 'decimal.max_places' msg_template = 'ensure that there are no more than {decimal_places} decimal places' def __init__(self, *, decimal_places: int) -> None: super().__init__(decimal_places=decimal_places) class DecimalWholeDigitsError(PydanticValueError): code = 'decimal.whole_digits' msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point' def __init__(self, *, whole_digits: int) -> None: super().__init__(whole_digits=whole_digits) class DateTimeError(PydanticValueError): msg_template = 'invalid datetime format' class DateError(PydanticValueError): msg_template = 'invalid date format' class TimeError(PydanticValueError): msg_template = 'invalid time format' class DurationError(PydanticValueError): msg_template = 'invalid duration format' class UUIDError(PydanticTypeError): msg_template = 'value is not a valid uuid' class UUIDVersionError(PydanticValueError): code = 'uuid.version' msg_template = 'uuid version {required_version} expected' def __init__(self, *, required_version: int) -> None: super().__init__(required_version=required_version) class ArbitraryTypeError(PydanticTypeError): code = 'arbitrary_type' msg_template = 'instance of {expected_arbitrary_type} expected' def __init__(self, *, expected_arbitrary_type: AnyType) -> None: super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type)) class ClassError(PydanticTypeError): code = 'class' msg_template = 'a class is expected' class SubclassError(PydanticTypeError): code = 'subclass' msg_template = 'subclass of {expected_class} expected' def __init__(self, *, expected_class: AnyType) -> None: super().__init__(expected_class=display_as_type(expected_class)) class JsonError(PydanticValueError): msg_template = 'Invalid JSON' class JsonTypeError(PydanticTypeError): code = 'json' msg_template = 'JSON object must be str, bytes 
or bytearray' class PatternError(PydanticValueError): code = 'regex_pattern' msg_template = 'Invalid regular expression' class DataclassTypeError(PydanticTypeError): code = 'dataclass' msg_template = 'instance of {class_name}, tuple or dict expected' class CallableError(PydanticTypeError): msg_template = '{value} is not callable' class IPvAnyAddressError(PydanticValueError): msg_template = 'value is not a valid IPv4 or IPv6 address' class IPvAnyInterfaceError(PydanticValueError): msg_template = 'value is not a valid IPv4 or IPv6 interface' class IPvAnyNetworkError(PydanticValueError): msg_template = 'value is not a valid IPv4 or IPv6 network' class IPv4AddressError(PydanticValueError): msg_template = 'value is not a valid IPv4 address' class IPv6AddressError(PydanticValueError): msg_template = 'value is not a valid IPv6 address' class IPv4NetworkError(PydanticValueError): msg_template = 'value is not a valid IPv4 network' class IPv6NetworkError(PydanticValueError): msg_template = 'value is not a valid IPv6 network' class IPv4InterfaceError(PydanticValueError): msg_template = 'value is not a valid IPv4 interface' class IPv6InterfaceError(PydanticValueError): msg_template = 'value is not a valid IPv6 interface' class ColorError(PydanticValueError): msg_template = 'value is not a valid color: {reason}' class StrictBoolError(PydanticValueError): msg_template = 'value is not a valid boolean' class NotDigitError(PydanticValueError): code = 'payment_card_number.digits' msg_template = 'card number is not all digits' class LuhnValidationError(PydanticValueError): code = 'payment_card_number.luhn_check' msg_template = 'card number is not luhn valid' class InvalidLengthForBrand(PydanticValueError): code = 'payment_card_number.invalid_length_for_brand' msg_template = 'Length for a {brand} card must be {required_length}' class InvalidByteSize(PydanticValueError): msg_template = 'could not parse value and unit from byte string' class InvalidByteSizeUnit(PydanticValueError): msg_template = 'could not interpret byte unit: {unit}' pydantic-1.2/pydantic/fields.py000066400000000000000000000574601357000400300166260ustar00rootroot00000000000000import warnings from typing import ( TYPE_CHECKING, Any, Dict, FrozenSet, Generator, Iterator, List, Mapping, Optional, Pattern, Sequence, Set, Tuple, Type, TypeVar, Union, cast, ) from . import errors as errors_ from .class_validators import Validator, make_generic_validator, prep_validators from .error_wrappers import ErrorWrapper from .errors import NoneIsNotAllowedError from .types import Json, JsonWrapper from .typing import AnyType, Callable, ForwardRef, NoneType, display_as_type, is_literal_type from .utils import PyObjectStr, Representation, lenient_issubclass, sequence_like from .validators import constant_validator, dict_validator, find_validators, validate_json Required: Any = Ellipsis class UndefinedType: def __repr__(self) -> str: return 'PydanticUndefined' Undefined = UndefinedType() if TYPE_CHECKING: from .class_validators import ValidatorsList # noqa: F401 from .error_wrappers import ErrorList from .main import BaseConfig, BaseModel # noqa: F401 from .types import ModelOrDc # noqa: F401 from .typing import ReprArgs # noqa: F401 ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]] LocStr = Union[Tuple[Union[int, str], ...], str] BoolUndefined = Union[bool, UndefinedType] class FieldInfo(Representation): """ Captures extra information about a field. 
""" __slots__ = ( 'default', 'alias', 'title', 'description', 'const', 'gt', 'ge', 'lt', 'le', 'multiple_of', 'min_items', 'max_items', 'min_length', 'max_length', 'regex', 'extra', ) def __init__(self, default: Any, **kwargs: Any) -> None: self.default = default self.alias = kwargs.pop('alias', None) self.title = kwargs.pop('title', None) self.description = kwargs.pop('description', None) self.const = kwargs.pop('const', None) self.gt = kwargs.pop('gt', None) self.ge = kwargs.pop('ge', None) self.lt = kwargs.pop('lt', None) self.le = kwargs.pop('le', None) self.multiple_of = kwargs.pop('multiple_of', None) self.min_items = kwargs.pop('min_items', None) self.max_items = kwargs.pop('max_items', None) self.min_length = kwargs.pop('min_length', None) self.max_length = kwargs.pop('max_length', None) self.regex = kwargs.pop('regex', None) self.extra = kwargs def Field( default: Any, *, alias: str = None, title: str = None, description: str = None, const: bool = None, gt: float = None, ge: float = None, lt: float = None, le: float = None, multiple_of: float = None, min_items: int = None, max_items: int = None, min_length: int = None, max_length: int = None, regex: str = None, **extra: Any, ) -> Any: """ Used to provide extra information about a field, either for the model schema or complex valiation. Some arguments apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``. :param default: since this is replacing the field’s default, its first argument is used to set the default, use ellipsis (``...``) to indicate the field is required :param alias: the public name of the field :param title: can be any string, used in the schema :param description: can be any string, used in the schema :param const: this field is required and *must* take it's default value :param gt: only applies to numbers, requires the field to be "greater than". The schema will have an ``exclusiveMinimum`` validation keyword :param ge: only applies to numbers, requires the field to be "greater than or equal to". The schema will have a ``minimum`` validation keyword :param lt: only applies to numbers, requires the field to be "less than". The schema will have an ``exclusiveMaximum`` validation keyword :param le: only applies to numbers, requires the field to be "less than or equal to". The schema will have a ``maximum`` validation keyword :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The schema will have a ``multipleOf`` validation keyword :param min_length: only applies to strings, requires the field to have a minimum length. The schema will have a ``maximum`` validation keyword :param max_length: only applies to strings, requires the field to have a maximum length. The schema will have a ``maxLength`` validation keyword :param regex: only applies to strings, requires the field match agains a regular expression pattern string. 
The schema will have a ``pattern`` validation keyword :param **extra: any additional keyword arguments will be added as is to the schema """ return FieldInfo( default, alias=alias, title=title, description=description, const=const, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, min_items=min_items, max_items=max_items, min_length=min_length, max_length=max_length, regex=regex, **extra, ) def Schema(default: Any, **kwargs: Any) -> Any: warnings.warn('`Schema` is deprecated, use `Field` instead', DeprecationWarning) return Field(default, **kwargs) # used to be an enum but changed to int's for small performance improvement as less access overhead SHAPE_SINGLETON = 1 SHAPE_LIST = 2 SHAPE_SET = 3 SHAPE_MAPPING = 4 SHAPE_TUPLE = 5 SHAPE_TUPLE_ELLIPSIS = 6 SHAPE_SEQUENCE = 7 SHAPE_FROZENSET = 8 SHAPE_NAME_LOOKUP = { SHAPE_LIST: 'List[{}]', SHAPE_SET: 'Set[{}]', SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]', SHAPE_SEQUENCE: 'Sequence[{}]', SHAPE_FROZENSET: 'FrozenSet[{}]', } class ModelField(Representation): __slots__ = ( 'type_', 'sub_fields', 'key_field', 'validators', 'pre_validators', 'post_validators', 'default', 'required', 'model_config', 'name', 'alias', 'has_alias', 'field_info', 'validate_always', 'allow_none', 'shape', 'class_validators', 'parse_json', ) def __init__( self, *, name: str, type_: AnyType, class_validators: Optional[Dict[str, Validator]], model_config: Type['BaseConfig'], default: Any = None, required: 'BoolUndefined' = Undefined, alias: str = None, field_info: Optional[FieldInfo] = None, ) -> None: self.name: str = name self.has_alias: bool = bool(alias) self.alias: str = alias or name self.type_: Any = type_ self.class_validators = class_validators or {} self.default: Any = default self.required: 'BoolUndefined' = required self.model_config = model_config self.field_info: FieldInfo = field_info or FieldInfo(default) self.allow_none: bool = False self.validate_always: bool = False self.sub_fields: Optional[List[ModelField]] = None self.key_field: Optional[ModelField] = None self.validators: 'ValidatorsList' = [] self.pre_validators: Optional['ValidatorsList'] = None self.post_validators: Optional['ValidatorsList'] = None self.parse_json: bool = False self.shape: int = SHAPE_SINGLETON self.model_config.prepare_field(self) self.prepare() @classmethod def infer( cls, *, name: str, value: Any, annotation: Any, class_validators: Optional[Dict[str, Validator]], config: Type['BaseConfig'], ) -> 'ModelField': field_info_from_config = config.get_field_info(name) from .schema import get_annotation_from_field_info if isinstance(value, FieldInfo): field_info = value value = field_info.default else: field_info = FieldInfo(value, **field_info_from_config) required: 'BoolUndefined' = Undefined if value is Required: required = True value = None elif value is not Undefined: required = False field_info.alias = field_info.alias or field_info_from_config.get('alias') annotation = get_annotation_from_field_info(annotation, field_info, name) return cls( name=name, type_=annotation, alias=field_info.alias, class_validators=class_validators, default=value, required=required, model_config=config, field_info=field_info, ) def set_config(self, config: Type['BaseConfig']) -> None: self.model_config = config info_from_config = config.get_field_info(self.name) if info_from_config: self.field_info.alias = info_from_config.get('alias') or self.field_info.alias or self.name self.alias = cast(str, self.field_info.alias) @property def alt_alias(self) -> bool: return self.name != self.alias def prepare(self) 
-> None: """ Prepare the field but inspecting self.default, self.type_ etc. Note: this method is **not** idempotent (because _type_analysis is not idempotent), e.g. calling it it multiple times may modify the field and configure it incorrectly. """ if self.default is not None and self.type_ is None: self.type_ = type(self.default) if self.type_ is None: raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') if type(self.type_) == ForwardRef: # self.type_ is currently a ForwardRef and there's nothing we can do now, # user will need to call model.update_forward_refs() return self.validate_always = getattr(self.type_, 'validate_always', False) or any( v.always for v in self.class_validators.values() ) if self.required is False and self.default is None: self.allow_none = True self._type_analysis() if self.required is Undefined: self.required = True self.field_info.default = Required if self.default is Undefined: self.default = None self.populate_validators() def _type_analysis(self) -> None: # noqa: C901 (ignore complexity) # typing interface is horrible, we have to do some ugly checks if lenient_issubclass(self.type_, JsonWrapper): self.type_ = self.type_.inner_type self.parse_json = True elif lenient_issubclass(self.type_, Json): self.type_ = Any self.parse_json = True elif isinstance(self.type_, TypeVar): # type: ignore if self.type_.__bound__: self.type_ = self.type_.__bound__ elif self.type_.__constraints__: self.type_ = Union[self.type_.__constraints__] else: self.type_ = Any if self.type_ is Any: if self.required is Undefined: self.required = False self.allow_none = True return elif self.type_ is Pattern: # python 3.7 only, Pattern is a typing object but without sub fields return elif is_literal_type(self.type_): return origin = getattr(self.type_, '__origin__', None) if origin is None: # field is not "typing" object eg. Union, Dict, List etc. 
return if origin is Callable: return if origin is Union: types_ = [] for type_ in self.type_.__args__: if type_ is NoneType: # type: ignore if self.required is Undefined: self.required = False self.allow_none = True continue types_.append(type_) if len(types_) == 1: self.type_ = types_[0] # re-run to correctly interpret the new self.type_ self._type_analysis() else: self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_] return if issubclass(origin, Tuple): # type: ignore self.shape = SHAPE_TUPLE self.sub_fields = [] for i, t in enumerate(self.type_.__args__): if t is Ellipsis: self.type_ = self.type_.__args__[0] self.shape = SHAPE_TUPLE_ELLIPSIS return self.sub_fields.append(self._create_sub_type(t, f'{self.name}_{i}')) return if issubclass(origin, List): # Create self validators get_validators = getattr(self.type_, '__get_validators__', None) if get_validators: self.class_validators.update( { f'list_{i}': Validator(validator, pre=True, always=True) for i, validator in enumerate(get_validators()) } ) self.type_ = self.type_.__args__[0] self.shape = SHAPE_LIST elif issubclass(origin, Set): self.type_ = self.type_.__args__[0] self.shape = SHAPE_SET elif issubclass(origin, FrozenSet): self.type_ = self.type_.__args__[0] self.shape = SHAPE_FROZENSET elif issubclass(origin, Sequence): self.type_ = self.type_.__args__[0] self.shape = SHAPE_SEQUENCE elif issubclass(origin, Mapping): self.key_field = self._create_sub_type(self.type_.__args__[0], 'key_' + self.name, for_keys=True) self.type_ = self.type_.__args__[1] self.shape = SHAPE_MAPPING elif issubclass(origin, Type): # type: ignore return else: raise TypeError(f'Fields of type "{origin}" are not supported.') # type_ has been refined eg. as the type of a List and sub_fields needs to be populated self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)] def _create_sub_type(self, type_: AnyType, name: str, *, for_keys: bool = False) -> 'ModelField': return self.__class__( type_=type_, name=name, class_validators=None if for_keys else {k: v for k, v in self.class_validators.items() if v.each_item}, model_config=self.model_config, ) def populate_validators(self) -> None: """ Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__ and class validators. This method should be idempotent, e.g. it should be safe to call multiple times without mis-configuring the field. 
""" class_validators_ = self.class_validators.values() if not self.sub_fields: get_validators = getattr(self.type_, '__get_validators__', None) v_funcs = ( *[v.func for v in class_validators_ if v.each_item and v.pre], *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))), *[v.func for v in class_validators_ if v.each_item and not v.pre], ) self.validators = prep_validators(v_funcs) # Add const validator self.pre_validators = [] self.post_validators = [] if self.field_info and self.field_info.const: self.pre_validators = [make_generic_validator(constant_validator)] if class_validators_: self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) self.post_validators = prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) if self.parse_json: self.pre_validators.append(make_generic_validator(validate_json)) self.pre_validators = self.pre_validators or None self.post_validators = self.post_validators or None def validate( self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None ) -> 'ValidateReturn': errors: Optional['ErrorList'] if self.pre_validators: v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators) if errors: return v, errors if v is None: if self.allow_none: if self.post_validators: return self._apply_validators(v, values, loc, cls, self.post_validators) else: return None, None else: return v, ErrorWrapper(NoneIsNotAllowedError(), loc) if self.shape == SHAPE_SINGLETON: v, errors = self._validate_singleton(v, values, loc, cls) elif self.shape == SHAPE_MAPPING: v, errors = self._validate_mapping(v, values, loc, cls) elif self.shape == SHAPE_TUPLE: v, errors = self._validate_tuple(v, values, loc, cls) else: # sequence, list, set, generator, tuple with ellipsis, frozen set v, errors = self._validate_sequence_like(v, values, loc, cls) if not errors and self.post_validators: v, errors = self._apply_validators(v, values, loc, cls, self.post_validators) return v, errors def _validate_sequence_like( # noqa: C901 (ignore complexity) self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': """ Validate sequence-like containers: lists, tuples, sets and generators Note that large if-else blocks are necessary to enable Cython optimization, which is why we disable the complexity check above. 
""" if not sequence_like(v): e: errors_.PydanticTypeError if self.shape == SHAPE_LIST: e = errors_.ListError() elif self.shape == SHAPE_SET: e = errors_.SetError() elif self.shape == SHAPE_FROZENSET: e = errors_.FrozenSetError() else: e = errors_.SequenceError() return v, ErrorWrapper(e, loc) loc = loc if isinstance(loc, tuple) else (loc,) result = [] errors: List[ErrorList] = [] for i, v_ in enumerate(v): v_loc = *loc, i r, ee = self._validate_singleton(v_, values, v_loc, cls) if ee: errors.append(ee) else: result.append(r) if errors: return v, errors converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any]] = result if self.shape == SHAPE_SET: converted = set(result) elif self.shape == SHAPE_FROZENSET: converted = frozenset(result) elif self.shape == SHAPE_TUPLE_ELLIPSIS: converted = tuple(result) elif self.shape == SHAPE_SEQUENCE: if isinstance(v, tuple): converted = tuple(result) elif isinstance(v, set): converted = set(result) elif isinstance(v, Generator): converted = iter(result) return converted, None def _validate_tuple( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': e: Optional[Exception] = None if not sequence_like(v): e = errors_.TupleError() else: actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore if actual_length != expected_length: e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length) if e: return v, ErrorWrapper(e, loc) loc = loc if isinstance(loc, tuple) else (loc,) result = [] errors: List[ErrorList] = [] for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore v_loc = *loc, i r, ee = field.validate(v_, values, loc=v_loc, cls=cls) if ee: errors.append(ee) else: result.append(r) if errors: return v, errors else: return tuple(result), None def _validate_mapping( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': try: v_iter = dict_validator(v) except TypeError as exc: return v, ErrorWrapper(exc, loc) loc = loc if isinstance(loc, tuple) else (loc,) result, errors = {}, [] for k, v_ in v_iter.items(): v_loc = *loc, '__key__' key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore if key_errors: errors.append(key_errors) continue v_loc = *loc, k value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls) if value_errors: errors.append(value_errors) continue result[key_result] = value_result if errors: return v, errors else: return result, None def _validate_singleton( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] ) -> 'ValidateReturn': if self.sub_fields: errors = [] for field in self.sub_fields: value, error = field.validate(v, values, loc=loc, cls=cls) if error: errors.append(error) else: return value, None return v, errors else: return self._apply_validators(v, values, loc, cls, self.validators) def _apply_validators( self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList' ) -> 'ValidateReturn': for validator in validators: try: v = validator(cls, v, values, self, self.model_config) except (ValueError, TypeError, AssertionError) as exc: return v, ErrorWrapper(exc, loc) return v, None def include_in_schema(self) -> bool: """ False if this is a simple field just allowing None as used in Unions/Optional. """ return self.type_ != NoneType # type: ignore def is_complex(self) -> bool: """ Whether the field is "complex" eg. 
env variables should be parsed as JSON. """ from .main import BaseModel # noqa: F811 return ( self.shape != SHAPE_SINGLETON or lenient_issubclass(self.type_, (BaseModel, list, set, dict)) or hasattr(self.type_, '__pydantic_model__') # pydantic dataclass ) def _type_display(self) -> PyObjectStr: t = display_as_type(self.type_) if self.shape == SHAPE_MAPPING: t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]' # type: ignore elif self.shape == SHAPE_TUPLE: t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields)) # type: ignore elif self.shape != SHAPE_SINGLETON: t = SHAPE_NAME_LOOKUP[self.shape].format(t) if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields): t = f'Optional[{t}]' return PyObjectStr(t) def __repr_args__(self) -> 'ReprArgs': args = [('name', self.name), ('type', self._type_display()), ('required', self.required)] if not self.required: args.append(('default', self.default)) if self.alt_alias: args.append(('alias', self.alias)) return args pydantic-1.2/pydantic/generics.py000066400000000000000000000075631357000400300171560ustar00rootroot00000000000000from typing import Any, ClassVar, Dict, Generic, Tuple, Type, TypeVar, Union, get_type_hints from .class_validators import gather_all_validators from .main import BaseModel, create_model _generic_types_cache: Dict[Tuple[Type[Any], Union[Any, Tuple[Any, ...]]], Type[BaseModel]] = {} GenericModelT = TypeVar('GenericModelT', bound='GenericModel') class GenericModel(BaseModel): __slots__ = () __concrete__: ClassVar[bool] = False def __new__(cls, *args: Any, **kwargs: Any) -> Any: if cls.__concrete__: return super().__new__(cls) else: raise TypeError(f'Type {cls.__name__} cannot be used without generic parameters, e.g. {cls.__name__}[T]') # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]: cached = _generic_types_cache.get((cls, params)) if cached is not None: return cached if cls.__concrete__: raise TypeError('Cannot parameterize a concrete instantiation of a generic model') if not isinstance(params, tuple): params = (params,) if cls is GenericModel and any(isinstance(param, TypeVar) for param in params): # type: ignore raise TypeError(f'Type parameters should be placed on typing.Generic, not GenericModel') if Generic not in cls.__bases__: raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized') check_parameters_count(cls, params) typevars_map: Dict[Any, Any] = dict(zip(cls.__parameters__, params)) # type: ignore type_hints = get_type_hints(cls).items() instance_type_hints = {k: v for k, v in type_hints if getattr(v, '__origin__', None) is not ClassVar} concrete_type_hints: Dict[str, Type[Any]] = { k: resolve_type_hint(v, typevars_map) for k, v in instance_type_hints.items() } model_name = cls.__concrete_name__(params) validators = gather_all_validators(cls) fields: Dict[str, Tuple[Type[Any], Any]] = { k: (v, cls.__fields__[k].field_info) for k, v in concrete_type_hints.items() if k in cls.__fields__ } created_model = create_model( model_name=model_name, __module__=cls.__module__, __base__=cls, __config__=None, __validators__=validators, **fields, ) created_model.Config = cls.Config created_model.__concrete__ = True # type: ignore _generic_types_cache[(cls, params)] = created_model if len(params) == 1: _generic_types_cache[(cls, params[0])] = created_model return created_model 
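# --- Editor's sketch (illustrative, not part of the library source): what
# __class_getitem__ above provides at the call site; `Wrapper`/`T` are
# hypothetical names. Note the cache — repeating the same parameterization
# returns the identical concrete class.
from typing import Generic, TypeVar
from pydantic.generics import GenericModel

T = TypeVar('T')

class Wrapper(GenericModel, Generic[T]):
    value: T

IntWrapper = Wrapper[int]
assert IntWrapper is Wrapper[int]           # served from _generic_types_cache
assert IntWrapper(value='3').value == 3     # concrete field validates as int
assert IntWrapper.__name__ == 'Wrapper[int]'
# --- end sketch ---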
@classmethod def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: """ This method can be overridden to achieve a custom naming scheme for GenericModels """ param_names = [param.__name__ if hasattr(param, '__name__') else str(param) for param in params] params_component = ', '.join(param_names) return f'{cls.__name__}[{params_component}]' def resolve_type_hint(type_: Any, typevars_map: Dict[Any, Any]) -> Type[Any]: if hasattr(type_, '__origin__') and getattr(type_, '__parameters__', None): concrete_type_args = tuple([typevars_map[x] for x in type_.__parameters__]) return type_[concrete_type_args] return typevars_map.get(type_, type_) def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None: actual = len(parameters) expected = len(cls.__parameters__) # type: ignore if actual != expected: description = 'many' if actual > expected else 'few' raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}') pydantic-1.2/pydantic/json.py000066400000000000000000000041451357000400300163210ustar00rootroot00000000000000import datetime from dataclasses import asdict, is_dataclass from decimal import Decimal from enum import Enum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from types import GeneratorType from typing import Any, Callable, Dict, Type, Union from uuid import UUID from .color import Color from .types import SecretBytes, SecretStr __all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat' def isoformat(o: Union[datetime.date, datetime.time]) -> str: return o.isoformat() ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { Color: str, IPv4Address: str, IPv6Address: str, IPv4Interface: str, IPv6Interface: str, IPv4Network: str, IPv6Network: str, SecretStr: str, SecretBytes: str, UUID: str, datetime.datetime: isoformat, datetime.date: isoformat, datetime.time: isoformat, datetime.timedelta: lambda td: td.total_seconds(), set: list, frozenset: list, GeneratorType: list, bytes: lambda o: o.decode(), Decimal: float, } def pydantic_encoder(obj: Any) -> Any: from .main import BaseModel if isinstance(obj, BaseModel): return obj.dict() elif isinstance(obj, Enum): return obj.value elif isinstance(obj, Path): return str(obj) elif is_dataclass(obj): return asdict(obj) try: encoder = ENCODERS_BY_TYPE[type(obj)] except KeyError: raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") else: return encoder(obj) def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: encoder = type_encoders.get(type(obj)) if encoder: return encoder(obj) else: return pydantic_encoder(obj) def timedelta_isoformat(td: datetime.timedelta) -> str: """ ISO 8601 encoding for timedeltas. 
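# --- Editor's sketch (illustrative, not part of the library source):
# ENCODERS_BY_TYPE above in action, used as the `default` hook of json.dumps;
# only standard-library and pydantic names are used.
import datetime
import json
import uuid
from pydantic.json import pydantic_encoder

data = {'when': datetime.date(2019, 12, 1), 'id': uuid.UUID(int=1), 'tags': {'a'}}
assert json.loads(json.dumps(data, default=pydantic_encoder)) == {
    'when': '2019-12-01',
    'id': '00000000-0000-0000-0000-000000000001',
    'tags': ['a'],
}
# --- end sketch ---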
""" minutes, seconds = divmod(td.seconds, 60) hours, minutes = divmod(minutes, 60) return f'P{td.days}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' pydantic-1.2/pydantic/main.py000066400000000000000000001010411357000400300162650ustar00rootroot00000000000000import json import sys import warnings from abc import ABCMeta from copy import deepcopy from enum import Enum from functools import partial from pathlib import Path from types import FunctionType from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast, no_type_check from .class_validators import ROOT_KEY, ValidatorGroup, extract_root_validators, extract_validators, inherit_validators from .error_wrappers import ErrorWrapper, ValidationError from .errors import ConfigError, DictError, ExtraError, MissingError from .fields import SHAPE_MAPPING, ModelField, Undefined from .json import custom_pydantic_encoder, pydantic_encoder from .parse import Protocol, load_file, load_str_bytes from .schema import model_schema from .types import PyObject, StrBytes from .typing import AnyCallable, AnyType, ForwardRef, is_classvar, resolve_annotations, update_field_forward_refs from .utils import GetterDict, Representation, ValueItems, lenient_issubclass, validate_field_name if TYPE_CHECKING: from .class_validators import ValidatorListDict from .types import ModelOrDc from .typing import CallableGenerator, TupleGenerator, DictStrAny, DictAny, SetStr from .typing import AbstractSetIntStr, DictIntStrAny, ReprArgs # noqa: F401 ConfigType = Type['BaseConfig'] Model = TypeVar('Model', bound='BaseModel') try: import cython # type: ignore except ImportError: compiled: bool = False else: # pragma: no cover try: compiled = cython.compiled except AttributeError: compiled = False __all__ = 'BaseConfig', 'BaseModel', 'Extra', 'compiled', 'create_model', 'validate_model' class Extra(str, Enum): allow = 'allow' ignore = 'ignore' forbid = 'forbid' class BaseConfig: title = None anystr_strip_whitespace = False min_anystr_length = None max_anystr_length = None validate_all = False extra = Extra.ignore allow_mutation = True allow_population_by_field_name = False use_enum_values = False fields: Dict[str, Union[str, Dict[str, str]]] = {} validate_assignment = False error_msg_templates: Dict[str, str] = {} arbitrary_types_allowed = False orm_mode: bool = False getter_dict: Type[GetterDict] = GetterDict alias_generator: Optional[Callable[[str], str]] = None keep_untouched: Tuple[type, ...] = () schema_extra: Dict[str, Any] = {} json_loads: Callable[[str], Any] = json.loads json_dumps: Callable[..., str] = json.dumps json_encoders: Dict[AnyType, AnyCallable] = {} @classmethod def get_field_info(cls, name: str) -> Dict[str, Any]: field_info = cls.fields.get(name) or {} if isinstance(field_info, str): field_info = {'alias': field_info} elif cls.alias_generator and 'alias' not in field_info: alias = cls.alias_generator(name) if not isinstance(alias, str): raise TypeError(f'Config.alias_generator must return str, not {type(alias)}') field_info['alias'] = alias return field_info @classmethod def prepare_field(cls, field: 'ModelField') -> None: """ Optional hook to check or modify fields during model creation. 
""" pass def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType') -> 'ConfigType': if not self_config: base_classes = (parent_config,) elif self_config == parent_config: base_classes = (self_config,) else: base_classes = self_config, parent_config # type: ignore return type('Config', base_classes, {}) EXTRA_LINK = 'https://pydantic-docs.helpmanual.io/usage/model_config/' def prepare_config(config: Type[BaseConfig], cls_name: str) -> None: if not isinstance(config.extra, Extra): try: config.extra = Extra(config.extra) except ValueError: raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"') if hasattr(config, 'allow_population_by_alias'): warnings.warn( f'{cls_name}: "allow_population_by_alias" is deprecated and replaced by "allow_population_by_field_name"', DeprecationWarning, ) config.allow_population_by_field_name = config.allow_population_by_alias # type: ignore if hasattr(config, 'case_insensitive') and any('BaseSettings.Config' in c.__qualname__ for c in config.__mro__): warnings.warn( f'{cls_name}: "case_insensitive" is deprecated on BaseSettings config and replaced by ' f'"case_sensitive" (default False)', DeprecationWarning, ) config.case_sensitive = not config.case_insensitive # type: ignore def is_valid_field(name: str) -> bool: if not name.startswith('_'): return True return ROOT_KEY == name def validate_custom_root_type(fields: Dict[str, ModelField]) -> None: if len(fields) > 1: raise ValueError('__root__ cannot be mixed with other fields') UNTOUCHED_TYPES = FunctionType, property, type, classmethod, staticmethod class ModelMetaclass(ABCMeta): @no_type_check # noqa C901 def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 fields: Dict[str, ModelField] = {} config = BaseConfig validators: 'ValidatorListDict' = {} pre_root_validators, post_root_validators = [], [] for base in reversed(bases): if issubclass(base, BaseModel) and base != BaseModel: fields.update(deepcopy(base.__fields__)) config = inherit_config(base.__config__, config) validators = inherit_validators(base.__validators__, validators) pre_root_validators += base.__pre_root_validators__ post_root_validators += base.__post_root_validators__ config = inherit_config(namespace.get('Config'), config) validators = inherit_validators(extract_validators(namespace), validators) vg = ValidatorGroup(validators) for f in fields.values(): f.set_config(config) extra_validators = vg.get_validators(f.name) if extra_validators: f.class_validators.update(extra_validators) # re-run prepare to add extra validators f.populate_validators() prepare_config(config, name) class_vars = set() if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'): annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None)) untouched_types = UNTOUCHED_TYPES + config.keep_untouched # annotation only fields need to come first in fields for ann_name, ann_type in annotations.items(): if is_classvar(ann_type): class_vars.add(ann_name) elif is_valid_field(ann_name): validate_field_name(bases, ann_name) value = namespace.get(ann_name, Undefined) if ( isinstance(value, untouched_types) and ann_type != PyObject and not lenient_issubclass(getattr(ann_type, '__origin__', None), Type) ): continue fields[ann_name] = ModelField.infer( name=ann_name, value=value, annotation=ann_type, class_validators=vg.get_validators(ann_name), config=config, ) for var_name, value in namespace.items(): if ( var_name not in annotations and 
is_valid_field(var_name) and not isinstance(value, untouched_types) and var_name not in class_vars ): validate_field_name(bases, var_name) inferred = ModelField.infer( name=var_name, value=value, annotation=annotations.get(var_name), class_validators=vg.get_validators(var_name), config=config, ) if var_name in fields and inferred.type_ != fields[var_name].type_: raise TypeError( f'The type of {name}.{var_name} differs from the new default value; ' f'if you wish to change the type of this field, please use a type annotation' ) fields[var_name] = inferred _custom_root_type = ROOT_KEY in fields if _custom_root_type: validate_custom_root_type(fields) vg.check_for_unused() if config.json_encoders: json_encoder = partial(custom_pydantic_encoder, config.json_encoders) else: json_encoder = pydantic_encoder pre_rv_new, post_rv_new = extract_root_validators(namespace) new_namespace = { '__config__': config, '__fields__': fields, '__field_defaults__': {n: f.default for n, f in fields.items() if not f.required}, '__validators__': vg.validators, '__pre_root_validators__': pre_root_validators + pre_rv_new, '__post_root_validators__': post_root_validators + post_rv_new, '__schema_cache__': {}, '__json_encoder__': staticmethod(json_encoder), '__custom_root_type__': _custom_root_type, **{n: v for n, v in namespace.items() if n not in fields}, } return super().__new__(mcs, name, bases, new_namespace, **kwargs) class BaseModel(metaclass=ModelMetaclass): if TYPE_CHECKING: # populated by the metaclass, defined here to help IDEs only __fields__: Dict[str, ModelField] = {} __field_defaults__: Dict[str, Any] = {} __validators__: Dict[str, AnyCallable] = {} __pre_root_validators__: List[AnyCallable] __post_root_validators__: List[AnyCallable] __config__: Type[BaseConfig] = BaseConfig __root__: Any = None __json_encoder__: Callable[[Any], Any] = lambda x: x __schema_cache__: 'DictAny' = {} __custom_root_type__: bool = False Config = BaseConfig __slots__ = ('__dict__', '__fields_set__') # equivalent of inheriting from Representation __repr_name__ = Representation.__repr_name__ __repr_str__ = Representation.__repr_str__ __pretty__ = Representation.__pretty__ __str__ = Representation.__str__ __repr__ = Representation.__repr__ def __init__(__pydantic_self__, **data: Any) -> None: # Uses something other than `self` the first arg to allow "self" as a settable attribute if TYPE_CHECKING: __pydantic_self__.__dict__: Dict[str, Any] = {} __pydantic_self__.__fields_set__: 'SetStr' = set() values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data) if validation_error: raise validation_error object.__setattr__(__pydantic_self__, '__dict__', values) object.__setattr__(__pydantic_self__, '__fields_set__', fields_set) @no_type_check def __setattr__(self, name, value): if self.__config__.extra is not Extra.allow and name not in self.__fields__: raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') elif not self.__config__.allow_mutation: raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment') elif self.__config__.validate_assignment: known_field = self.__fields__.get(name, None) if known_field: value, error_ = known_field.validate(value, self.dict(exclude={name}), loc=name) if error_: raise ValidationError([error_], type(self)) self.__dict__[name] = value self.__fields_set__.add(name) def __getstate__(self) -> 'DictAny': return {'__dict__': self.__dict__, '__fields_set__': self.__fields_set__} def __setstate__(self, state: 'DictAny') -> 
None: object.__setattr__(self, '__dict__', state['__dict__']) object.__setattr__(self, '__fields_set__', state['__fields_set__']) def dict( self, *, include: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, exclude: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, by_alias: bool = False, skip_defaults: bool = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, ) -> 'DictStrAny': """ Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. """ if skip_defaults is not None: warnings.warn( f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"', DeprecationWarning, ) exclude_unset = skip_defaults get_key = self._get_key_factory(by_alias) get_key = partial(get_key, self.__fields__) allowed_keys = self._calculate_keys(include=include, exclude=exclude, exclude_unset=exclude_unset) return { get_key(k): v for k, v in self._iter( to_dict=True, by_alias=by_alias, allowed_keys=allowed_keys, include=include, exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, ) } def _get_key_factory(self, by_alias: bool) -> Callable[..., str]: if by_alias: return lambda fields, key: fields[key].alias if key in fields else key return lambda _, key: key def json( self, *, include: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, exclude: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, by_alias: bool = False, skip_defaults: bool = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, encoder: Optional[Callable[[Any], Any]] = None, **dumps_kwargs: Any, ) -> str: """ Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`. `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`. 
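# --- Editor's sketch (illustrative, not part of the library source): the
# export keywords documented above; `User` is a hypothetical model.
from pydantic import BaseModel

class User(BaseModel):
    id: int
    name: str = 'Jane'

u = User(id=1)
assert u.dict() == {'id': 1, 'name': 'Jane'}
assert u.dict(exclude_unset=True) == {'id': 1}   # only fields set at init
assert u.json(include={'id'}) == '{"id": 1}'
# --- end sketch ---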
""" if skip_defaults is not None: warnings.warn( f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"', DeprecationWarning, ) exclude_unset = skip_defaults encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__) data = self.dict( include=include, exclude=exclude, by_alias=by_alias, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, ) if self.__custom_root_type__: data = data[ROOT_KEY] return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs) @classmethod def parse_obj(cls: Type['Model'], obj: Any) -> 'Model': if cls.__custom_root_type__ and ( not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY}) or cls.__fields__[ROOT_KEY].shape == SHAPE_MAPPING ): obj = {ROOT_KEY: obj} elif not isinstance(obj, dict): try: obj = dict(obj) except (TypeError, ValueError) as e: exc = TypeError(f'{cls.__name__} expected dict not {type(obj).__name__}') raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e return cls(**obj) @classmethod def parse_raw( cls: Type['Model'], b: StrBytes, *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, ) -> 'Model': try: obj = load_str_bytes( b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=cls.__config__.json_loads, ) except (ValueError, TypeError, UnicodeDecodeError) as e: raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls) return cls.parse_obj(obj) @classmethod def parse_file( cls: Type['Model'], path: Union[str, Path], *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, ) -> 'Model': obj = load_file(path, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle) return cls.parse_obj(obj) @classmethod def from_orm(cls: Type['Model'], obj: Any) -> 'Model': if not cls.__config__.orm_mode: raise ConfigError('You must have the config attribute orm_mode=True to use from_orm') obj = cls._decompose_class(obj) m = cls.__new__(cls) values, fields_set, validation_error = validate_model(cls, obj) if validation_error: raise validation_error object.__setattr__(m, '__dict__', values) object.__setattr__(m, '__fields_set__', fields_set) return m @classmethod def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model': """ Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. """ m = cls.__new__(cls) object.__setattr__(m, '__dict__', {**deepcopy(cls.__field_defaults__), **values}) if _fields_set is None: _fields_set = set(values.keys()) object.__setattr__(m, '__fields_set__', _fields_set) return m def copy( self: 'Model', *, include: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, exclude: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, update: 'DictStrAny' = None, deep: bool = False, ) -> 'Model': """ Duplicate a model, optionally choose which fields to include, exclude and change. :param include: fields to include in new model :param exclude: fields to exclude from new model, as with values this takes precedence over include :param update: values to change/add in the new model. 
Note: the data is not validated before creating the new model: you should trust this data :param deep: set to `True` to make a deep copy of the model :return: new model instance """ if include is None and exclude is None and update is None: # skip constructing values if no arguments are passed v = self.__dict__ else: allowed_keys = self._calculate_keys(include=include, exclude=exclude, exclude_unset=False, update=update) if allowed_keys is None: v = {**self.__dict__, **(update or {})} else: v = { **dict( self._iter( to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False, allowed_keys=allowed_keys, ) ), **(update or {}), } if deep: v = deepcopy(v) cls = self.__class__ m = cls.__new__(cls) object.__setattr__(m, '__dict__', v) object.__setattr__(m, '__fields_set__', self.__fields_set__.copy()) return m @classmethod def schema(cls, by_alias: bool = True) -> 'DictStrAny': cached = cls.__schema_cache__.get(by_alias) if cached is not None: return cached s = model_schema(cls, by_alias=by_alias) cls.__schema_cache__[by_alias] = s return s @classmethod def schema_json(cls, *, by_alias: bool = True, **dumps_kwargs: Any) -> str: from .json import pydantic_encoder return cls.__config__.json_dumps(cls.schema(by_alias=by_alias), default=pydantic_encoder, **dumps_kwargs) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls: Type['Model'], value: Any) -> 'Model': if isinstance(value, dict): return cls(**value) elif isinstance(value, cls): return value.copy() elif cls.__config__.orm_mode: return cls.from_orm(value) else: try: value_as_dict = dict(value) except (TypeError, ValueError) as e: raise DictError() from e return cls(**value_as_dict) @classmethod def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict: return cls.__config__.getter_dict(obj) @classmethod @no_type_check def _get_value( cls, v: Any, to_dict: bool, by_alias: bool, include: Optional[Union['AbstractSetIntStr', 'DictIntStrAny']], exclude: Optional[Union['AbstractSetIntStr', 'DictIntStrAny']], exclude_unset: bool, exclude_defaults: bool, exclude_none: bool, ) -> Any: if isinstance(v, BaseModel): if to_dict: return v.dict( by_alias=by_alias, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, include=include, exclude=exclude, exclude_none=exclude_none, ) else: return v.copy(include=include, exclude=exclude) value_exclude = ValueItems(v, exclude) if exclude else None value_include = ValueItems(v, include) if include else None if isinstance(v, dict): return { k_: cls._get_value( v_, to_dict=to_dict, by_alias=by_alias, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, include=value_include and value_include.for_element(k_), exclude=value_exclude and value_exclude.for_element(k_), exclude_none=exclude_none, ) for k_, v_ in v.items() if (not value_exclude or not value_exclude.is_excluded(k_)) and (not value_include or value_include.is_included(k_)) } elif isinstance(v, (list, set, tuple)): return type(v)( cls._get_value( v_, to_dict=to_dict, by_alias=by_alias, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, include=value_include and value_include.for_element(i), exclude=value_exclude and value_exclude.for_element(i), exclude_none=exclude_none, ) for i, v_ in enumerate(v) if (not value_exclude or not value_exclude.is_excluded(i)) and (not value_include or value_include.is_included(i)) ) else: return v @classmethod def update_forward_refs(cls, **localns: Any) -> None: """ Try to update ForwardRefs on fields 
based on this Model, globalns and localns. """ globalns = sys.modules[cls.__module__].__dict__ globalns.setdefault(cls.__name__, cls) for f in cls.__fields__.values(): update_field_forward_refs(f, globalns=globalns, localns=localns) def __iter__(self) -> 'TupleGenerator': """ so `dict(model)` works """ yield from self._iter() def _iter( self, to_dict: bool = False, by_alias: bool = False, allowed_keys: Optional['SetStr'] = None, include: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, exclude: Union['AbstractSetIntStr', 'DictIntStrAny'] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, ) -> 'TupleGenerator': value_exclude = ValueItems(self, exclude) if exclude else None value_include = ValueItems(self, include) if include else None if exclude_defaults: if allowed_keys is None: allowed_keys = set(self.__fields__) for k, v in self.__field_defaults__.items(): if self.__dict__[k] == v: allowed_keys.discard(k) for k, v in self.__dict__.items(): if allowed_keys is None or k in allowed_keys: value = self._get_value( v, to_dict=to_dict, by_alias=by_alias, include=value_include and value_include.for_element(k), exclude=value_exclude and value_exclude.for_element(k), exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, ) if not (exclude_none and value is None): yield k, value def _calculate_keys( self, include: Optional[Union['AbstractSetIntStr', 'DictIntStrAny']], exclude: Optional[Union['AbstractSetIntStr', 'DictIntStrAny']], exclude_unset: bool, update: Optional['DictStrAny'] = None, ) -> Optional['SetStr']: if include is None and exclude is None and exclude_unset is False: return None if exclude_unset: keys = self.__fields_set__.copy() else: keys = set(self.__dict__.keys()) if include is not None: if isinstance(include, dict): keys &= include.keys() else: keys &= include if update: keys -= update.keys() if exclude: if isinstance(exclude, dict): keys -= {k for k, v in exclude.items() if v is ...} else: keys -= exclude return keys def __eq__(self, other: Any) -> bool: if isinstance(other, BaseModel): return self.dict() == other.dict() else: return self.dict() == other def __repr_args__(self) -> 'ReprArgs': return self.__dict__.items() # type: ignore @property def fields(self) -> Dict[str, ModelField]: warnings.warn('`fields` attribute is deprecated, use `__fields__` instead', DeprecationWarning) return self.__fields__ def to_string(self, pretty: bool = False) -> str: warnings.warn('`model.to_string()` method is deprecated, use `str(model)` instead', DeprecationWarning) return str(self) @property def __values__(self) -> 'DictStrAny': warnings.warn('`__values__` attribute is deprecated, use `__dict__` instead', DeprecationWarning) return self.__dict__ def create_model( model_name: str, *, __config__: Type[BaseConfig] = None, __base__: Type[BaseModel] = None, __module__: Optional[str] = None, __validators__: Dict[str, classmethod] = None, **field_definitions: Any, ) -> Type[BaseModel]: """ Dynamically create a model. :param model_name: name of the created model :param __config__: config class to use for the new model :param __base__: base class for the new model to inherit from :param __validators__: a dict of method names and @validator class methods :param **field_definitions: fields of the model (or extra fields if a base is supplied) in the format `<name>=(<type>, <default value>)` or `<name>=<default value>` e.g.
`foobar=(str, ...)` or `foobar=123` """ if __base__: if __config__ is not None: raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together') else: __base__ = BaseModel fields = {} annotations = {} for f_name, f_def in field_definitions.items(): if not is_valid_field(f_name): warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning) if isinstance(f_def, tuple): try: f_annotation, f_value = f_def except ValueError as e: raise ConfigError( f'field definitions should either be a tuple of (, ) or just a ' f'default value, unfortunately this means tuples as ' f'default values are not allowed' ) from e else: f_annotation, f_value = None, f_def if f_annotation: annotations[f_name] = f_annotation fields[f_name] = f_value namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__} if __validators__: namespace.update(__validators__) namespace.update(fields) if __config__: namespace['Config'] = inherit_config(__config__, BaseConfig) return type(model_name, (__base__,), namespace) _missing = object() def validate_model( # noqa: C901 (ignore complexity) model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None ) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]: """ validate data against a model. """ values = {} errors = [] # input_data names, possibly alias names_used = set() # field names, never aliases fields_set = set() config = model.__config__ check_extra = config.extra is not Extra.ignore cls_ = cls or model for validator in model.__pre_root_validators__: try: input_data = validator(cls_, input_data) except (ValueError, TypeError, AssertionError) as exc: return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_) for name, field in model.__fields__.items(): if type(field.type_) == ForwardRef: raise ConfigError( f'field "{field.name}" not yet prepared so type is still a ForwardRef, ' f'you might need to call {cls_.__name__}.update_forward_refs().' 
) value = input_data.get(field.alias, _missing) using_name = False if value is _missing and config.allow_population_by_field_name and field.alt_alias: value = input_data.get(field.name, _missing) using_name = True if value is _missing: if field.required: errors.append(ErrorWrapper(MissingError(), loc=field.alias)) continue if field.default is None: # deepcopy is quite slow on None value = None else: value = deepcopy(field.default) if not config.validate_all and not field.validate_always: values[name] = value continue else: fields_set.add(name) if check_extra: names_used.add(field.name if using_name else field.alias) v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_) if isinstance(errors_, ErrorWrapper): errors.append(errors_) elif isinstance(errors_, list): errors.extend(errors_) else: values[name] = v_ if check_extra: if isinstance(input_data, GetterDict): extra = input_data.extra_keys() - names_used else: extra = input_data.keys() - names_used if extra: fields_set |= extra if config.extra is Extra.allow: for f in extra: values[f] = input_data[f] else: for f in sorted(extra): errors.append(ErrorWrapper(ExtraError(), loc=f)) for validator in model.__post_root_validators__: try: values = validator(cls_, values) except (ValueError, TypeError, AssertionError) as exc: errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) break if errors: return values, fields_set, ValidationError(errors, cls_) else: return values, fields_set, None pydantic-1.2/pydantic/mypy.py000066400000000000000000000644431357000400300163550ustar00rootroot00000000000000from configparser import ConfigParser from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType from mypy.errorcodes import ErrorCode from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, ARG_OPT, ARG_POS, ARG_STAR2, MDEF, Argument, AssignmentStmt, Block, CallExpr, ClassDef, Context, Decorator, EllipsisExpr, FuncDef, JsonDict, MemberExpr, NameExpr, PassStmt, PlaceholderNode, RefExpr, StrExpr, SymbolTableNode, TempNode, TypeInfo, TypeVarExpr, Var, ) from mypy.options import Options from mypy.plugin import CheckerPluginInterface, ClassDefContext, MethodContext, Plugin, SemanticAnalyzerPluginInterface from mypy.plugins import dataclasses from mypy.semanal import set_callable_name # type: ignore from mypy.server.trigger import make_wildcard_trigger from mypy.types import ( AnyType, CallableType, Instance, NoneType, Type, TypeOfAny, TypeType, TypeVarDef, TypeVarType, UnionType, ) from mypy.typevars import fill_typevars from mypy.util import get_unique_redefinition_name CONFIGFILE_KEY = 'pydantic-mypy' METADATA_KEY = 'pydantic-mypy-metadata' BASEMODEL_FULLNAME = 'pydantic.main.BaseModel' BASESETTINGS_FULLNAME = 'pydantic.env_settings.BaseSettings' FIELD_FULLNAME = 'pydantic.fields.Field' DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass' def plugin(version: str) -> 'TypingType[Plugin]': """ `version` is the mypy version string We might want to use this to print a warning if the mypy version being used is newer, or especially older, than we expect (or need). 
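# --- Editor's sketch (illustrative, not part of the library source): enabling
# this plugin from mypy configuration — the [pydantic-mypy] section feeds
# PydanticPluginConfig below. Shown as comments since it is INI, not Python:
#
#   # mypy.ini
#   [mypy]
#   plugins = pydantic.mypy
#
#   [pydantic-mypy]
#   init_typed = True
#   init_forbid_extra = True
#   warn_required_dynamic_aliases = True
#   warn_untyped_fields = True
# --- end sketch ---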
""" return PydanticPlugin class PydanticPlugin(Plugin): def __init__(self, options: Options) -> None: self.plugin_config = PydanticPluginConfig(options) super().__init__(options) def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]': sym = self.lookup_fully_qualified(fullname) if sym and isinstance(sym.node, TypeInfo): # pragma: no branch # No branching may occur if the mypy cache has not been cleared if any(base.fullname() == BASEMODEL_FULLNAME for base in sym.node.mro): return self._pydantic_model_class_maker_callback return None def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]: if fullname.endswith('.from_orm'): return from_orm_callback return None def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: if fullname == DATACLASS_FULLNAME: return dataclasses.dataclass_class_maker_callback return None def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None: transformer = PydanticModelTransformer(ctx, self.plugin_config) transformer.transform() class PydanticPluginConfig: __slots__ = ('init_forbid_extra', 'init_typed', 'warn_required_dynamic_aliases', 'warn_untyped_fields') init_forbid_extra: bool init_typed: bool warn_required_dynamic_aliases: bool warn_untyped_fields: bool def __init__(self, options: Options) -> None: if options.config_file is None: # pragma: no cover return plugin_config = ConfigParser() plugin_config.read(options.config_file) for key in self.__slots__: setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False) setattr(self, key, setting) def from_orm_callback(ctx: MethodContext) -> Type: """ Raise an error if orm_mode is not enabled """ model_type: Instance if isinstance(ctx.type, CallableType) and isinstance(ctx.type.ret_type, Instance): model_type = ctx.type.ret_type # called on the class elif isinstance(ctx.type, Instance): model_type = ctx.type # called on an instance (unusual, but still valid) else: # pragma: no cover detail = f'ctx.type: {ctx.type} (of type {type(ctx.type).__name__})' error_unexpected_behavior(detail, ctx.api, ctx.context) return ctx.default_return_type pydantic_metadata = model_type.type.metadata.get(METADATA_KEY) if pydantic_metadata is None: return ctx.default_return_type orm_mode = pydantic_metadata.get('config', {}).get('orm_mode') if orm_mode is not True: error_from_orm(model_type.type.name(), ctx.api, ctx.context) return ctx.default_return_type class PydanticModelTransformer: tracked_config_fields: Set[str] = { 'extra', 'allow_mutation', 'orm_mode', 'allow_population_by_field_name', 'alias_generator', } def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None: self._ctx = ctx self.plugin_config = plugin_config def transform(self) -> None: """ Configures the BaseModel subclass according to the plugin settings. 
In particular: * determines the model config and fields, * adds a fields-aware signature for the initializer and construct methods * freezes the class if allow_mutation = False * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses """ ctx = self._ctx info = self._ctx.cls.info config = self.collect_config() fields = self.collect_fields(config) for field in fields: if info[field.name].type is None: if not ctx.api.final_iteration: ctx.api.defer() is_settings = any(base.fullname() == BASESETTINGS_FULLNAME for base in info.mro[:-1]) self.add_initializer(fields, config, is_settings) self.add_construct_method(fields) self.set_frozen(fields, frozen=config.allow_mutation is False) info.metadata[METADATA_KEY] = { 'fields': {field.name: field.serialize() for field in fields}, 'config': config.set_values_dict(), } def collect_config(self) -> 'ModelConfigData': """ Collects the values of the config attributes that are used by the plugin, accounting for parent classes. """ ctx = self._ctx cls = ctx.cls config = ModelConfigData() for stmt in cls.defs.body: if not isinstance(stmt, ClassDef): continue if stmt.name == 'Config': for substmt in stmt.defs.body: if not isinstance(substmt, AssignmentStmt): continue config.update(self.get_config_update(substmt)) if ( config.has_alias_generator and not config.allow_population_by_field_name and self.plugin_config.warn_required_dynamic_aliases ): error_required_dynamic_aliases(ctx.api, stmt) for info in cls.info.mro[1:]: # 0 is the current class if METADATA_KEY not in info.metadata: continue # Each class depends on the set of fields in its ancestors ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname())) for name, value in info.metadata[METADATA_KEY]['config'].items(): config.setdefault(name, value) return config def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']: """ Collects the fields for the model, accounting for parent classes """ # First, collect fields belonging to the current class. ctx = self._ctx cls = self._ctx.cls fields = [] # type: List[PydanticModelField] known_fields = set() # type: Set[str] for stmt in cls.defs.body: if not isinstance(stmt, AssignmentStmt): # `and stmt.new_syntax` to require annotation continue lhs = stmt.lvalues[0] if not isinstance(lhs, NameExpr): continue if not stmt.new_syntax and self.plugin_config.warn_untyped_fields: error_untyped_fields(ctx.api, stmt) # if lhs.name == '__config__': # BaseConfig not well handled; I'm not sure why yet # continue sym = cls.info.names.get(lhs.name) if sym is None: # pragma: no cover # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) # This is the same logic used in the dataclasses plugin continue node = sym.node if isinstance(node, PlaceholderNode): # pragma: no cover # See the PlaceholderNode docstring for more detail about how this can occur # Basically, it is an edge case when dealing with complex import logic # This is the same logic used in the dataclasses plugin continue assert isinstance(node, Var) # x: ClassVar[int] is ignored by dataclasses. 
if node.is_classvar: continue is_required = self.get_is_required(cls, stmt, lhs) alias, has_dynamic_alias = self.get_alias_info(stmt) if ( has_dynamic_alias and not model_config.allow_population_by_field_name and self.plugin_config.warn_required_dynamic_aliases ): error_required_dynamic_aliases(ctx.api, stmt) fields.append( PydanticModelField( name=lhs.name, is_required=is_required, alias=alias, has_dynamic_alias=has_dynamic_alias, line=stmt.line, column=stmt.column, ) ) known_fields.add(lhs.name) all_fields = fields.copy() for info in cls.info.mro[1:]: # 0 is the current class, -2 is BaseModel, -1 is object if METADATA_KEY not in info.metadata: continue superclass_fields = [] # Each class depends on the set of fields in its ancestors ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname())) for name, data in info.metadata[METADATA_KEY]['fields'].items(): if name not in known_fields: field = PydanticModelField.deserialize(info, data) known_fields.add(name) superclass_fields.append(field) else: (field,) = [a for a in all_fields if a.name == name] all_fields.remove(field) superclass_fields.append(field) all_fields = superclass_fields + all_fields return all_fields def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None: """ Adds a fields-aware `__init__` method to the class. The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. """ ctx = self._ctx typed = self.plugin_config.init_typed use_alias = config.allow_population_by_field_name is not True force_all_optional = is_settings or bool( config.has_alias_generator and not config.allow_population_by_field_name ) init_arguments = self.get_field_arguments( fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias ) if not self.should_init_forbid_extra(fields, config): var = Var('kwargs') init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) add_method(ctx, '__init__', init_arguments, NoneType()) def add_construct_method(self, fields: List['PydanticModelField']) -> None: """ Adds a fully typed `construct` classmethod to the class. Similar to the fields-aware __init__ method, but always uses the field names (not aliases), and does not treat settings fields as optional. """ ctx = self._ctx set_str = ctx.api.named_type('__builtins__.set', [ctx.api.named_type('__builtins__.str')]) optional_set_str = UnionType([set_str, NoneType()]) fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False) construct_arguments = [fields_set_argument] + construct_arguments obj_type = ctx.api.named_type('__builtins__.object') self_tvar_name = 'Model' tvar_fullname = ctx.cls.fullname + '.' + self_tvar_name tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type) self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type) ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr) self_type = TypeVarType(tvd) add_method( ctx, 'construct', construct_arguments, return_type=self_type, self_type=self_type, tvar_def=tvd, is_classmethod=True, ) def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None: """ Marks all fields as properties so that attempts to set them trigger mypy errors. This is the same approach used by the attrs and dataclasses plugins. 
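# --- Editor's sketch (illustrative, not part of the library source): the
# runtime counterpart of set_frozen — with allow_mutation = False pydantic
# raises on assignment, and the property trick above makes mypy flag the same
# assignment statically. `Point` is a hypothetical model.
from pydantic import BaseModel

class Point(BaseModel):
    x: int

    class Config:
        allow_mutation = False

p = Point(x=1)
try:
    p.x = 2  # mypy: read-only property; runtime: TypeError
except TypeError:
    pass
# --- end sketch ---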
""" info = self._ctx.cls.info for field in fields: sym_node = info.names.get(field.name) if sym_node is not None: var = sym_node.node assert isinstance(var, Var) var.is_property = frozen else: var = field.to_var(info, use_alias=False) var.info = info var.is_property = frozen var._fullname = info.fullname() + '.' + var.name() info.names[var.name()] = SymbolTableNode(MDEF, var) def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']: """ Determines the config update due to a single statement in the Config class definition. Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) """ lhs = substmt.lvalues[0] if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields): return None if lhs.name == 'extra': if isinstance(substmt.rvalue, StrExpr): forbid_extra = substmt.rvalue.value == 'forbid' elif isinstance(substmt.rvalue, MemberExpr): forbid_extra = substmt.rvalue.name == 'forbid' else: error_invalid_config_value(lhs.name, self._ctx.api, substmt) return None return ModelConfigData(forbid_extra=forbid_extra) if lhs.name == 'alias_generator': has_alias_generator = True if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None': has_alias_generator = False return ModelConfigData(has_alias_generator=has_alias_generator) if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'): return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'}) error_invalid_config_value(lhs.name, self._ctx.api, substmt) return None @staticmethod def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool: """ Returns a boolean indicating whether the field defined in `stmt` is a required field. """ expr = stmt.rvalue if isinstance(expr, TempNode): # TempNode means annotation-only, so only non-required if Optional value_type = cls.info[lhs.name].type if isinstance(value_type, UnionType) and any(isinstance(item, NoneType) for item in value_type.items): # Annotated as Optional, or otherwise having NoneType in the union return False return True if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: # The "default value" is a call to `Field`; at this point, the field is # only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`) return len(expr.args) > 0 and type(expr.args[0]) is EllipsisExpr # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) return isinstance(expr, EllipsisExpr) @staticmethod def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]: """ Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. If `has_dynamic_alias` is True, `alias` will be None. 
""" expr = stmt.rvalue if isinstance(expr, TempNode): # TempNode means annotation-only return None, False if not ( isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME ): # Assigned value is not a call to pydantic.fields.Field return None, False for i, arg_name in enumerate(expr.arg_names): if arg_name != 'alias': continue arg = expr.args[i] if isinstance(arg, StrExpr): return arg.value, False else: return None, True return None, False def get_field_arguments( self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool ) -> List[Argument]: """ Helper function used during the construction of the `__init__` and `construct` method signatures. Returns a list of mypy Argument instances for use in the generated signatures. """ info = self._ctx.cls.info arguments = [ field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias) for field in fields if not (use_alias and field.has_dynamic_alias) ] return arguments def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool: """ Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, *unless* a required dynamic alias is present (since then we can't determine a valid signature). """ if not config.allow_population_by_field_name: if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): return False if config.forbid_extra: return True return self.plugin_config.init_forbid_extra @staticmethod def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool: """ Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be determined during static analysis. 
""" for field in fields: if field.has_dynamic_alias: return True if has_alias_generator: for field in fields: if field.alias is None: return True return False class PydanticModelField: def __init__( self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int ): self.name = name self.is_required = is_required self.alias = alias self.has_dynamic_alias = has_dynamic_alias self.line = line self.column = column def to_var(self, info: TypeInfo, use_alias: bool) -> Var: name = self.name if use_alias and self.alias is not None: name = self.alias return Var(name, info[self.name].type) def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument: if typed and info[self.name].type is not None: type_annotation = info[self.name].type else: type_annotation = AnyType(TypeOfAny.explicit) return Argument( variable=self.to_var(info, use_alias), type_annotation=type_annotation, initializer=None, kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED, ) def serialize(self) -> JsonDict: return self.__dict__ @classmethod def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField': return cls(**data) class ModelConfigData: def __init__( self, forbid_extra: Optional[bool] = None, allow_mutation: Optional[bool] = None, orm_mode: Optional[bool] = None, allow_population_by_field_name: Optional[bool] = None, has_alias_generator: Optional[bool] = None, ): self.forbid_extra = forbid_extra self.allow_mutation = allow_mutation self.orm_mode = orm_mode self.allow_population_by_field_name = allow_population_by_field_name self.has_alias_generator = has_alias_generator def set_values_dict(self) -> Dict[str, Any]: return {k: v for k, v in self.__dict__.items() if v is not None} def update(self, config: Optional['ModelConfigData']) -> None: if config is None: return for k, v in config.set_values_dict().items(): setattr(self, k, v) def setdefault(self, key: str, value: Any) -> None: if getattr(self, key) is None: setattr(self, key, value) ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic') ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None: api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM) def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS) def error_unexpected_behavior(detail: str, api: CheckerPluginInterface, context: Context) -> None: # pragma: no cover # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path link = 'https://github.com/samuelcolvin/pydantic/issues/new/choose' full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' full_message += f'Please consider reporting this bug at {link} so we can try to fix it!' 
api.fail(full_message, context, code=ERROR_UNEXPECTED) def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) def add_method( ctx: ClassDefContext, name: str, args: List[Argument], return_type: Type, self_type: Optional[Type] = None, tvar_def: Optional[TypeVarDef] = None, is_classmethod: bool = False, is_new: bool = False, # is_staticmethod: bool = False, ) -> None: """ Adds a new method to a class. This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged """ info = ctx.cls.info # First remove any previously generated methods with the same name # to avoid clashes and problems in the semantic analyzer. if name in info.names: sym = info.names[name] if sym.plugin_generated and isinstance(sym.node, FuncDef): ctx.cls.defs.body.remove(sym.node) self_type = self_type or fill_typevars(info) if is_classmethod or is_new: first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)] # elif is_staticmethod: # first = [] else: self_type = self_type or fill_typevars(info) first = [Argument(Var('self'), self_type, None, ARG_POS)] args = first + args arg_types, arg_names, arg_kinds = [], [], [] for arg in args: assert arg.type_annotation, 'All arguments must be fully typed.' arg_types.append(arg.type_annotation) arg_names.append(arg.variable.name()) arg_kinds.append(arg.kind) function_type = ctx.api.named_type('__builtins__.function') signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) if tvar_def: signature.variables = [tvar_def] func = FuncDef(name, args, Block([PassStmt()])) func.info = info func.type = set_callable_name(signature, func) func.is_class = is_classmethod # func.is_static = is_staticmethod func._fullname = info.fullname() + '.' + name func.line = info.line # NOTE: we would like the plugin generated node to dominate, but we still # need to keep any existing definitions so they get semantically analyzed. if name in info.names: # Get a nice unique name instead. r_name = get_unique_redefinition_name(name, info.names) info.names[r_name] = info.names[name] if is_classmethod: # or is_staticmethod: func.is_decorated = True v = Var(name, func.type) v.info = info v._fullname = func._fullname # if is_classmethod: v.is_classmethod = True dec = Decorator(func, [NameExpr('classmethod')], v) # else: # v.is_staticmethod = True # dec = Decorator(func, [NameExpr('staticmethod')], v) dec.line = info.line sym = SymbolTableNode(MDEF, dec) else: sym = SymbolTableNode(MDEF, func) sym.plugin_generated = True info.names[name] = sym info.defn.defs.body.append(func) pydantic-1.2/pydantic/networks.py000066400000000000000000000267031357000400300172300ustar00rootroot00000000000000import re from ipaddress import ( IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network, _BaseAddress, _BaseNetwork, ) from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Set, Tuple, Type, Union, cast, no_type_check from . 
import errors from .utils import Representation from .validators import constr_length_validator, str_validator if TYPE_CHECKING: from .fields import ModelField from .main import BaseConfig # noqa: F401 from .typing import AnyCallable CallableGenerator = Generator[AnyCallable, None, None] try: import email_validator except ImportError: email_validator = None NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]] __all__ = [ 'AnyUrl', 'AnyHttpUrl', 'HttpUrl', 'stricturl', 'EmailStr', 'NameEmail', 'IPvAnyAddress', 'IPvAnyInterface', 'IPvAnyNetwork', 'PostgresDsn', 'RedisDsn', 'validate_email', ] host_part_names = ('domain', 'ipv4', 'ipv6') url_regex = re.compile( r'(?:(?P[a-z0-9]+?)://)?' # scheme r'(?:(?P[^\s:]+)(?::(?P\S*))?@)?' # user info r'(?:' r'(?P(?:\d{1,3}\.){3}\d{1,3})|' # ipv4 r'(?P\[[A-F0-9]*:[A-F0-9:]+\])|' # ipv6 r'(?P[^\s/:?#]+)' # domain, validation occurs later r')?' r'(?::(?P\d+))?' # port r'(?P/[^\s?]*)?' # path r'(?:\?(?P[^\s#]+))?' # query r'(?:#(?P\S+))?', # fragment re.IGNORECASE, ) _ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?' _domain_ending = r'(?P\.[a-z]{2,63})?\.?' ascii_domain_regex = re.compile(fr'(?:{_ascii_chunk}\.)*?{_ascii_chunk}{_domain_ending}', re.IGNORECASE) _int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?' int_domain_regex = re.compile(fr'(?:{_int_chunk}\.)*?{_int_chunk}{_domain_ending}', re.IGNORECASE) class AnyUrl(str): strip_whitespace = True min_length = 1 max_length = 2 ** 16 allowed_schemes: Optional[Set[str]] = None tld_required: bool = False user_required: bool = False __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment') @no_type_check def __new__(cls, url: Optional[str], **kwargs) -> object: return str.__new__(cls, cls.build(**kwargs) if url is None else url) def __init__( self, url: str, *, scheme: str, user: Optional[str] = None, password: Optional[str] = None, host: str, tld: Optional[str] = None, host_type: str = 'domain', port: Optional[str] = None, path: Optional[str] = None, query: Optional[str] = None, fragment: Optional[str] = None, ) -> None: str.__init__(url) self.scheme = scheme self.user = user self.password = password self.host = host self.tld = tld self.host_type = host_type self.port = port self.path = path self.query = query self.fragment = fragment @classmethod def build( cls, *, scheme: str, user: Optional[str] = None, password: Optional[str] = None, host: str, port: Optional[str] = None, path: Optional[str] = None, query: Optional[str] = None, fragment: Optional[str] = None, **kwargs: str, ) -> str: url = scheme + '://' if user: url += user if password: url += ':' + password url += '@' url += host if port: url += ':' + port if path: url += path if query: url += '?' 
+ query if fragment: url += '#' + fragment return url @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl': if type(value) == cls: return value value = str_validator(value) if cls.strip_whitespace: value = value.strip() url: str = cast(str, constr_length_validator(value, field, config)) m = url_regex.match(url) # the regex should always match, if it doesn't please report with details of the URL tried assert m, 'URL regex failed unexpectedly' parts = m.groupdict() scheme = parts['scheme'] if scheme is None: raise errors.UrlSchemeError() if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes: raise errors.UrlSchemePermittedError(cls.allowed_schemes) user = parts['user'] if cls.user_required and user is None: raise errors.UrlUserInfoError() host, tld, host_type, rebuild = cls.validate_host(parts) if m.end() != len(url): raise errors.UrlExtraError(extra=url[m.end() :]) return cls( None if rebuild else url, scheme=scheme, user=user, password=parts['password'], host=host, tld=tld, host_type=host_type, port=parts['port'], path=parts['path'], query=parts['query'], fragment=parts['fragment'], ) @classmethod def validate_host(cls, parts: Dict[str, str]) -> Tuple[str, Optional[str], str, bool]: host, tld, host_type, rebuild = None, None, None, False for f in ('domain', 'ipv4', 'ipv6'): host = parts[f] if host: host_type = f break if host is None: raise errors.UrlHostError() elif host_type == 'domain': d = ascii_domain_regex.fullmatch(host) if d is None: d = int_domain_regex.fullmatch(host) if not d: raise errors.UrlHostError() host_type = 'int_domain' rebuild = True host = host.encode('idna').decode('ascii') tld = d.group('tld') if tld is not None: tld = tld[1:] elif cls.tld_required: raise errors.UrlHostTldError() return host, tld, host_type, rebuild # type: ignore def __repr__(self) -> str: extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None) return f'{self.__class__.__name__}({super().__repr__()}, {extra})' class AnyHttpUrl(AnyUrl): allowed_schemes = {'http', 'https'} class HttpUrl(AnyUrl): allowed_schemes = {'http', 'https'} tld_required = True # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers max_length = 2083 class PostgresDsn(AnyUrl): allowed_schemes = {'postgres', 'postgresql'} user_required = True class RedisDsn(AnyUrl): allowed_schemes = {'redis'} user_required = True def stricturl( *, strip_whitespace: bool = True, min_length: int = 1, max_length: int = 2 ** 16, tld_required: bool = True, allowed_schemes: Optional[Set[str]] = None, ) -> Type[AnyUrl]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict( strip_whitespace=strip_whitespace, min_length=min_length, max_length=max_length, tld_required=tld_required, allowed_schemes=allowed_schemes, ) return type('UrlValue', (AnyUrl,), namespace) class EmailStr(str): @classmethod def __get_validators__(cls) -> 'CallableGenerator': # included here and below so the error happens straight away if email_validator is None: raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') yield str_validator yield cls.validate @classmethod def validate(cls, value: str) -> str: return validate_email(value)[1] class NameEmail(Representation): __slots__ = 'name', 'email' def __init__(self, name: str, email: str): self.name = name self.email = email 
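    # Usage sketch (editor's example; requires the optional email-validator
    # dependency): a field annotated as ``NameEmail`` accepts both
    # 'fred.bloggs@example.com' and 'Fred Bloggs <fred.bloggs@example.com>';
    # the latter is parsed by ``validate_email`` below into
    # name='Fred Bloggs', email='fred.bloggs@example.com'.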
@classmethod def __get_validators__(cls) -> 'CallableGenerator': if email_validator is None: raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') yield str_validator yield cls.validate @classmethod def validate(cls, value: str) -> 'NameEmail': return cls(*validate_email(value)) def __str__(self) -> str: return f'{self.name} <{self.email}>' class IPvAnyAddress(_BaseAddress): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]: try: return IPv4Address(value) except ValueError: pass try: return IPv6Address(value) except ValueError: raise errors.IPvAnyAddressError() class IPvAnyInterface(_BaseAddress): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]: try: return IPv4Interface(value) except ValueError: pass try: return IPv6Interface(value) except ValueError: raise errors.IPvAnyInterfaceError() class IPvAnyNetwork(_BaseNetwork): # type: ignore @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]: # Assume IP Network is defined with a default value for ``strict`` argument. # Define your own class if you want to specify network address check strictness. try: return IPv4Network(value) except ValueError: pass try: return IPv6Network(value) except ValueError: raise errors.IPvAnyNetworkError() pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *') def validate_email(value: str) -> Tuple[str, str]: """ Brutally simple email address validation. Note unlike most email address validation * raw ip address (literal) domain parts are not allowed. * "John Doe " style "pretty" email addresses are processed * the local part check is extremely basic. This raises the possibility of unicode spoofing, but no better solution is really possible. * spaces are striped from the beginning and end of addresses but no error is raised See RFC 5322 but treat it with suspicion, there seems to exist no universally acknowledged test for a valid email! """ if email_validator is None: raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') m = pretty_email_regex.fullmatch(value) name: Optional[str] = None if m: name, value = m.groups() email = value.strip() try: email_validator.validate_email(email, check_deliverability=False) except email_validator.EmailNotValidError as e: raise errors.EmailError() from e at_index = email.index('@') local_part = email[:at_index] # RFC 5321, local part must be case-sensitive. 
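    # Worked example (editor's note): for the value 'John Doe <Foo.Bar@Example.COM>'
    # the pretty-email regex above yields name='John Doe' and email
    # 'Foo.Bar@Example.COM'; the local part 'Foo.Bar' keeps its case while the
    # domain is lowercased below, so the function returns
    # ('John Doe', 'Foo.Bar@example.com').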
global_part = email[at_index:].lower() return name or local_part, local_part + global_part pydantic-1.2/pydantic/parse.py000066400000000000000000000032721357000400300164620ustar00rootroot00000000000000import json import pickle from enum import Enum from pathlib import Path from typing import Any, Callable, Union from .types import StrBytes class Protocol(str, Enum): json = 'json' pickle = 'pickle' def load_str_bytes( b: StrBytes, *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, json_loads: Callable[[str], Any] = json.loads, ) -> Any: if proto is None and content_type: if content_type.endswith(('json', 'javascript')): pass elif allow_pickle and content_type.endswith('pickle'): proto = Protocol.pickle else: raise TypeError(f'Unknown content-type: {content_type}') proto = proto or Protocol.json if proto == Protocol.json: if isinstance(b, bytes): b = b.decode(encoding) return json_loads(b) elif proto == Protocol.pickle: if not allow_pickle: raise RuntimeError('Trying to decode with pickle with allow_pickle=False') bb = b if isinstance(b, bytes) else b.encode() return pickle.loads(bb) else: raise TypeError(f'Unknown protocol: {proto}') def load_file( path: Union[str, Path], *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, ) -> Any: path = Path(path) b = path.read_bytes() if content_type is None: if path.suffix in ('.js', '.json'): proto = Protocol.json elif path.suffix == '.pkl': proto = Protocol.pickle return load_str_bytes(b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle) pydantic-1.2/pydantic/py.typed000066400000000000000000000000001357000400300164570ustar00rootroot00000000000000pydantic-1.2/pydantic/schema.py000066400000000000000000001046121357000400300166100ustar00rootroot00000000000000import inspect import re import warnings from datetime import date, datetime, time, timedelta from decimal import Decimal from enum import Enum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import ( TYPE_CHECKING, Any, Callable, Dict, FrozenSet, List, Optional, Sequence, Set, Tuple, Type, TypeVar, Union, cast, ) from uuid import UUID from .class_validators import ROOT_KEY from .color import Color from .fields import ( SHAPE_FROZENSET, SHAPE_LIST, SHAPE_MAPPING, SHAPE_SEQUENCE, SHAPE_SET, SHAPE_SINGLETON, SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS, FieldInfo, ModelField, ) from .json import pydantic_encoder from .networks import AnyUrl, EmailStr, IPvAnyAddress, IPvAnyInterface, IPvAnyNetwork, NameEmail from .types import ( UUID1, UUID3, UUID4, UUID5, ConstrainedDecimal, ConstrainedFloat, ConstrainedInt, ConstrainedList, ConstrainedStr, DirectoryPath, FilePath, Json, SecretBytes, SecretStr, StrictBool, conbytes, condecimal, confloat, conint, conlist, constr, ) from .typing import ( ForwardRef, Literal, is_callable_type, is_literal_type, is_new_type, literal_values, new_type_supertype, ) from .utils import lenient_issubclass if TYPE_CHECKING: from .main import BaseModel # noqa: F401 default_prefix = '#/definitions/' def schema( models: Sequence[Type['BaseModel']], *, by_alias: bool = True, title: Optional[str] = None, description: Optional[str] = None, ref_prefix: Optional[str] = None, ) -> Dict[str, Any]: """ Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions`` top-level JSON key, including their sub-models. 
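
    For example (an illustrative sketch, not from the original docstring)::

        from pydantic import BaseModel
        from pydantic.schema import schema

        class Pet(BaseModel):
            name: str

        class Person(BaseModel):
            pet: Pet

        top_level = schema([Person], title='My Schema')
        # top_level['definitions'] has entries for both 'Person' and 'Pet',
        # and Person's 'pet' property is {'$ref': '#/definitions/Pet'}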
:param models: a list of models to include in the generated JSON Schema :param by_alias: generate the schemas using the aliases defined, if any :param title: title for the generated schema that includes the definitions :param description: description for the generated schema :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the top-level key ``definitions``, so you can extract them from there. But all the references will have the set prefix. :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for the models and sub-models passed in ``models``. """ ref_prefix = ref_prefix or default_prefix flat_models = get_flat_models_from_models(models) model_name_map = get_model_name_map(flat_models) definitions = {} output_schema: Dict[str, Any] = {} if title: output_schema['title'] = title if description: output_schema['description'] = description for model in models: m_schema, m_definitions, m_nested_models = model_process_schema( model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix ) definitions.update(m_definitions) model_name = model_name_map[model] definitions[model_name] = m_schema if definitions: output_schema['definitions'] = definitions return output_schema def model_schema(model: Type['BaseModel'], by_alias: bool = True, ref_prefix: Optional[str] = None) -> Dict[str, Any]: """ Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level JSON key. :param model: a Pydantic model (a class that inherits from BaseModel) :param by_alias: generate the schemas using the aliases defined, if any :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the top-level key ``definitions``, so you can extract them from there. But all the references will have the set prefix. :return: dict with the JSON Schema for the passed ``model`` """ ref_prefix = ref_prefix or default_prefix flat_models = get_flat_models_from_model(model) model_name_map = get_model_name_map(flat_models) model_name = model_name_map[model] m_schema, m_definitions, nested_models = model_process_schema( model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix ) if model_name in nested_models: # model_name is in Nested models, it has circular references m_definitions[model_name] = m_schema m_schema = {'$ref': ref_prefix + model_name} if m_definitions: m_schema.update({'definitions': m_definitions}) return m_schema def field_schema( field: ModelField, *, by_alias: bool = True, model_name_map: Dict[Type['BaseModel'], str], ref_prefix: Optional[str] = None, known_models: Set[Type['BaseModel']] = None, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ Process a Pydantic field and return a tuple with a JSON Schema for it as the first item. Also return a dictionary of definitions with models as keys and their schemas as values. 
If the passed field is a model and has sub-models, and those sub-models don't have overrides (as ``title``, ``default``, etc), they will be included in the definitions and referenced in the schema instead of included recursively. :param field: a Pydantic ``ModelField`` :param by_alias: use the defined alias (if any) in the returned schema :param model_name_map: used to generate the JSON Schema references to other models included in the definitions :param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of #/definitions/ will be used :param known_models: used to solve circular references :return: tuple of the schema for this field and additional definitions """ ref_prefix = ref_prefix or default_prefix schema_overrides = False s = dict(title=field.field_info.title or field.alias.title().replace('_', ' ')) if field.field_info.title: schema_overrides = True if field.field_info.description: s['description'] = field.field_info.description schema_overrides = True if not field.required and not field.field_info.const and field.default is not None: s['default'] = encode_default(field.default) schema_overrides = True validation_schema = get_field_schema_validations(field) if validation_schema: s.update(validation_schema) schema_overrides = True f_schema, f_definitions, f_nested_models = field_type_schema( field, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, known_models=known_models or set(), ) # $ref will only be returned when there are no schema_overrides if '$ref' in f_schema: return f_schema, f_definitions, f_nested_models else: s.update(f_schema) return s, f_definitions, f_nested_models numeric_types = (int, float, Decimal) _str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( ('max_length', numeric_types, 'maxLength'), ('min_length', numeric_types, 'minLength'), ('regex', str, 'pattern'), ) _numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( ('gt', numeric_types, 'exclusiveMinimum'), ('lt', numeric_types, 'exclusiveMaximum'), ('ge', numeric_types, 'minimum'), ('le', numeric_types, 'maximum'), ('multiple_of', numeric_types, 'multipleOf'), ) def get_field_schema_validations(field: ModelField) -> Dict[str, Any]: """ Get the JSON Schema validation keywords for a ``field`` with an annotation of a Pydantic ``FieldInfo`` with validation arguments. """ f_schema: Dict[str, Any] = {} if lenient_issubclass(field.type_, (str, bytes)): for attr_name, t, keyword in _str_types_attrs: attr = getattr(field.field_info, attr_name, None) if isinstance(attr, t): f_schema[keyword] = attr if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool): for attr_name, t, keyword in _numeric_types_attrs: attr = getattr(field.field_info, attr_name, None) if isinstance(attr, t): f_schema[keyword] = attr if field.field_info is not None and field.field_info.const: f_schema['const'] = field.default if field.field_info.extra: f_schema.update(field.field_info.extra) return f_schema def get_model_name_map(unique_models: Set[Type['BaseModel']]) -> Dict[Type['BaseModel'], str]: """ Process a set of models and generate unique names for them to be used as keys in the JSON Schema definitions. By default the names are the same as the class name. But if two models in different Python modules have the same name (e.g. 
"users.Model" and "items.Model"), the generated names will be based on the Python module path for those conflicting models to prevent name collisions. :param unique_models: a Python set of models :return: dict mapping models to names """ name_model_map = {} conflicting_names: Set[str] = set() for model in unique_models: model_name = model.__name__ model_name = re.sub(r'[^a-zA-Z0-9.\-_]', '_', model_name) if model_name in conflicting_names: model_name = get_long_model_name(model) name_model_map[model_name] = model elif model_name in name_model_map: conflicting_names.add(model_name) conflicting_model = name_model_map.pop(model_name) name_model_map[get_long_model_name(conflicting_model)] = conflicting_model name_model_map[get_long_model_name(model)] = model else: name_model_map[model_name] = model return {v: k for k, v in name_model_map.items()} def get_flat_models_from_model( model: Type['BaseModel'], known_models: Set[Type['BaseModel']] = None ) -> Set[Type['BaseModel']]: """ Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. :param model: a Pydantic ``BaseModel`` subclass :param known_models: used to solve circular references :return: a set with the initial model and all its sub-models """ known_models = known_models or set() flat_models: Set[Type['BaseModel']] = set() flat_models.add(model) known_models |= flat_models fields = cast(Sequence[ModelField], model.__fields__.values()) flat_models |= get_flat_models_from_fields(fields, known_models=known_models) return flat_models def get_flat_models_from_field(field: ModelField, known_models: Set[Type['BaseModel']]) -> Set[Type['BaseModel']]: """ Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a sublcass of BaseModel (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree. I.e. if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. 
    :param field: a Pydantic ``ModelField``
    :param known_models: used to solve circular references
    :return: a set with the model used in the declaration for this field, if any, and all its sub-models
    """
    from .main import BaseModel  # noqa: F811

    flat_models: Set[Type[BaseModel]] = set()

    # Handle dataclass-based models
    field_type = field.type_
    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
        field_type = field_type.__pydantic_model__
    if field.sub_fields:
        flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models)
    elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models:
        flat_models |= get_flat_models_from_model(field_type, known_models=known_models)
    return flat_models


def get_flat_models_from_fields(
    fields: Sequence[ModelField], known_models: Set[Type['BaseModel']]
) -> Set[Type['BaseModel']]:
    """
    Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of
    ``BaseModel`` (so, any of them could be a submodel), and generate a set with their models and all the
    sub-models in the tree. I.e. if you pass the fields of a model ``Foo`` (subclass of ``BaseModel``) as
    ``fields``, and one of them is declared to be of type ``Bar`` (also subclass of ``BaseModel``) and that
    model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be
    ``set([Bar, Baz])``.

    :param fields: a list of Pydantic ``ModelField``s
    :param known_models: used to solve circular references
    :return: a set with any model declared in the fields, and all their sub-models
    """
    flat_models: Set[Type['BaseModel']] = set()
    for field in fields:
        flat_models |= get_flat_models_from_field(field, known_models=known_models)
    return flat_models


def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> Set[Type['BaseModel']]:
    """
    Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if
    you pass a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models,
    and ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be
    ``set([Foo, Bar, Baz])``.
    """
    flat_models: Set[Type['BaseModel']] = set()
    for model in models:
        flat_models |= get_flat_models_from_model(model)
    return flat_models


def get_long_model_name(model: Type['BaseModel']) -> str:
    return f'{model.__module__}__{model.__name__}'.replace('.', '__')


def field_type_schema(
    field: ModelField,
    *,
    by_alias: bool,
    model_name_map: Dict[Type['BaseModel'], str],
    schema_overrides: bool = False,
    ref_prefix: Optional[str] = None,
    known_models: Set[Type['BaseModel']],
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
    """
    Used by ``field_schema()``, you probably should be using that function.

    Take a single ``field`` and generate the schema for its type only, not including additional
    information such as title, etc. Also return additional schema definitions, from sub-models.
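
    For example (an illustrative sketch of the shape handling below)::

        List[int]       -> {'type': 'array', 'items': {'type': 'integer'}}
        Set[int]        -> {'type': 'array', 'items': {'type': 'integer'}, 'uniqueItems': True}
        Dict[str, int]  -> {'type': 'object', 'additionalProperties': {'type': 'integer'}}
        Tuple[str, int] -> {'type': 'array', 'items': [{'type': 'string'}, {'type': 'integer'}]}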
""" definitions = {} nested_models: Set[str] = set() ref_prefix = ref_prefix or default_prefix if field.shape in {SHAPE_LIST, SHAPE_TUPLE_ELLIPSIS, SHAPE_SEQUENCE, SHAPE_SET, SHAPE_FROZENSET}: f_schema, f_definitions, f_nested_models = field_singleton_schema( field, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, known_models=known_models ) definitions.update(f_definitions) nested_models.update(f_nested_models) s: Dict[str, Any] = {'type': 'array', 'items': f_schema} if field.shape in {SHAPE_SET, SHAPE_FROZENSET}: s['uniqueItems'] = True if field.field_info.min_items is not None: s['minItems'] = field.field_info.min_items if field.field_info.max_items is not None: s['maxItems'] = field.field_info.max_items return s, definitions, nested_models elif field.shape == SHAPE_MAPPING: dict_schema: Dict[str, Any] = {'type': 'object'} key_field = cast(ModelField, field.key_field) regex = getattr(key_field.type_, 'regex', None) f_schema, f_definitions, f_nested_models = field_singleton_schema( field, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, known_models=known_models ) definitions.update(f_definitions) nested_models.update(f_nested_models) if regex: # Dict keys have a regex pattern # f_schema might be a schema or empty dict, add it either way dict_schema['patternProperties'] = {regex.pattern: f_schema} elif f_schema: # The dict values are not simply Any, so they need a schema dict_schema['additionalProperties'] = f_schema return dict_schema, definitions, nested_models elif field.shape == SHAPE_TUPLE: sub_schema = [] sub_fields = cast(List[ModelField], field.sub_fields) for sf in sub_fields: sf_schema, sf_definitions, sf_nested_models = field_type_schema( sf, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, known_models=known_models ) definitions.update(sf_definitions) nested_models.update(sf_nested_models) sub_schema.append(sf_schema) if len(sub_schema) == 1: sub_schema = sub_schema[0] # type: ignore return {'type': 'array', 'items': sub_schema}, definitions, nested_models else: assert field.shape == SHAPE_SINGLETON, field.shape f_schema, f_definitions, f_nested_models = field_singleton_schema( field, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, known_models=known_models, ) definitions.update(f_definitions) nested_models.update(f_nested_models) return f_schema, definitions, nested_models def model_process_schema( model: Type['BaseModel'], *, by_alias: bool = True, model_name_map: Dict[Type['BaseModel'], str], ref_prefix: Optional[str] = None, known_models: Set[Type['BaseModel']] = None, ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ Used by ``model_schema()``, you probably should be using that function. Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All the definitions are returned as the second value. 
""" ref_prefix = ref_prefix or default_prefix known_models = known_models or set() s = {'title': model.__config__.title or model.__name__} doc = inspect.getdoc(model) if doc: s['description'] = doc known_models.add(model) m_schema, m_definitions, nested_models = model_type_schema( model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, known_models=known_models ) s.update(m_schema) s.update(model.__config__.schema_extra) return s, m_definitions, nested_models def model_type_schema( model: Type['BaseModel'], *, by_alias: bool, model_name_map: Dict[Type['BaseModel'], str], ref_prefix: Optional[str] = None, known_models: Set[Type['BaseModel']], ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ You probably should be using ``model_schema()``, this function is indirectly used by that function. Take a single ``model`` and generate the schema for its type only, not including additional information as title, etc. Also return additional schema definitions, from sub-models. """ ref_prefix = ref_prefix or default_prefix properties = {} required = [] definitions: Dict[str, Any] = {} nested_models: Set[str] = set() for k, f in model.__fields__.items(): try: f_schema, f_definitions, f_nested_models = field_schema( f, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, known_models=known_models ) except SkipField as skip: warnings.warn(skip.message, UserWarning) continue definitions.update(f_definitions) nested_models.update(f_nested_models) if by_alias: properties[f.alias] = f_schema if f.required: required.append(f.alias) else: properties[k] = f_schema if f.required: required.append(k) if ROOT_KEY in properties: out_schema = properties[ROOT_KEY] out_schema['title'] = model.__config__.title or model.__name__ else: out_schema = {'type': 'object', 'properties': properties} if required: out_schema['required'] = required if model.__config__.extra == 'forbid': out_schema['additionalProperties'] = False return out_schema, definitions, nested_models def field_singleton_sub_fields_schema( sub_fields: Sequence[ModelField], *, by_alias: bool, model_name_map: Dict[Type['BaseModel'], str], schema_overrides: bool = False, ref_prefix: Optional[str] = None, known_models: Set[Type['BaseModel']], ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ This function is indirectly used by ``field_schema()``, you probably should be using that function. Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``. 
""" ref_prefix = ref_prefix or default_prefix definitions = {} nested_models: Set[str] = set() sub_fields = [sf for sf in sub_fields if sf.include_in_schema()] if len(sub_fields) == 1: return field_type_schema( sub_fields[0], by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, known_models=known_models, ) else: sub_field_schemas = [] for sf in sub_fields: sub_schema, sub_definitions, sub_nested_models = field_type_schema( sf, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, known_models=known_models, ) definitions.update(sub_definitions) sub_field_schemas.append(sub_schema) nested_models.update(sub_nested_models) return {'anyOf': sub_field_schemas}, definitions, nested_models validation_attribute_to_schema_keyword = { 'min_length': 'minLength', 'max_length': 'maxLength', 'regex': 'pattern', 'gt': 'exclusiveMinimum', 'lt': 'exclusiveMaximum', 'ge': 'minimum', 'le': 'maximum', 'multiple_of': 'multipleOf', } # Order is important, subclasses of str must go before str, etc field_class_to_schema_enum_enabled: Tuple[Tuple[Any, Dict[str, Any]], ...] = ( (EmailStr, {'type': 'string', 'format': 'email'}), (AnyUrl, {'type': 'string', 'format': 'uri'}), (SecretStr, {'type': 'string', 'writeOnly': True}), (str, {'type': 'string'}), (SecretBytes, {'type': 'string', 'writeOnly': True}), (bytes, {'type': 'string', 'format': 'binary'}), (StrictBool, {'type': 'boolean'}), (bool, {'type': 'boolean'}), (int, {'type': 'integer'}), (float, {'type': 'number'}), (Decimal, {'type': 'number'}), (UUID1, {'type': 'string', 'format': 'uuid1'}), (UUID3, {'type': 'string', 'format': 'uuid3'}), (UUID4, {'type': 'string', 'format': 'uuid4'}), (UUID5, {'type': 'string', 'format': 'uuid5'}), (UUID, {'type': 'string', 'format': 'uuid'}), (NameEmail, {'type': 'string', 'format': 'name-email'}), (dict, {'type': 'object'}), (list, {'type': 'array', 'items': {}}), (tuple, {'type': 'array', 'items': {}}), (set, {'type': 'array', 'items': {}, 'uniqueItems': True}), (Color, {'type': 'string', 'format': 'color'}), ) json_scheme = {'type': 'string', 'format': 'json-string'} # Order is important, subclasses of Path must go before Path, etc field_class_to_schema_enum_disabled = ( (FilePath, {'type': 'string', 'format': 'file-path'}), (DirectoryPath, {'type': 'string', 'format': 'directory-path'}), (Path, {'type': 'string', 'format': 'path'}), (datetime, {'type': 'string', 'format': 'date-time'}), (date, {'type': 'string', 'format': 'date'}), (time, {'type': 'string', 'format': 'time'}), (timedelta, {'type': 'number', 'format': 'time-delta'}), (Json, json_scheme), (IPv4Network, {'type': 'string', 'format': 'ipv4network'}), (IPv6Network, {'type': 'string', 'format': 'ipv6network'}), (IPvAnyNetwork, {'type': 'string', 'format': 'ipvanynetwork'}), (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}), (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}), (IPvAnyInterface, {'type': 'string', 'format': 'ipvanyinterface'}), (IPv4Address, {'type': 'string', 'format': 'ipv4'}), (IPv6Address, {'type': 'string', 'format': 'ipv6'}), (IPvAnyAddress, {'type': 'string', 'format': 'ipvanyaddress'}), ) def field_singleton_schema( # noqa: C901 (ignore complexity) field: ModelField, *, by_alias: bool, model_name_map: Dict[Type['BaseModel'], str], schema_overrides: bool = False, ref_prefix: Optional[str] = None, known_models: Set[Type['BaseModel']], ) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: """ This function is 
indirectly used by ``field_schema()``, you should probably be using that function. Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models. """ from .main import BaseModel # noqa: F811 ref_prefix = ref_prefix or default_prefix definitions: Dict[str, Any] = {} nested_models: Set[str] = set() if field.sub_fields: return field_singleton_sub_fields_schema( field.sub_fields, by_alias=by_alias, model_name_map=model_name_map, schema_overrides=schema_overrides, ref_prefix=ref_prefix, known_models=known_models, ) if field.type_ is Any or type(field.type_) == TypeVar: if field.parse_json: return json_scheme, definitions, nested_models else: return {}, definitions, nested_models # no restrictions if is_callable_type(field.type_): raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.') f_schema: Dict[str, Any] = {} if field.field_info is not None and field.field_info.const: f_schema['const'] = field.default field_type = field.type_ if is_new_type(field_type): field_type = new_type_supertype(field_type) if is_literal_type(field_type): values = literal_values(field_type) if len(values) > 1: return field_schema( multivalue_literal_field_for_schema(values, field), by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, known_models=known_models, ) literal_value = values[0] field_type = type(literal_value) f_schema['const'] = literal_value if issubclass(field_type, Enum): f_schema.update({'enum': [item.value for item in field_type]}) # Don't return immediately, to allow adding specific types for field_name, schema_name in validation_attribute_to_schema_keyword.items(): field_value = getattr(field_type, field_name, None) if field_value is not None: if field_name == 'regex': field_value = field_value.pattern f_schema[schema_name] = field_value for type_, t_schema in field_class_to_schema_enum_enabled: if issubclass(field_type, type_): f_schema.update(t_schema) break # Return schema, with or without enum definitions if f_schema: return f_schema, definitions, nested_models for type_, t_schema in field_class_to_schema_enum_disabled: if issubclass(field_type, type_): return t_schema, definitions, nested_models # Handle dataclass-based models if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): field_type = field_type.__pydantic_model__ if issubclass(field_type, BaseModel): model_name = model_name_map[field_type] if field_type not in known_models: sub_schema, sub_definitions, sub_nested_models = model_process_schema( field_type, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, known_models=known_models, ) definitions.update(sub_definitions) definitions[model_name] = sub_schema nested_models.update(sub_nested_models) else: nested_models.add(model_name) schema_ref = {'$ref': ref_prefix + model_name} if not schema_overrides: return schema_ref, definitions, nested_models else: return {'allOf': [schema_ref]}, definitions, nested_models raise ValueError(f'Value not declarable with JSON Schema, field: {field}') def multivalue_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField: return ModelField( name=field.name, type_=Union[tuple(Literal[value] for value in values)], class_validators=field.class_validators, model_config=field.model_config, default=field.default, required=field.required, alias=field.alias, field_info=field.field_info, ) def encode_default(dft: Any) -> Any: if isinstance(dft, (int, float, str)): 
return dft elif isinstance(dft, (tuple, list, set)): t = type(dft) return t(encode_default(v) for v in dft) elif isinstance(dft, dict): return {encode_default(k): encode_default(v) for k, v in dft.items()} else: return pydantic_encoder(dft) _map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal} _field_constraints = { 'min_length', 'max_length', 'regex', 'gt', 'lt', 'ge', 'le', 'multiple_of', 'min_items', 'max_items', } def get_annotation_from_field_info(annotation: Any, field_info: FieldInfo, field_name: str) -> Type[Any]: # noqa: C901 """ Get an annotation with validation implemented for numbers and strings based on the field_info. :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema :param field_name: name of the field for use in error messages :return: the same ``annotation`` if unmodified or a new annotation with validation in place """ constraints = {f for f in _field_constraints if getattr(field_info, f) is not None} if not constraints: return annotation used_constraints: Set[str] = set() def go(type_: Any) -> Type[Any]: if is_literal_type(annotation) or isinstance(type_, ForwardRef) or lenient_issubclass(type_, ConstrainedList): return type_ origin = getattr(type_, '__origin__', None) if origin is not None: args: Tuple[Any, ...] = type_.__args__ if any(isinstance(a, ForwardRef) for a in args): # forward refs cause infinite recursion below return type_ if origin is Union: return Union[tuple(go(a) for a in args)] if issubclass(origin, List) and (field_info.min_items is not None or field_info.max_items is not None): used_constraints.update({'min_items', 'max_items'}) return conlist(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) for t in (Tuple, List, Set, FrozenSet, Sequence): if issubclass(origin, t): # type: ignore return t[tuple(go(a) for a in args)] # type: ignore if issubclass(origin, Dict): return Dict[args[0], go(args[1])] # type: ignore attrs: Optional[Tuple[str, ...]] = None constraint_func: Optional[Callable[..., type]] = None if isinstance(type_, type): if issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl, ConstrainedStr)): attrs = ('max_length', 'min_length', 'regex') constraint_func = constr elif issubclass(type_, bytes): attrs = ('max_length', 'min_length', 'regex') constraint_func = conbytes elif issubclass(type_, numeric_types) and not issubclass( type_, (ConstrainedInt, ConstrainedFloat, ConstrainedDecimal, ConstrainedList, bool) ): # Is numeric type attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of') numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch constraint_func = _map_types_constraint[numeric_type] if attrs: used_constraints.update(set(attrs)) kwargs = { attr_name: attr for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs) if attr is not None } if kwargs: constraint_func = cast(Callable[..., type], constraint_func) return constraint_func(**kwargs) return type_ ans = go(annotation) unused_constraints = constraints - used_constraints if unused_constraints: raise ValueError( f'On field "{field_name}" the following field constraints are set but not enforced: ' f'{", ".join(unused_constraints)}. 
' f'\nFor more details see https://pydantic-docs.helpmanual.io/usage/schema/#unenforced-field-constraints' ) return ans class SkipField(Exception): """ Utility exception used to exclude fields from schema. """ def __init__(self, message: str) -> None: self.message = message pydantic-1.2/pydantic/tools.py000066400000000000000000000026101357000400300165030ustar00rootroot00000000000000from functools import lru_cache from pathlib import Path from typing import Any, Callable, Optional, Type, TypeVar, Union from pydantic.parse import Protocol, load_file from .typing import display_as_type __all__ = ('parse_file_as', 'parse_obj_as') NameFactory = Union[str, Callable[[Type[Any]], str]] def _generate_parsing_type_name(type_: Any) -> str: return f'ParsingModel[{display_as_type(type_)}]' @lru_cache(maxsize=2048) def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any: from pydantic.main import create_model if type_name is None: type_name = _generate_parsing_type_name if not isinstance(type_name, str): type_name = type_name(type_) return create_model(type_name, __root__=(type_, ...)) T = TypeVar('T') def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T: model_type = _get_parsing_type(type_, type_name=type_name) return model_type(__root__=obj).__root__ def parse_file_as( type_: Type[T], path: Union[str, Path], *, content_type: str = None, encoding: str = 'utf8', proto: Protocol = None, allow_pickle: bool = False, type_name: Optional[NameFactory] = None, ) -> T: obj = load_file(path, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle) return parse_obj_as(type_, obj, type_name=type_name) pydantic-1.2/pydantic/types.py000066400000000000000000000456731357000400300165270ustar00rootroot00000000000000import re import warnings from decimal import Decimal from enum import Enum from pathlib import Path from types import new_class from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, List, Optional, Pattern, Type, TypeVar, Union, cast from uuid import UUID from . 
import errors from .typing import AnyType from .utils import import_string from .validators import ( bytes_validator, constr_length_validator, constr_strip_whitespace, decimal_validator, float_validator, int_validator, list_validator, number_multiple_validator, number_size_validator, path_exists_validator, path_validator, str_validator, strict_float_validator, strict_int_validator, strict_str_validator, ) __all__ = [ 'NoneStr', 'NoneBytes', 'StrBytes', 'NoneStrBytes', 'StrictStr', 'ConstrainedBytes', 'conbytes', 'ConstrainedList', 'conlist', 'ConstrainedStr', 'constr', 'PyObject', 'ConstrainedInt', 'conint', 'PositiveInt', 'NegativeInt', 'ConstrainedFloat', 'confloat', 'PositiveFloat', 'NegativeFloat', 'ConstrainedDecimal', 'condecimal', 'UUID1', 'UUID3', 'UUID4', 'UUID5', 'FilePath', 'DirectoryPath', 'Json', 'JsonWrapper', 'SecretStr', 'SecretBytes', 'StrictBool', 'StrictInt', 'StrictFloat', 'PaymentCardNumber', 'ByteSize', ] NoneStr = Optional[str] NoneBytes = Optional[bytes] StrBytes = Union[str, bytes] NoneStrBytes = Optional[StrBytes] OptionalInt = Optional[int] OptionalIntFloat = Union[OptionalInt, float] OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal] StrIntFloat = Union[str, int, float] if TYPE_CHECKING: from .dataclasses import DataclassType # noqa: F401 from .main import BaseModel, BaseConfig # noqa: F401 from .typing import CallableGenerator ModelOrDc = Type[Union['BaseModel', 'DataclassType']] class ConstrainedBytes(bytes): strip_whitespace = False min_length: OptionalInt = None max_length: OptionalInt = None @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield bytes_validator yield constr_strip_whitespace yield constr_length_validator def conbytes(*, strip_whitespace: bool = False, min_length: int = None, max_length: int = None) -> Type[bytes]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict(strip_whitespace=strip_whitespace, min_length=min_length, max_length=max_length) return type('ConstrainedBytesValue', (ConstrainedBytes,), namespace) T = TypeVar('T') # This types superclass should be List[T], but cython chokes on that... 
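# Usage sketch (editor's example, not part of the original module): the ``conlist``
# factory defined below produces a constrained list type whose length bounds are
# enforced by ``ConstrainedList.list_length_validator``.
from pydantic import BaseModel, conlist

class Basket(BaseModel):
    items: conlist(str, min_items=1, max_items=3)

Basket(items=['apple'])  # ok
# Basket(items=[]) raises ValidationError via ListMinLengthError
# Basket(items=['a', 'b', 'c', 'd']) raises ValidationError via ListMaxLengthError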
class ConstrainedList(list): # type: ignore # Needed for pydantic to detect that this is a list __origin__ = list __args__: List[Type[T]] # type: ignore min_items: Optional[int] = None max_items: Optional[int] = None item_type: Type[T] # type: ignore @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield list_validator yield cls.list_length_validator @classmethod def list_length_validator(cls, v: 'List[T]') -> 'List[T]': v_len = len(v) if cls.min_items is not None and v_len < cls.min_items: raise errors.ListMinLengthError(limit_value=cls.min_items) if cls.max_items is not None and v_len > cls.max_items: raise errors.ListMaxLengthError(limit_value=cls.max_items) return v def conlist(item_type: Type[T], *, min_items: int = None, max_items: int = None) -> Type[List[T]]: # __args__ is needed to conform to typing generics api namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} # We use new_class to be able to deal with Generic types return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace)) class ConstrainedStr(str): strip_whitespace = False min_length: OptionalInt = None max_length: OptionalInt = None curtail_length: OptionalInt = None regex: Optional[Pattern[str]] = None strict = False @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield strict_str_validator if cls.strict else str_validator yield constr_strip_whitespace yield constr_length_validator yield cls.validate @classmethod def validate(cls, value: str) -> str: if cls.curtail_length and len(value) > cls.curtail_length: value = value[: cls.curtail_length] if cls.regex: if not cls.regex.match(value): raise errors.StrRegexError(pattern=cls.regex.pattern) return value def constr( *, strip_whitespace: bool = False, strict: bool = False, min_length: int = None, max_length: int = None, curtail_length: int = None, regex: str = None, ) -> Type[str]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict( strip_whitespace=strip_whitespace, strict=strict, min_length=min_length, max_length=max_length, curtail_length=curtail_length, regex=regex and re.compile(regex), ) return type('ConstrainedStrValue', (ConstrainedStr,), namespace) class StrictStr(ConstrainedStr): strict = True if TYPE_CHECKING: StrictBool = bool else: class StrictBool(int): """ StrictBool to allow for bools which are not type-coerced. """ @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Any) -> bool: """ Ensure that we only allow bools. 
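
        For example (a sketch): ``validate(True)`` returns ``True`` unchanged, while
        ``validate(1)`` raises ``StrictBoolError`` even though ``1 == True`` in Python.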
""" if isinstance(value, bool): return value raise errors.StrictBoolError() class PyObject: validate_always = True @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, value: Any) -> Any: if isinstance(value, Callable): # type: ignore return value try: value = str_validator(value) except errors.StrError: raise errors.PyObjectError(error_message='value is neither a valid import path not a valid callable') try: return import_string(value) except ImportError as e: raise errors.PyObjectError(error_message=str(e)) class ConstrainedNumberMeta(type): def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) if new_cls.gt is not None and new_cls.ge is not None: raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') if new_cls.lt is not None and new_cls.le is not None: raise errors.ConfigError('bounds lt and le cannot be specified at the same time') return new_cls class ConstrainedInt(int, metaclass=ConstrainedNumberMeta): strict: bool = False gt: OptionalInt = None ge: OptionalInt = None lt: OptionalInt = None le: OptionalInt = None multiple_of: OptionalInt = None @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield strict_int_validator if cls.strict else int_validator yield number_size_validator yield number_multiple_validator def conint( *, strict: bool = False, gt: int = None, ge: int = None, lt: int = None, le: int = None, multiple_of: int = None ) -> Type[int]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) return type('ConstrainedIntValue', (ConstrainedInt,), namespace) class PositiveInt(ConstrainedInt): gt = 0 class NegativeInt(ConstrainedInt): lt = 0 class StrictInt(ConstrainedInt): strict = True class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta): strict: bool = False gt: OptionalIntFloat = None ge: OptionalIntFloat = None lt: OptionalIntFloat = None le: OptionalIntFloat = None multiple_of: OptionalIntFloat = None @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield strict_float_validator if cls.strict else float_validator yield number_size_validator yield number_multiple_validator def confloat( *, strict: bool = False, gt: float = None, ge: float = None, lt: float = None, le: float = None, multiple_of: float = None, ) -> Type[float]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace) class PositiveFloat(ConstrainedFloat): gt = 0 class NegativeFloat(ConstrainedFloat): lt = 0 class StrictFloat(ConstrainedFloat): strict = True class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta): gt: OptionalIntFloatDecimal = None ge: OptionalIntFloatDecimal = None lt: OptionalIntFloatDecimal = None le: OptionalIntFloatDecimal = None max_digits: OptionalInt = None decimal_places: OptionalInt = None multiple_of: OptionalIntFloatDecimal = None @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield decimal_validator yield number_size_validator yield number_multiple_validator yield cls.validate @classmethod def validate(cls, value: Decimal) -> Decimal: digit_tuple, exponent = value.as_tuple()[1:] if exponent in {'F', 'n', 'N'}: raise errors.DecimalIsNotFiniteError() 
if exponent >= 0: # A positive exponent adds that many trailing zeros. digits = len(digit_tuple) + exponent decimals = 0 else: # If the absolute value of the negative exponent is larger than the # number of digits, then it's the same as the number of digits, # because it'll consume all of the digits in digit_tuple and then # add abs(exponent) - len(digit_tuple) leading zeros after the # decimal point. if abs(exponent) > len(digit_tuple): digits = decimals = abs(exponent) else: digits = len(digit_tuple) decimals = abs(exponent) whole_digits = digits - decimals if cls.max_digits is not None and digits > cls.max_digits: raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits) if cls.decimal_places is not None and decimals > cls.decimal_places: raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places) if cls.max_digits is not None and cls.decimal_places is not None: expected = cls.max_digits - cls.decimal_places if whole_digits > expected: raise errors.DecimalWholeDigitsError(whole_digits=expected) return value def condecimal( *, gt: Decimal = None, ge: Decimal = None, lt: Decimal = None, le: Decimal = None, max_digits: int = None, decimal_places: int = None, multiple_of: Decimal = None, ) -> Type[Decimal]: # use kwargs then define conf in a dict to aid with IDE type hinting namespace = dict( gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of ) return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace) class UUID1(UUID): _required_version = 1 class UUID3(UUID): _required_version = 3 class UUID4(UUID): _required_version = 4 class UUID5(UUID): _required_version = 5 class FilePath(Path): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield path_validator yield path_exists_validator yield cls.validate @classmethod def validate(cls, value: Path) -> Path: if not value.is_file(): raise errors.PathNotAFileError(path=value) return value class DirectoryPath(Path): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield path_validator yield path_exists_validator yield cls.validate @classmethod def validate(cls, value: Path) -> Path: if not value.is_dir(): raise errors.PathNotADirectoryError(path=value) return value class JsonWrapper: pass class JsonMeta(type): def __getitem__(self, t: AnyType) -> Type[JsonWrapper]: return type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t}) class Json(metaclass=JsonMeta): pass class SecretStr: @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield str_validator yield cls.validate @classmethod def validate(cls, value: str) -> 'SecretStr': return cls(value) def __init__(self, value: str): self._secret_value = value def __repr__(self) -> str: return f"SecretStr('{self}')" def __str__(self) -> str: return '**********' if self._secret_value else '' def display(self) -> str: warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning) return str(self) def get_secret_value(self) -> str: return self._secret_value class SecretBytes: @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield bytes_validator yield cls.validate @classmethod def validate(cls, value: bytes) -> 'SecretBytes': return cls(value) def __init__(self, value: bytes): self._secret_value = value def __repr__(self) -> str: return f"SecretBytes(b'{self}')" def __str__(self) -> str: return '**********' if self._secret_value else '' def display(self) -> str: warnings.warn('`secret_bytes.display()` is deprecated, use 
`str(secret_bytes)` instead', DeprecationWarning) return str(self) def get_secret_value(self) -> bytes: return self._secret_value class PaymentCardBrand(Enum): amex = 'American Express' mastercard = 'Mastercard' visa = 'Visa' other = 'other' def __str__(self) -> str: return self.value class PaymentCardNumber(str): """ Based on: https://en.wikipedia.org/wiki/Payment_card_number """ strip_whitespace: ClassVar[bool] = True min_length: ClassVar[int] = 12 max_length: ClassVar[int] = 19 bin: str last4: str brand: PaymentCardBrand def __init__(self, card_number: str): self.bin = card_number[:6] self.last4 = card_number[-4:] self.brand = self._get_brand(card_number) @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield str_validator yield constr_strip_whitespace yield constr_length_validator yield cls.validate_digits yield cls.validate_luhn_check_digit yield cls yield cls.validate_length_for_brand @property def masked(self) -> str: num_masked = len(self) - 10 # len(bin) + len(last4) == 10 return f'{self.bin}{"*" * num_masked}{self.last4}' @classmethod def validate_digits(cls, card_number: str) -> str: if not card_number.isdigit(): raise errors.NotDigitError return card_number @classmethod def validate_luhn_check_digit(cls, card_number: str) -> str: """ Based on: https://en.wikipedia.org/wiki/Luhn_algorithm """ sum_ = int(card_number[-1]) length = len(card_number) parity = length % 2 for i in range(length - 1): digit = int(card_number[i]) if i % 2 == parity: digit *= 2 if digit > 9: digit -= 9 sum_ += digit valid = sum_ % 10 == 0 if not valid: raise errors.LuhnValidationError return card_number @classmethod def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber': """ Validate length based on BIN for major brands: https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN) """ required_length: Optional[int] = None if card_number.brand in {PaymentCardBrand.visa, PaymentCardBrand.mastercard}: required_length = 16 valid = len(card_number) == required_length elif card_number.brand is PaymentCardBrand.amex: required_length = 15 valid = len(card_number) == required_length else: valid = True if not valid: raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length) return card_number @staticmethod def _get_brand(card_number: str) -> PaymentCardBrand: if card_number[0] == '4': brand = PaymentCardBrand.visa elif 51 <= int(card_number[:2]) <= 55: brand = PaymentCardBrand.mastercard elif card_number[:2] in {'34', '37'}: brand = PaymentCardBrand.amex else: brand = PaymentCardBrand.other return brand BYTE_SIZES = { 'b': 1, 'kb': 10 ** 3, 'mb': 10 ** 6, 'gb': 10 ** 9, 'tb': 10 ** 12, 'pb': 10 ** 15, 'eb': 10 ** 18, 'kib': 2 ** 10, 'mib': 2 ** 20, 'gib': 2 ** 30, 'tib': 2 ** 40, 'pib': 2 ** 50, 'eib': 2 ** 60, } BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) class ByteSize(int): @classmethod def __get_validators__(cls) -> 'CallableGenerator': yield cls.validate @classmethod def validate(cls, v: StrIntFloat) -> 'ByteSize': try: return cls(int(v)) except ValueError: pass str_match = byte_string_re.match(str(v)) if str_match is None: raise errors.InvalidByteSize() scalar, unit = str_match.groups() if unit is None: unit = 'b' try: unit_mult = BYTE_SIZES[unit.lower()] except KeyError: raise errors.InvalidByteSizeUnit(unit=unit) return cls(int(float(scalar) * unit_mult)) def human_readable(self, decimal: bool = False) -> str:
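# repeatedly divide by the unit base until the value fits under it, e.g. ByteSize(1572864).human_readable() gives '1.5MiB' and ByteSize(1572864).human_readable(decimal=True) gives '1.6MB'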
if decimal: divisor = 1000 units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] final_unit = 'EB' else: divisor = 1024 units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] final_unit = 'EiB' num = float(self) for unit in units: if abs(num) < divisor: return f'{num:0.1f}{unit}' num /= divisor return f'{num:0.1f}{final_unit}' def to(self, unit: str) -> float: try: unit_div = BYTE_SIZES[unit.lower()] except KeyError: raise errors.InvalidByteSizeUnit(unit=unit) return self / unit_div pydantic-1.2/pydantic/typing.py000066400000000000000000000140211357000400300166540ustar00rootroot00000000000000import sys from enum import Enum from typing import ( # type: ignore TYPE_CHECKING, AbstractSet, Any, ClassVar, Dict, Generator, List, NewType, Optional, Sequence, Set, Tuple, Type, Union, _eval_type, ) try: from typing import _TypingBase as typing_base # type: ignore except ImportError: from typing import _Final as typing_base # type: ignore try: from typing import ForwardRef # type: ignore def evaluate_forwardref(type_, globalns, localns): # type: ignore return type_._evaluate(globalns, localns) except ImportError: # python 3.6 from typing import _ForwardRef as ForwardRef # type: ignore def evaluate_forwardref(type_, globalns, localns): # type: ignore return type_._eval_type(globalns, localns) if sys.version_info < (3, 7): from typing import Callable as Callable AnyCallable = Callable[..., Any] else: from collections.abc import Callable as Callable from typing import Callable as TypingCallable AnyCallable = TypingCallable[..., Any] if sys.version_info < (3, 8): if TYPE_CHECKING: from typing_extensions import Literal else: # due to different mypy warnings raised during CI for python 3.7 and 3.8 try: from typing_extensions import Literal except ImportError: Literal = None else: from typing import Literal if TYPE_CHECKING: from .fields import ModelField TupleGenerator = Generator[Tuple[str, Any], None, None] DictStrAny = Dict[str, Any] DictAny = Dict[Any, Any] SetStr = Set[str] ListStr = List[str] IntStr = Union[int, str] AbstractSetIntStr = AbstractSet[IntStr] DictIntStrAny = Dict[IntStr, Any] CallableGenerator = Generator[AnyCallable, None, None] ReprArgs = Sequence[Tuple[Optional[str], Any]] __all__ = ( 'ForwardRef', 'Callable', 'AnyCallable', 'AnyType', 'NoneType', 'display_as_type', 'resolve_annotations', 'is_callable_type', 'is_literal_type', 'literal_values', 'Literal', 'is_new_type', 'new_type_supertype', 'is_classvar', 'update_field_forward_refs', 'TupleGenerator', 'DictStrAny', 'DictAny', 'SetStr', 'ListStr', 'IntStr', 'AbstractSetIntStr', 'DictIntStrAny', 'CallableGenerator', 'ReprArgs', ) AnyType = Type[Any] NoneType = type(None) def display_as_type(v: AnyType) -> str: if not isinstance(v, typing_base) and not isinstance(v, type): v = type(v) if isinstance(v, type) and issubclass(v, Enum): if issubclass(v, int): return 'int' elif issubclass(v, str): return 'str' else: return 'enum' try: return v.__name__ except AttributeError: # happens with typing objects return str(v).replace('typing.', '') def resolve_annotations(raw_annotations: Dict[str, AnyType], module_name: Optional[str]) -> Dict[str, AnyType]: """ Partially taken from typing.get_type_hints. Resolve string or ForwardRef annotations into type objects if possible.
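For example, an annotation supplied as the string 'List[int]' is evaluated in the defining module's namespace and returned as typing.List[int]; names which cannot yet be resolved are left as ForwardRefs, to be fixed up later via update_forward_refs().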
""" if module_name: base_globals: Optional[Dict[str, Any]] = sys.modules[module_name].__dict__ else: base_globals = None annotations = {} for name, value in raw_annotations.items(): if isinstance(value, str): if sys.version_info >= (3, 7): value = ForwardRef(value, is_argument=False) else: value = ForwardRef(value) try: value = _eval_type(value, base_globals, None) except NameError: # this is ok, it can be fixed with update_forward_refs pass annotations[name] = value return annotations def is_callable_type(type_: AnyType) -> bool: return type_ is Callable or getattr(type_, '__origin__', None) is Callable if sys.version_info >= (3, 7): def is_literal_type(type_: AnyType) -> bool: return Literal is not None and getattr(type_, '__origin__', None) is Literal def literal_values(type_: AnyType) -> Tuple[Any, ...]: return type_.__args__ else: def is_literal_type(type_: AnyType) -> bool: return Literal is not None and hasattr(type_, '__values__') and type_ == Literal[type_.__values__] def literal_values(type_: AnyType) -> Tuple[Any, ...]: return type_.__values__ test_type = NewType('test_type', str) def is_new_type(type_: AnyType) -> bool: return isinstance(type_, type(test_type)) and hasattr(type_, '__supertype__') def new_type_supertype(type_: AnyType) -> AnyType: while hasattr(type_, '__supertype__'): type_ = type_.__supertype__ return type_ def _check_classvar(v: AnyType) -> bool: return type(v) == type(ClassVar) and (sys.version_info < (3, 7) or getattr(v, '_name', None) == 'ClassVar') def is_classvar(ann_type: AnyType) -> bool: return _check_classvar(ann_type) or _check_classvar(getattr(ann_type, '__origin__', None)) def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None: """ Try to update ForwardRefs on fields based on this ModelField, globalns and localns. """ if type(field.type_) == ForwardRef: field.type_ = evaluate_forwardref(field.type_, globalns, localns or None) field.prepare() if field.sub_fields: for sub_f in field.sub_fields: update_field_forward_refs(sub_f, globalns=globalns, localns=localns) def get_class(type_: AnyType) -> Union[None, bool, AnyType]: """ Tries to get the class of a Type[T] annotation. Returns True if Type is used without brackets. Otherwise returns None. """ try: origin = getattr(type_, '__origin__') if origin is None: # Python 3.6 origin = type_ if issubclass(origin, Type): # type: ignore if type_.__args__ is None or not isinstance(type_.__args__[0], type): return True return type_.__args__[0] except AttributeError: pass return None pydantic-1.2/pydantic/utils.py000066400000000000000000000232221357000400300165050ustar00rootroot00000000000000import inspect import warnings from importlib import import_module from typing import ( TYPE_CHECKING, AbstractSet, Any, Callable, Dict, Generator, Iterator, List, Optional, Set, Tuple, Type, TypeVar, Union, no_type_check, ) from .typing import AnyType, display_as_type if TYPE_CHECKING: from .main import BaseModel # noqa: F401 from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, ReprArgs # noqa: F401 KeyType = TypeVar('KeyType') def import_string(dotted_path: str) -> Any: """ Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the last name in the path. Raise ImportError if the import fails. 
""" try: module_path, class_name = dotted_path.strip(' ').rsplit('.', 1) except ValueError as e: raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e module = import_module(module_path) try: return getattr(module, class_name) except AttributeError as e: raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e def truncate(v: Union[str], *, max_len: int = 80) -> str: """ Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long """ warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning) if isinstance(v, str) and len(v) > (max_len - 2): # -3 so quote + string + … + quote has correct length return (v[: (max_len - 3)] + '…').__repr__() try: v = v.__repr__() except TypeError: v = type(v).__repr__(v) # in case v is a type if len(v) > max_len: v = v[: max_len - 1] + '…' return v ExcType = Type[Exception] def sequence_like(v: AnyType) -> bool: return isinstance(v, (list, tuple, set, frozenset)) or inspect.isgenerator(v) def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None: """ Ensure that the field's name does not shadow an existing attribute of the model. """ for base in bases: if getattr(base, field_name, None): raise NameError( f'Field name "{field_name}" shadows a BaseModel attribute; ' f'use a different field name with "alias=\'{field_name}\'".' ) def lenient_issubclass(cls: Any, class_or_tuple: Union[AnyType, Tuple[AnyType, ...]]) -> bool: return isinstance(cls, type) and issubclass(cls, class_or_tuple) def in_ipython() -> bool: """ Check whether we're in an ipython environment, including jupyter notebooks. """ try: eval('__IPYTHON__') except NameError: return False else: # pragma: no cover return True def deep_update(mapping: Dict[KeyType, Any], updating_mapping: Dict[KeyType, Any]) -> Dict[KeyType, Any]: updated_mapping = mapping.copy() for k, v in updating_mapping.items(): if k in mapping and isinstance(mapping[k], dict) and isinstance(v, dict): updated_mapping[k] = deep_update(mapping[k], v) else: updated_mapping[k] = v return updated_mapping def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool: """ Return True if two floats are almost equal """ return abs(value_1 - value_2) <= delta class PyObjectStr(str): """ String class where repr doesn't include quotes. Useful with Representation when you want to return a string representation of something that valid (or pseudo-valid) python. """ def __repr__(self) -> str: return str(self) class Representation: """ Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations of objects. """ def __repr_args__(self) -> 'ReprArgs': """ Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. Can either return: * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` """ attrs = ((s, getattr(self, s)) for s in self.__slots__) return [(a, v) for a, v in attrs if v is not None] def __repr_name__(self) -> str: """ Name of the instance's class, used in __repr__. 
""" return self.__class__.__name__ def __repr_str__(self, join_str: str) -> str: return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: """ Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects """ yield self.__repr_name__() + '(' yield 1 for name, value in self.__repr_args__(): if name is not None: yield name + '=' yield fmt(value) yield ',' yield 0 yield -1 yield ')' def __str__(self) -> str: return self.__repr_str__(' ') def __repr__(self) -> str: return f'{self.__repr_name__()}({self.__repr_str__(", ")})' class GetterDict(Representation): """ Hack to make object's smell just enough like dicts for validate_model. We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves. """ __slots__ = ('_obj',) def __init__(self, obj: Any): self._obj = obj def __getitem__(self, key: str) -> Any: try: return getattr(self._obj, key) except AttributeError as e: raise KeyError(key) from e def get(self, key: Any, default: Any = None) -> Any: return getattr(self._obj, key, default) def extra_keys(self) -> Set[Any]: """ We don't want to get any other attributes of obj if the model didn't explicitly ask for them """ return set() def keys(self) -> List[Any]: """ Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python dictionaries. """ return list(self) def values(self) -> List[Any]: return [self[k] for k in self] def items(self) -> Iterator[Tuple[str, Any]]: for k in self: yield k, self.get(k) def __iter__(self) -> Iterator[str]: for name in dir(self._obj): if not name.startswith('_'): yield name def __len__(self) -> int: return sum(1 for _ in self) def __contains__(self, item: Any) -> bool: return item in self.keys() def __eq__(self, other: Any) -> bool: return dict(self) == dict(other.items()) # type: ignore def __repr_args__(self) -> 'ReprArgs': return [(None, dict(self))] # type: ignore def __repr_name__(self) -> str: return f'GetterDict[{display_as_type(self._obj)}]' class ValueItems(Representation): """ Class for more convenient calculation of excluded or included fields on values. """ __slots__ = ('_items', '_type') def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'DictIntStrAny']) -> None: if TYPE_CHECKING: self._items: Union['AbstractSetIntStr', 'DictIntStrAny'] self._type: Type[Union[set, dict]] # type: ignore # For further type checks speed-up if isinstance(items, dict): self._type = dict elif isinstance(items, AbstractSet): self._type = set else: raise TypeError(f'Unexpected type of exclude value {type(items)}') if isinstance(value, (list, tuple)): items = self._normalize_indexes(items, len(value)) self._items = items @no_type_check def is_excluded(self, item: Any) -> bool: """ Check if item is fully excluded (value considered excluded if self._type is set and item contained in self._items or self._type is dict and self._items.get(item) is ... :param item: key or index of a value """ if self._type is set: return item in self._items return self._items.get(item) is ... 
@no_type_check def is_included(self, item: Any) -> bool: """ Check if item is contained in self._items :param item: key or index of value """ return item in self._items @no_type_check def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'DictIntStrAny']]: """ :param e: key or index of an element of the value :return: raw values for element if self._items is a dict and contains the needed element """ if self._type is dict: item = self._items.get(e) return item if item is not ... else None return None @no_type_check def _normalize_indexes( self, items: Union['AbstractSetIntStr', 'DictIntStrAny'], v_length: int ) -> Union['AbstractSetIntStr', 'DictIntStrAny']: """ :param items: dict or set of indexes which will be normalized :param v_length: length of the sequence whose indexes will be normalized >>> self._normalize_indexes({0, -2, -1}, 4) {0, 2, 3} """ if self._type is set: return {v_length + i if i < 0 else i for i in items} else: return {v_length + i if i < 0 else i: v for i, v in items.items()} def __repr_args__(self) -> 'ReprArgs': return [(None, self._items)] pydantic-1.2/pydantic/validators.py000066400000000000000000000371371357000400300175270ustar00rootroot00000000000000import re import sys from collections import OrderedDict from datetime import date, datetime, time, timedelta from decimal import Decimal, DecimalException from enum import Enum, IntEnum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import ( TYPE_CHECKING, Any, Callable, Dict, FrozenSet, Generator, List, Optional, Pattern, Set, Tuple, Type, TypeVar, Union, ) from uuid import UUID from . import errors from .datetime_parse import parse_date, parse_datetime, parse_duration, parse_time from .typing import AnyCallable, AnyType, ForwardRef, display_as_type, get_class, is_callable_type, is_literal_type from .utils import almost_equal_floats, lenient_issubclass, sequence_like if TYPE_CHECKING: from .fields import ModelField from .main import BaseConfig from .types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt] AnyOrderedDict = OrderedDict[Any, Any] Number = Union[int, float, Decimal] StrBytes = Union[str, bytes] def str_validator(v: Any) -> Optional[str]: if isinstance(v, str): if isinstance(v, Enum): return v.value else: return v elif isinstance(v, (float, int, Decimal)): # is there anything else we want to add here? If you think so, create an issue.
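# note: bool is a subclass of int, so e.g. str_validator(True) returns 'True' via this branch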
return str(v) elif isinstance(v, (bytes, bytearray)): return v.decode() else: raise errors.StrError() def strict_str_validator(v: Any) -> str: if isinstance(v, str): return v raise errors.StrError() def bytes_validator(v: Any) -> bytes: if isinstance(v, bytes): return v elif isinstance(v, bytearray): return bytes(v) elif isinstance(v, str): return v.encode() elif isinstance(v, (float, int, Decimal)): return str(v).encode() else: raise errors.BytesError() BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'} BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'} def bool_validator(v: Any) -> bool: if v is True or v is False: return v if isinstance(v, bytes): v = v.decode() if isinstance(v, str): v = v.lower() try: if v in BOOL_TRUE: return True if v in BOOL_FALSE: return False except TypeError: raise errors.BoolError() raise errors.BoolError() def int_validator(v: Any) -> int: if isinstance(v, int) and not (v is True or v is False): return v try: return int(v) except (TypeError, ValueError): raise errors.IntegerError() def strict_int_validator(v: Any) -> int: if isinstance(v, int) and not (v is True or v is False): return v raise errors.IntegerError() def float_validator(v: Any) -> float: if isinstance(v, float): return v try: return float(v) except (TypeError, ValueError): raise errors.FloatError() def strict_float_validator(v: Any) -> float: if isinstance(v, float): return v raise errors.FloatError() def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number': field_type: ConstrainedNumber = field.type_ if field_type.multiple_of is not None: mod = float(v) / float(field_type.multiple_of) % 1 if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0): raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of) return v def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number': field_type: ConstrainedNumber = field.type_ if field_type.gt is not None and not v > field_type.gt: raise errors.NumberNotGtError(limit_value=field_type.gt) elif field_type.ge is not None and not v >= field_type.ge: raise errors.NumberNotGeError(limit_value=field_type.ge) if field_type.lt is not None and not v < field_type.lt: raise errors.NumberNotLtError(limit_value=field_type.lt) if field_type.le is not None and not v <= field_type.le: raise errors.NumberNotLeError(limit_value=field_type.le) return v def constant_validator(v: 'Any', field: 'ModelField') -> 'Any': """Validate ``const`` fields. The value provided for a ``const`` field must be equal to the default value of the field. This is to support the keyword of the same name in JSON Schema. 
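For example, with ``x: int = Field(3, const=True)`` only inputs equal to the default 3 will validate.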
""" if v != field.default: raise errors.WrongConstantError(given=v, permitted=[field.default]) return v def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes': v_len = len(v) min_length = config.min_anystr_length if min_length is not None and v_len < min_length: raise errors.AnyStrMinLengthError(limit_value=min_length) max_length = config.max_anystr_length if max_length is not None and v_len > max_length: raise errors.AnyStrMaxLengthError(limit_value=max_length) return v def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes': return v.strip() def ordered_dict_validator(v: Any) -> 'AnyOrderedDict': if isinstance(v, OrderedDict): return v try: return OrderedDict(v) except (TypeError, ValueError): raise errors.DictError() def dict_validator(v: Any) -> Dict[Any, Any]: if isinstance(v, dict): return v try: return dict(v) except (TypeError, ValueError): raise errors.DictError() def list_validator(v: Any) -> List[Any]: if isinstance(v, list): return v elif sequence_like(v): return list(v) else: raise errors.ListError() def tuple_validator(v: Any) -> Tuple[Any, ...]: if isinstance(v, tuple): return v elif sequence_like(v): return tuple(v) else: raise errors.TupleError() def set_validator(v: Any) -> Set[Any]: if isinstance(v, set): return v elif sequence_like(v): return set(v) else: raise errors.SetError() def frozenset_validator(v: Any) -> FrozenSet[Any]: if isinstance(v, frozenset): return v elif sequence_like(v): return frozenset(v) else: raise errors.FrozenSetError() def enum_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum: try: enum_v = field.type_(v) except ValueError: # field.type_ should be an enum, so will be iterable raise errors.EnumError(enum_values=list(field.type_)) return enum_v.value if config.use_enum_values else enum_v def uuid_validator(v: Any, field: 'ModelField') -> UUID: try: if isinstance(v, str): v = UUID(v) elif isinstance(v, (bytes, bytearray)): v = UUID(v.decode()) except ValueError: raise errors.UUIDError() if not isinstance(v, UUID): raise errors.UUIDError() required_version = getattr(field.type_, '_required_version', None) if required_version and v.version != required_version: raise errors.UUIDVersionError(required_version=required_version) return v def decimal_validator(v: Any) -> Decimal: if isinstance(v, Decimal): return v elif isinstance(v, (bytes, bytearray)): v = v.decode() v = str(v).strip() try: v = Decimal(v) except DecimalException: raise errors.DecimalError() if not v.is_finite(): raise errors.DecimalIsNotFiniteError() return v def ip_v4_address_validator(v: Any) -> IPv4Address: if isinstance(v, IPv4Address): return v try: return IPv4Address(v) except ValueError: raise errors.IPv4AddressError() def ip_v6_address_validator(v: Any) -> IPv6Address: if isinstance(v, IPv6Address): return v try: return IPv6Address(v) except ValueError: raise errors.IPv6AddressError() def ip_v4_network_validator(v: Any) -> IPv4Network: """ Assume IPv4Network initialised with a default ``strict`` argument See more: https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network """ if isinstance(v, IPv4Network): return v try: return IPv4Network(v) except ValueError: raise errors.IPv4NetworkError() def ip_v6_network_validator(v: Any) -> IPv6Network: """ Assume IPv6Network initialised with a default ``strict`` argument See more: https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network """ if isinstance(v, IPv6Network): return v try: return IPv6Network(v) except ValueError: raise errors.IPv6NetworkError() def 
ip_v4_interface_validator(v: Any) -> IPv4Interface: if isinstance(v, IPv4Interface): return v try: return IPv4Interface(v) except ValueError: raise errors.IPv4InterfaceError() def ip_v6_interface_validator(v: Any) -> IPv6Interface: if isinstance(v, IPv6Interface): return v try: return IPv6Interface(v) except ValueError: raise errors.IPv6InterfaceError() def path_validator(v: Any) -> Path: if isinstance(v, Path): return v try: return Path(v) except TypeError: raise errors.PathError() def path_exists_validator(v: Any) -> Path: if not v.exists(): raise errors.PathNotExistsError(path=v) return v def callable_validator(v: Any) -> AnyCallable: """ Perform a simple check if the value is callable. Note: complete matching of argument type hints and return types is not performed """ if callable(v): return v raise errors.CallableError(value=v) def make_literal_validator(type_: Any) -> Callable[[Any], Any]: if sys.version_info >= (3, 7): permitted_choices = type_.__args__ else: permitted_choices = type_.__values__ allowed_choices_set = set(permitted_choices) def literal_validator(v: Any) -> Any: if v not in allowed_choices_set: raise errors.WrongConstantError(given=v, permitted=permitted_choices) return v return literal_validator def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': v_len = len(v) min_length = field.type_.min_length or config.min_anystr_length if min_length is not None and v_len < min_length: raise errors.AnyStrMinLengthError(limit_value=min_length) max_length = field.type_.max_length or config.max_anystr_length if max_length is not None and v_len > max_length: raise errors.AnyStrMaxLengthError(limit_value=max_length) return v def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace if strip_whitespace: v = v.strip() return v def validate_json(v: Any, config: 'BaseConfig') -> Any: try: return config.json_loads(v) # type: ignore except ValueError: raise errors.JsonError() except TypeError: raise errors.JsonTypeError() T = TypeVar('T') def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]: def arbitrary_type_validator(v: Any) -> T: if isinstance(v, type_): return v raise errors.ArbitraryTypeError(expected_arbitrary_type=type_) return arbitrary_type_validator def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]: def class_validator(v: Any) -> Type[T]: if lenient_issubclass(v, type_): return v raise errors.SubclassError(expected_class=type_) return class_validator def any_class_validator(v: Any) -> Type[T]: if isinstance(v, type): return v raise errors.ClassError() def pattern_validator(v: Any) -> Pattern[str]: try: return re.compile(v) except re.error: raise errors.PatternError() class IfConfig: def __init__(self, validator: AnyCallable, *config_attr_names: str) -> None: self.validator = validator self.config_attr_names = config_attr_names def check(self, config: Type['BaseConfig']) -> bool: return any(getattr(config, name) not in {None, False} for name in self.config_attr_names) pattern_validators = [str_validator, pattern_validator] # order is important here, for example: bool is a subclass of int so has to come first, datetime before date same, # IPv4Interface before IPv4Address, etc _VALIDATORS: List[Tuple[AnyType, List[Any]]] = [ (IntEnum, [int_validator, enum_validator]), (Enum, [enum_validator]), ( str, [ str_validator, IfConfig(anystr_strip_whitespace, 
'anystr_strip_whitespace'), IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), ], ), ( bytes, [ bytes_validator, IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), ], ), (bool, [bool_validator]), (int, [int_validator]), (float, [float_validator]), (Path, [path_validator]), (datetime, [parse_datetime]), (date, [parse_date]), (time, [parse_time]), (timedelta, [parse_duration]), (OrderedDict, [ordered_dict_validator]), (dict, [dict_validator]), (list, [list_validator]), (tuple, [tuple_validator]), (set, [set_validator]), (frozenset, [frozenset_validator]), (UUID, [uuid_validator]), (Decimal, [decimal_validator]), (IPv4Interface, [ip_v4_interface_validator]), (IPv6Interface, [ip_v6_interface_validator]), (IPv4Address, [ip_v4_address_validator]), (IPv6Address, [ip_v6_address_validator]), (IPv4Network, [ip_v4_network_validator]), (IPv6Network, [ip_v6_network_validator]), ] def find_validators( # noqa: C901 (ignore complexity) type_: AnyType, config: Type['BaseConfig'] ) -> Generator[AnyCallable, None, None]: if type_ is Any: return type_type = type(type_) if type_type == ForwardRef or type_type == TypeVar: return if type_ is Pattern: yield from pattern_validators return if is_callable_type(type_): yield callable_validator return if is_literal_type(type_): yield make_literal_validator(type_) return class_ = get_class(type_) if class_ is not None: if isinstance(class_, type): yield make_class_validator(class_) else: yield any_class_validator return supertype = _find_supertype(type_) if supertype is not None: type_ = supertype for val_type, validators in _VALIDATORS: try: if issubclass(type_, val_type): for v in validators: if isinstance(v, IfConfig): if v.check(config): yield v.validator else: yield v return except TypeError: raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})') if config.arbitrary_types_allowed: yield make_arbitrary_type_validator(type_) else: raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config') def _find_supertype(type_: AnyType) -> Optional[AnyType]: if not _is_new_type(type_): return None supertype = type_.__supertype__ if _is_new_type(supertype): supertype = _find_supertype(supertype) return supertype def _is_new_type(type_: AnyType) -> bool: return hasattr(type_, '__name__') and hasattr(type_, '__supertype__') pydantic-1.2/pydantic/version.py000066400000000000000000000001431357000400300170270ustar00rootroot00000000000000from distutils.version import StrictVersion __all__ = ['VERSION'] VERSION = StrictVersion('1.2') pydantic-1.2/requirements.txt000066400000000000000000000002761357000400300164500ustar00rootroot00000000000000-r benchmarks/requirements.txt -r docs/requirements.txt -r tests/requirements.txt devtools==0.5.1 email-validator==1.0.5 dataclasses==0.6; python_version < '3.7' typing-extensions==3.7.4.1 pydantic-1.2/runtime.txt000066400000000000000000000000041357000400300153750ustar00rootroot000000000000003.7 pydantic-1.2/setup.cfg000066400000000000000000000017241357000400300150040ustar00rootroot00000000000000[tool:pytest] testpaths = tests timeout = 10 filterwarnings = error ignore::DeprecationWarning:distutils [flake8] max-line-length = 120 max-complexity = 14 inline-quotes = ' multiline-quotes = """ ignore = E203, W503 [bdist_wheel] python-tag = py36.py37.py38 [coverage:run] source = pydantic branch = True [coverage:report] precision = 2 exclude_lines = pragma: no cover 
raise NotImplementedError raise NotImplemented if TYPE_CHECKING: @overload [isort] line_length=120 known_first_party=pydantic known_standard_library=dataclasses multi_line_output=3 include_trailing_comma=True force_grid_wrap=0 combine_as_imports=True [mypy] follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True [mypy-email_validator] ignore_missing_imports = true pydantic-1.2/setup.py000066400000000000000000000071041357000400300146730ustar00rootroot00000000000000import os import re import sys from importlib.machinery import SourceFileLoader from pathlib import Path from setuptools import setup class ReplaceLinks: def __init__(self): self.links = set() def replace_issues(self, m): id = m.group(1) self.links.add(f'.. _#{id}: https://github.com/samuelcolvin/pydantic/issues/{id}') return f'`#{id}`_' def replace_users(self, m): name = m.group(2) self.links.add(f'.. _@{name}: https://github.com/{name}') return f'{m.group(1)}`@{name}`_' def extra(self): return '\n\n' + '\n'.join(sorted(self.links)) + '\n' description = 'Data validation and settings management using python 3.6 type hinting' THIS_DIR = Path(__file__).resolve().parent try: history = (THIS_DIR / 'HISTORY.md').read_text() history = re.sub(r'#(\d+)', r'[#\1](https://github.com/samuelcolvin/pydantic/issues/\1)', history) history = re.sub(r'( +)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I) history = re.sub('@@', '@', history) long_description = (THIS_DIR / 'README.md').read_text() + '\n\n' + history except FileNotFoundError: long_description = description + '.\n\nSee https://pydantic-docs.helpmanual.io/ for documentation.' 
# avoid loading the package before requirements are installed: version = SourceFileLoader('version', 'pydantic/version.py').load_module() ext_modules = None if not any(arg in sys.argv for arg in ['clean', 'check']) and 'SKIP_CYTHON' not in os.environ: try: from Cython.Build import cythonize except ImportError: pass else: # For cython test coverage install with `make build-cython-trace` compiler_directives = {} if 'CYTHON_TRACE' in sys.argv: compiler_directives['linetrace'] = True os.environ['CFLAGS'] = '-O3' ext_modules = cythonize( 'pydantic/*.py', exclude=['pydantic/generics.py'], nthreads=4, language_level=3, compiler_directives=compiler_directives, ) setup( name='pydantic', version=str(version.VERSION), description=description, long_description=long_description, long_description_content_type='text/markdown', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: Unix', 'Operating System :: POSIX :: Linux', 'Environment :: Console', 'Environment :: MacOS X', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet', ], author='Samuel Colvin', author_email='s@muelcolvin.com', url='https://github.com/samuelcolvin/pydantic', license='MIT', packages=['pydantic'], package_data={'pydantic': ['py.typed']}, python_requires='>=3.6', zip_safe=False, # https://mypy.readthedocs.io/en/latest/installed_packages.html install_requires=[ 'dataclasses>=0.6;python_version<"3.7"' ], extras_require={ 'email': ['email-validator>=1.0.3'], 'typing_extensions': ['typing-extensions>=3.7.2'] }, ext_modules=ext_modules, ) pydantic-1.2/tests/000077500000000000000000000000001357000400300143215ustar00rootroot00000000000000pydantic-1.2/tests/__init__.py000066400000000000000000000000001357000400300164200ustar00rootroot00000000000000pydantic-1.2/tests/check_tag.py000077500000000000000000000010471357000400300166100ustar00rootroot00000000000000#!/usr/bin/env python3 import os import sys from importlib.machinery import SourceFileLoader VERSION = SourceFileLoader('version', 'pydantic/version.py').load_module().VERSION git_tag = os.getenv('TRAVIS_TAG') if git_tag: if git_tag.lower().lstrip('v') != str(VERSION).lower(): print('✖ "TRAVIS_TAG" environment variable does not match package version: "%s" vs. "%s"' % (git_tag, VERSION)) sys.exit(1) else: print('✓ "TRAVIS_TAG" environment variable matches package version: "%s" vs. 
"%s"' % (git_tag, VERSION)) pydantic-1.2/tests/conftest.py000066400000000000000000000012671357000400300165260ustar00rootroot00000000000000import os import secrets from importlib.machinery import SourceFileLoader import pytest class SetEnv: def __init__(self): self.envars = set() def set(self, name, value): self.envars.add(name) os.environ[name] = value def clear(self): for n in self.envars: os.environ.pop(n) @pytest.yield_fixture def env(): setenv = SetEnv() yield setenv setenv.clear() @pytest.fixture def create_module(tmp_path): def run(code): name = f'test_code_{secrets.token_hex(5)}' path = tmp_path / f'{name}.py' path.write_text(code) return SourceFileLoader(name, str(path)).load_module() return run pydantic-1.2/tests/mypy/000077500000000000000000000000001357000400300153175ustar00rootroot00000000000000pydantic-1.2/tests/mypy/__init__.py000066400000000000000000000000001357000400300174160ustar00rootroot00000000000000pydantic-1.2/tests/mypy/configs/000077500000000000000000000000001357000400300167475ustar00rootroot00000000000000pydantic-1.2/tests/mypy/configs/mypy-default.ini000066400000000000000000000004171357000400300220720ustar00rootroot00000000000000[mypy] follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True ;no_implicit_reexport = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True pydantic-1.2/tests/mypy/configs/mypy-plugin-strict.ini000066400000000000000000000006441357000400300232540ustar00rootroot00000000000000[mypy] plugins = pydantic.mypy follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True ;no_implicit_reexport = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True [pydantic-mypy] init_forbid_extra = True init_typed = True warn_required_dynamic_aliases = True warn_untyped_fields = True pydantic-1.2/tests/mypy/configs/mypy-plugin.ini000066400000000000000000000004501357000400300217410ustar00rootroot00000000000000[mypy] plugins = pydantic.mypy follow_imports = silent strict_optional = True warn_redundant_casts = True warn_unused_ignores = True disallow_any_generics = True check_untyped_defs = True ;no_implicit_reexport = True # for strict mypy: (this is the tricky one :-)) disallow_untyped_defs = True pydantic-1.2/tests/mypy/modules/000077500000000000000000000000001357000400300167675ustar00rootroot00000000000000pydantic-1.2/tests/mypy/modules/fail1.py000066400000000000000000000006131357000400300203350ustar00rootroot00000000000000""" Test mypy failure with missing attribute """ from datetime import datetime from typing import List, Optional from pydantic import BaseModel, NoneStr class Model(BaseModel): age: int first_name = 'John' last_name: NoneStr = None signup_ts: Optional[datetime] = None list_of_ints: List[int] m = Model(age=42, list_of_ints=[1, '2', b'3']) print(m.age + 'not integer') pydantic-1.2/tests/mypy/modules/fail2.py000066400000000000000000000005731357000400300203430ustar00rootroot00000000000000""" Test mypy failure with invalid types. 
""" from datetime import datetime from typing import List, Optional from pydantic import BaseModel, NoneStr class Model(BaseModel): age: int first_name = 'John' last_name: NoneStr = None signup_ts: Optional[datetime] = None list_of_ints: List[int] m = Model(age=42, list_of_ints=[1, '2', b'3']) print(m.foobar) pydantic-1.2/tests/mypy/modules/fail3.py000066400000000000000000000007061357000400300203420ustar00rootroot00000000000000""" Test mypy failure with invalid types. """ from typing import Generic, List, TypeVar from pydantic import BaseModel from pydantic.generics import GenericModel T = TypeVar('T') class Model(BaseModel): list_of_ints: List[int] class WrapperModel(GenericModel, Generic[T]): payload: T model_instance = Model(list_of_ints=[1]) wrapper_instance = WrapperModel[Model](payload=model_instance) wrapper_instance.payload.list_of_ints.append('1') pydantic-1.2/tests/mypy/modules/plugin_fail.py000066400000000000000000000072631357000400300216420ustar00rootroot00000000000000from typing import Any, Generic, Optional, Set, TypeVar, Union from pydantic import BaseModel, BaseSettings, Extra, Field from pydantic.dataclasses import dataclass from pydantic.generics import GenericModel class Model(BaseModel): x: int y: str def method(self) -> None: pass class Config: alias_generator = None allow_mutation = False extra = Extra.forbid def config_method(self) -> None: ... model = Model(x=1, y='y', z='z') model = Model(x=1) model.y = 'a' Model.from_orm({}) Model.from_orm({}) # type: ignore[pydantic-orm] # noqa F821 class ForbidExtraModel(BaseModel): class Config: extra = 'forbid' ForbidExtraModel(x=1) class ForbidExtraModel2(BaseModel): class Config: extra = 'forbid' validate_all = False Config.validate_all = True ForbidExtraModel2(x=1) class BadExtraModel(BaseModel): class Config: extra = 1 # type: ignore[pydantic-config] # noqa F821 extra = 1 class BadConfig1(BaseModel): class Config: orm_mode: Any = {} # not sensible, but should still be handled gracefully class BadConfig2(BaseModel): class Config: orm_mode = list # not sensible, but should still be handled gracefully class InheritingModel(Model): class Config: allow_mutation = True class DefaultTestingModel(BaseModel): # Required a: int b: int = ... c: int = Field(...) d: Union[int, str] e = ... 
# Not required f: Optional[int] g: int = 1 h: int = Field(1) i: int = Field(None) j = 1 DefaultTestingModel() class UndefinedAnnotationModel(BaseModel): undefined: Undefined # noqa F821 UndefinedAnnotationModel() class Settings(BaseSettings): x: int Model.construct(x=1) Model.construct(_fields_set={'x'}, x=1, y='2') Model.construct(x='1', y='2') Settings() # should pass here due to possibly reading from environment # Strict mode fails inheriting = InheritingModel(x='1', y='1') Settings(x='1') Model(x='1', y='2') class Blah(BaseModel): fields_set: Optional[Set[str]] = None # Need to test generic checking here since generics don't work in 3.6, and plugin-success.py is executed T = TypeVar('T') class Response(GenericModel, Generic[T]): data: T error: Optional[str] response = Response[Model](data=model, error=None) response = Response[Model](data=1, error=None) class AliasModel(BaseModel): x: str = Field(..., alias='y') z: int AliasModel(y=1, z=2) x_alias = 'y' class DynamicAliasModel(BaseModel): x: str = Field(..., alias=x_alias) z: int DynamicAliasModel(y='y', z='1') class DynamicAliasModel2(BaseModel): x: str = Field(..., alias=x_alias) z: int class Config: allow_population_by_field_name = True DynamicAliasModel2(y='y', z=1) DynamicAliasModel2(x='y', z=1) class AliasGeneratorModel(BaseModel): x: int class Config: alias_generator = lambda x: x + '_' # noqa E731 AliasGeneratorModel(x=1) AliasGeneratorModel(x_=1) AliasGeneratorModel(z=1) class AliasGeneratorModel2(BaseModel): x: int = Field(..., alias='y') class Config: # type: ignore[pydantic-alias] # noqa F821 alias_generator = lambda x: x + '_' # noqa E731 class UntypedFieldModel(BaseModel): x: int = 1 y = 2 z = 2 # type: ignore[pydantic-field] # noqa F821 AliasGeneratorModel2(x=1) AliasGeneratorModel2(y=1, z=1) class CoverageTester(Missing): # noqa F821 def from_orm(self) -> None: pass CoverageTester().from_orm() @dataclass(config={}) class AddProject: name: str slug: Optional[str] description: Optional[str] p = AddProject(name='x', slug='y', description='z') pydantic-1.2/tests/mypy/modules/plugin_success.py000066400000000000000000000035651357000400300224000ustar00rootroot00000000000000from typing import ClassVar, Optional from pydantic import BaseModel, Field from pydantic.dataclasses import dataclass class Model(BaseModel): x: float y: str class Config: orm_mode = True class NotConfig: allow_mutation = False class SelfReferencingModel(BaseModel): submodel: Optional['SelfReferencingModel'] @property def prop(self) -> None: ... 
SelfReferencingModel.update_forward_refs() model = Model(x=1, y='y') Model(x=1, y='y', z='z') model.x = 2 model.from_orm(model) self_referencing_model = SelfReferencingModel(submodel=SelfReferencingModel(submodel=None)) class InheritingModel(Model): z: int = 1 InheritingModel.from_orm(model) class ForwardReferencingModel(Model): future: 'FutureModel' class FutureModel(Model): pass ForwardReferencingModel.update_forward_refs() future_model = FutureModel(x=1, y='a') forward_model = ForwardReferencingModel(x=1, y='a', future=future_model) class NoMutationModel(BaseModel): x: int class Config: allow_mutation = False class MutationModel(NoMutationModel): a = 1 class Config: allow_mutation = True orm_mode = True MutationModel(x=1).x = 2 MutationModel.from_orm(model) class OverrideModel(Model): x: int OverrideModel(x=1.5, y='b') class Mixin: def f(self) -> None: pass class MultiInheritanceModel(BaseModel, Mixin): pass MultiInheritanceModel().f() class AliasModel(BaseModel): x: str = Field(..., alias='y') alias_model = AliasModel(y='hello') assert alias_model.x == 'hello' class ClassVarModel(BaseModel): x: int y: ClassVar[int] = 1 ClassVarModel(x=1) class Config: validate_assignment = True @dataclass(config=Config) class AddProject: name: str slug: Optional[str] description: Optional[str] p = AddProject(name='x', slug='y', description='z') pydantic-1.2/tests/mypy/modules/success.py000066400000000000000000000062111357000400300210110ustar00rootroot00000000000000""" Test pydantic's compliance with mypy. Do a little skipping about with types to demonstrate its usage. """ import json import sys from datetime import datetime from typing import Any, Dict, Generic, List, Optional, TypeVar from pydantic import BaseModel, NoneStr, StrictBool, root_validator, validator from pydantic.fields import Field from pydantic.generics import GenericModel class Flags(BaseModel): strict_bool: StrictBool = False class Model(BaseModel): age: int first_name = 'John' last_name: NoneStr = None signup_ts: Optional[datetime] = None list_of_ints: List[int] @validator('age') def check_age(cls, value: int) -> int: assert value < 100, 'too old' return value @root_validator def root_check(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values @root_validator(pre=True) def pre_root_check(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values def dog_years(age: int) -> int: return age * 7 def day_of_week(dt: datetime) -> int: return dt.date().isoweekday() m = Model(age=21, list_of_ints=[1, '2', b'3']) assert m.age == 21, m.age m.age = 42 assert m.age == 42, m.age assert m.first_name == 'John', m.first_name assert m.last_name is None, m.last_name assert m.list_of_ints == [1, 2, 3], m.list_of_ints dog_age = dog_years(m.age) assert dog_age == 294, dog_age m = Model(age=2, first_name=b'Woof', last_name=b'Woof', signup_ts='2017-06-07 00:00', list_of_ints=[1, '2', b'3']) assert m.first_name == 'Woof', m.first_name assert m.last_name == 'Woof', m.last_name assert m.signup_ts == datetime(2017, 6, 7), m.signup_ts assert day_of_week(m.signup_ts) == 3 data = {'age': 10, 'first_name': 'Alena', 'last_name': 'Sousova', 'list_of_ints': [410]} m_from_obj = Model.parse_obj(data) assert isinstance(m_from_obj, Model) assert m_from_obj.age == 10 assert m_from_obj.first_name == data['first_name'] assert m_from_obj.last_name == data['last_name'] assert m_from_obj.list_of_ints == data['list_of_ints'] m_from_raw = Model.parse_raw(json.dumps(data)) assert isinstance(m_from_raw, Model) assert m_from_raw.age == m_from_obj.age assert 
m_from_raw.first_name == m_from_obj.first_name assert m_from_raw.last_name == m_from_obj.last_name assert m_from_raw.list_of_ints == m_from_obj.list_of_ints m_copy = m_from_obj.copy() assert isinstance(m_from_raw, Model) assert m_copy.age == m_from_obj.age assert m_copy.first_name == m_from_obj.first_name assert m_copy.last_name == m_from_obj.last_name assert m_copy.list_of_ints == m_from_obj.list_of_ints if sys.version_info >= (3, 7): T = TypeVar('T') class WrapperModel(GenericModel, Generic[T]): payload: T int_instance = WrapperModel[int](payload=1) int_instance.payload += 1 assert int_instance.payload == 2 str_instance = WrapperModel[str](payload='a') str_instance.payload += 'a' assert str_instance.payload == 'aa' model_instance = WrapperModel[Model](payload=m) model_instance.payload.list_of_ints.append(4) assert model_instance.payload.list_of_ints == [1, 2, 3, 4] class WithField(BaseModel): age: int first_name: str = Field('John', const=True) pydantic-1.2/tests/mypy/outputs/000077500000000000000000000000001357000400300170425ustar00rootroot00000000000000pydantic-1.2/tests/mypy/outputs/fail1.txt000066400000000000000000000001101357000400300205670ustar00rootroot0000000000000020: error: Unsupported operand types for + ("int" and "str") [operator]pydantic-1.2/tests/mypy/outputs/fail2.txt000066400000000000000000000000741357000400300206010ustar00rootroot0000000000000020: error: "Model" has no attribute "foobar" [attr-defined]pydantic-1.2/tests/mypy/outputs/fail3.txt000066400000000000000000000001431357000400300205770ustar00rootroot0000000000000022: error: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" [arg-type]pydantic-1.2/tests/mypy/outputs/fail4.txt000066400000000000000000000003521357000400300206020ustar00rootroot00000000000000121: error: Unexpected keyword argument "name" for "AddProject" [call-arg] 121: error: Unexpected keyword argument "slug" for "AddProject" [call-arg] 121: error: Unexpected keyword argument "description" for "AddProject" [call-arg]pydantic-1.2/tests/mypy/outputs/plugin-fail-strict.txt000066400000000000000000000053211357000400300233210ustar00rootroot0000000000000024: error: Unexpected keyword argument "z" for "Model" [call-arg] 25: error: Missing named argument "y" for "Model" [call-arg] 26: error: Property "y" defined in "Model" is read-only [misc] 27: error: "Model" does not have orm_mode=True [pydantic-orm] 36: error: Unexpected keyword argument "x" for "ForbidExtraModel" [call-arg] 47: error: Unexpected keyword argument "x" for "ForbidExtraModel2" [call-arg] 53: error: Invalid value for "Config.extra" [pydantic-config] 58: error: Invalid value for "Config.orm_mode" [pydantic-config] 63: error: Invalid value for "Config.orm_mode" [pydantic-config] 74: error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int") [assignment] 77: error: Untyped fields disallowed [pydantic-field] 84: error: Untyped fields disallowed [pydantic-field] 87: error: Missing named argument "a" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "b" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "c" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "d" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "e" for "DefaultTestingModel" [call-arg] 91: error: Name 'Undefined' is not defined [name-defined] 94: error: Missing named argument "undefined" for "UndefinedAnnotationModel" [call-arg] 101: error: Missing named argument "y" for "construct" of 
"Model" [call-arg] 103: error: Argument "x" to "construct" of "Model" has incompatible type "str"; expected "int" [arg-type] 108: error: Argument "x" to "InheritingModel" has incompatible type "str"; expected "int" [arg-type] 109: error: Argument "x" to "Settings" has incompatible type "str"; expected "int" [arg-type] 110: error: Argument "x" to "Model" has incompatible type "str"; expected "int" [arg-type] 127: error: Argument "data" to "Response" has incompatible type "int"; expected "Model" [arg-type] 135: error: Argument "y" to "AliasModel" has incompatible type "int"; expected "str" [arg-type] 141: error: Required dynamic aliases disallowed [pydantic-alias] 145: error: Argument "z" to "DynamicAliasModel" has incompatible type "str"; expected "int" [arg-type] 156: error: Unexpected keyword argument "y" for "DynamicAliasModel2" [call-arg] 163: error: Required dynamic aliases disallowed [pydantic-alias] 181: error: Untyped fields disallowed [pydantic-field] 185: error: Unexpected keyword argument "x" for "AliasGeneratorModel2" [call-arg] 186: error: Unexpected keyword argument "z" for "AliasGeneratorModel2" [call-arg] 189: error: Name 'Missing' is not defined [name-defined] 197: error: Argument "config" to "dataclass" has incompatible type "Dict[, ]"; expected "Optional[Type[Any]]" [arg-type]pydantic-1.2/tests/mypy/outputs/plugin-fail.txt000066400000000000000000000034021357000400300220110ustar00rootroot0000000000000024: error: Unexpected keyword argument "z" for "Model" [call-arg] 25: error: Missing named argument "y" for "Model" [call-arg] 26: error: Property "y" defined in "Model" is read-only [misc] 27: error: "Model" does not have orm_mode=True [pydantic-orm] 36: error: Unexpected keyword argument "x" for "ForbidExtraModel" [call-arg] 47: error: Unexpected keyword argument "x" for "ForbidExtraModel2" [call-arg] 53: error: Invalid value for "Config.extra" [pydantic-config] 58: error: Invalid value for "Config.orm_mode" [pydantic-config] 63: error: Invalid value for "Config.orm_mode" [pydantic-config] 74: error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int") [assignment] 87: error: Missing named argument "a" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "b" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "c" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "d" for "DefaultTestingModel" [call-arg] 87: error: Missing named argument "e" for "DefaultTestingModel" [call-arg] 91: error: Name 'Undefined' is not defined [name-defined] 94: error: Missing named argument "undefined" for "UndefinedAnnotationModel" [call-arg] 101: error: Missing named argument "y" for "construct" of "Model" [call-arg] 103: error: Argument "x" to "construct" of "Model" has incompatible type "str"; expected "int" [arg-type] 156: error: Missing named argument "x" for "DynamicAliasModel2" [call-arg] 175: error: unused 'type: ignore' comment 182: error: unused 'type: ignore' comment 189: error: Name 'Missing' is not defined [name-defined] 197: error: Argument "config" to "dataclass" has incompatible type "Dict[, ]"; expected "Optional[Type[Any]]" [arg-type]pydantic-1.2/tests/mypy/outputs/plugin-success-strict.txt000066400000000000000000000003361357000400300240570ustar00rootroot0000000000000029: error: Unexpected keyword argument "z" for "Model" [call-arg] 64: error: Untyped fields disallowed [pydantic-field] 79: error: Argument "x" to "OverrideModel" has incompatible type "float"; expected "int" 
[arg-type]pydantic-1.2/tests/mypy/test_mypy.py000066400000000000000000000062221357000400300177300ustar00rootroot00000000000000import importlib import os import re from pathlib import Path import pytest try: from mypy import api as mypy_api except ImportError: mypy_api = None try: import typing_extensions except ImportError: typing_extensions = None # This ensures mypy can find the test files, no matter where tests are run from: os.chdir(Path(__file__).parent.parent.parent) # You can change the following variable to True during development to overwrite expected output with generated output GENERATE = False cases = [ ('mypy-plugin.ini', 'plugin_success.py', None), ('mypy-plugin.ini', 'plugin_fail.py', 'plugin-fail.txt'), ('mypy-plugin-strict.ini', 'plugin_success.py', 'plugin-success-strict.txt'), ('mypy-plugin-strict.ini', 'plugin_fail.py', 'plugin-fail-strict.txt'), ('mypy-default.ini', 'success.py', None), ('mypy-default.ini', 'fail1.py', 'fail1.txt'), ('mypy-default.ini', 'fail2.py', 'fail2.txt'), ('mypy-default.ini', 'fail3.py', 'fail3.txt'), ('mypy-default.ini', 'plugin_success.py', 'fail4.txt'), ] executable_modules = list({fname[:-3] for _, fname, out_fname in cases if out_fname is None}) @pytest.mark.skipif(not (typing_extensions and mypy_api), reason='typing_extensions or mypy are not installed') @pytest.mark.parametrize('config_filename,python_filename,output_filename', cases) def test_mypy_results(config_filename, python_filename, output_filename): full_config_filename = f'tests/mypy/configs/{config_filename}' full_filename = f'tests/mypy/modules/{python_filename}' full_output_filename = None if output_filename is None else f'tests/mypy/outputs/{output_filename}' expected_out = '' expected_err = '' expected_returncode = 0 if output_filename is None else 1 if full_output_filename is not None: with open(full_output_filename, 'r') as f: expected_out = f.read() # Specifying a different cache dir for each configuration dramatically speeds up subsequent execution # It also prevents cache-invalidation-related bugs in the tests cache_dir = f'.mypy_cache/test-{config_filename[:-4]}' actual_result = mypy_api.run( [full_filename, '--config-file', full_config_filename, '--cache-dir', cache_dir, '--show-error-codes'] ) actual_out, actual_err, actual_returncode = actual_result # Need to strip filenames due to differences in formatting by OS actual_out = '\n'.join(['.py:'.join(line.split('.py:')[1:]) for line in actual_out.split('\n') if line]).strip() actual_out = re.sub(r'\n\s*\n', r'\n', actual_out) if GENERATE and output_filename is not None: with open(full_output_filename, 'w') as f: f.write(actual_out) else: assert actual_out == expected_out, actual_out assert actual_err == expected_err assert actual_returncode == expected_returncode @pytest.mark.parametrize('module', executable_modules) def test_success_cases_run(module): """ Ensure the "success" files can actually be executed """ importlib.import_module(f'tests.mypy.modules.{module}') def test_generation_is_disabled(): """ Makes sure we don't accidentally leave generation on """ assert not GENERATE pydantic-1.2/tests/requirements.txt000066400000000000000000000003721357000400300176070ustar00rootroot00000000000000black==19.10b0 coverage==4.5.4 Cython==0.29.14;sys_platform!='win32' flake8==3.7.9 flake8-quotes==2.1.1 isort==4.3.21 mypy==0.740 pycodestyle==2.5.0 pyflakes==2.1.1 pytest==5.3.1 pytest-cov==2.8.1 pytest-mock==1.12.1 pytest-sugar==0.9.2 twine==3.1.1 
pydantic-1.2/tests/test_abc.py
import abc

import pytest

from pydantic import BaseModel


def test_model_subclassing_abstract_base_classes():
    class Model(BaseModel, abc.ABC):
        some_field: str


def test_model_subclassing_abstract_base_classes_without_implementation_raises_exception():
    class Model(BaseModel, abc.ABC):
        some_field: str

        @abc.abstractmethod
        def my_abstract_method(self):
            pass

        @classmethod
        @abc.abstractmethod
        def my_abstract_classmethod(cls):
            pass

        @staticmethod
        @abc.abstractmethod
        def my_abstract_staticmethod():
            pass

        @property
        @abc.abstractmethod
        def my_abstract_property(self):
            pass

        @my_abstract_property.setter
        @abc.abstractmethod
        def my_abstract_property(self, val):
            pass

    with pytest.raises(TypeError) as excinfo:
        Model(some_field='some_value')
    assert str(excinfo.value) == (
        "Can't instantiate abstract class Model with abstract methods "
        "my_abstract_classmethod, my_abstract_method, my_abstract_property, my_abstract_staticmethod"  # noqa: Q000
    )
pydantic-1.2/tests/test_callable.py
from typing import Callable

import pytest

from pydantic import BaseModel, ValidationError


@pytest.mark.parametrize('annotation', [Callable, Callable[[int], int]])
def test_callable(annotation):
    class Model(BaseModel):
        callback: annotation

    m = Model(callback=lambda x: x)
    assert callable(m.callback)


@pytest.mark.parametrize('annotation', [Callable, Callable[[int], int]])
def test_non_callable(annotation):
    class Model(BaseModel):
        callback: annotation

    with pytest.raises(ValidationError):
        Model(callback=1)
pydantic-1.2/tests/test_color.py
from datetime import datetime

import pytest

from pydantic import BaseModel, ValidationError
from pydantic.color import Color
from pydantic.errors import ColorError
from pydantic.utils import almost_equal_floats


@pytest.mark.parametrize(
    'raw_color, as_tuple',
    [
        # named colors
        ('aliceblue', (240, 248, 255)),
        ('Antiquewhite', (250, 235, 215)),
        ('#000000', (0, 0, 0)),
        ('#DAB', (221, 170, 187)),
        ('#dab', (221, 170, 187)),
        ('#000', (0, 0, 0)),
        ('0x797979', (121, 121, 121)),
        ('0x777', (119, 119, 119)),
        ('0x777777', (119, 119, 119)),
        ('0x777777cc', (119, 119, 119, 0.8)),
        ('777', (119, 119, 119)),
        ('777c', (119, 119, 119, 0.8)),
        (' 777', (119, 119, 119)),
        ('777 ', (119, 119, 119)),
        (' 777 ', (119, 119, 119)),
        ((0, 0, 128), (0, 0, 128)),
        ([0, 0, 128], (0, 0, 128)),
        ((0, 0, 205, 1.0), (0, 0, 205)),
        ((0, 0, 205, 0.5), (0, 0, 205, 0.5)),
        ('rgb(0, 0, 205)', (0, 0, 205)),
        ('rgb(0, 0, 205.2)', (0, 0, 205)),
        ('rgb(0, 0.2, 205)', (0, 0, 205)),
        ('rgba(0, 0, 128, 0.6)', (0, 0, 128, 0.6)),
        ('rgba(0, 0, 128, .6)', (0, 0, 128, 0.6)),
        ('rgba(0, 0, 128, 60%)', (0, 0, 128, 0.6)),
        (' rgba(0, 0, 128,0.6) ', (0, 0, 128, 0.6)),
        ('rgba(00,0,128,0.6 )', (0, 0, 128, 0.6)),
        ('rgba(0, 0, 128, 0)', (0, 0, 128, 0)),
        ('rgba(0, 0, 128, 1)', (0, 0, 128)),
        ('hsl(270, 60%, 70%)', (178, 133, 224)),
        ('hsl(180, 100%, 50%)', (0, 255, 255)),
        ('hsl(630, 60%, 70%)', (178, 133, 224)),
        ('hsl(270deg, 60%, 70%)', (178, 133, 224)),
        ('hsl(.75turn, 60%, 70%)', (178, 133, 224)),
        ('hsl(-.25turn, 60%, 70%)', (178, 133, 224)),
        ('hsl(-0.25turn, 60%, 70%)', (178, 133, 224)),
        ('hsl(4.71238rad, 60%, 70%)', (178, 133, 224)),
        ('hsl(10.9955rad, 60%, 70%)', (178, 133, 224)),
        ('hsl(270, 60%, 50%, .15)', (127, 51, 204, 0.15)),
        ('hsl(270.00deg, 60%, 50%, 15%)', (127, 51, 204, 0.15)),
    ],
)
def test_color_success(raw_color, as_tuple):
    c = Color(raw_color)
    assert c.as_rgb_tuple() == as_tuple
    assert c.original() == raw_color


@pytest.mark.parametrize(
    'color',
    [
        # named colors
        'nosuchname',
        'chucknorris',
        # hex
        '#0000000',
        'x000',
        # rgb/rgba tuples
        (256, 256, 256),
        (128, 128, 128, 0.5, 128),
        (0, 0, 'x'),
        (0, 0, 0, 1.5),
        (0, 0, 0, 'x'),
        (0, 0, 1280),
        (0, 0, 1205, 0.1),
        (0, 0, 1128, 0.5),
        (0, 0, 1128, -0.5),
        (0, 0, 1128, 1.5),
        # rgb/rgba strings
        'rgb(0, 0, 1205)',
        'rgb(0, 0, 1128)',
        'rgba(0, 0, 11205, 0.1)',
        'rgba(0, 0, 128, 11.5)',
        'hsl(180, 101%, 50%)',
        # neither a tuple, not a string
        datetime(2017, 10, 5, 19, 47, 7),
        object,
        range(10),
    ],
)
def test_color_fail(color):
    with pytest.raises(ColorError):
        Color(color)


def test_model_validation():
    class Model(BaseModel):
        color: Color

    assert Model(color='red').color.as_hex() == '#f00'
    with pytest.raises(ValidationError) as exc_info:
        Model(color='snot')
    assert exc_info.value.errors() == [
        {
            'loc': ('color',),
            'msg': 'value is not a valid color: string not recognised as a valid color',
            'type': 'value_error.color',
            'ctx': {'reason': 'string not recognised as a valid color'},
        }
    ]


def test_as_rgb():
    assert Color('bad').as_rgb() == 'rgb(187, 170, 221)'
    assert Color((1, 2, 3, 0.123456)).as_rgb() == 'rgba(1, 2, 3, 0.12)'
    assert Color((1, 2, 3, 0.1)).as_rgb() == 'rgba(1, 2, 3, 0.1)'


def test_as_rgb_tuple():
    assert Color((1, 2, 3)).as_rgb_tuple(alpha=None) == (1, 2, 3)
    assert Color((1, 2, 3, 1)).as_rgb_tuple(alpha=None) == (1, 2, 3)
    assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=None) == (1, 2, 3, 0.3)
    assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=None) == (1, 2, 3, 0.3)

    assert Color((1, 2, 3)).as_rgb_tuple(alpha=False) == (1, 2, 3)
    assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=False) == (1, 2, 3)

    assert Color((1, 2, 3)).as_rgb_tuple(alpha=True) == (1, 2, 3, 1)
    assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=True) == (1, 2, 3, 0.3)


def test_as_hsl():
    assert Color('bad').as_hsl() == 'hsl(260, 43%, 77%)'
    assert Color((1, 2, 3, 0.123456)).as_hsl() == 'hsl(210, 50%, 1%, 0.12)'
    assert Color('hsl(260, 43%, 77%)').as_hsl() == 'hsl(260, 43%, 77%)'


def test_as_hsl_tuple():
    c = Color('016997')
    h, s, l, a = c.as_hsl_tuple(alpha=True)
    assert almost_equal_floats(h, 0.551, delta=0.01)
    assert almost_equal_floats(s, 0.986, delta=0.01)
    assert almost_equal_floats(l, 0.298, delta=0.01)
    assert a == 1

    assert c.as_hsl_tuple(alpha=False) == c.as_hsl_tuple(alpha=None) == (h, s, l)

    c = Color((3, 40, 50, 0.5))
    hsla = c.as_hsl_tuple(alpha=None)
    assert len(hsla) == 4
    assert hsla[3] == 0.5


def test_as_hex():
    assert Color((1, 2, 3)).as_hex() == '#010203'
    assert Color((119, 119, 119)).as_hex() == '#777'
    assert Color((119, 0, 238)).as_hex() == '#70e'
    assert Color('B0B').as_hex() == '#b0b'
    assert Color((1, 2, 3, 0.123456)).as_hex() == '#0102031f'
    assert Color((1, 2, 3, 0.1)).as_hex() == '#0102031a'


def test_as_named():
    assert Color((0, 255, 255)).as_named() == 'cyan'
    assert Color('#808000').as_named() == 'olive'
    assert Color('hsl(180, 100%, 50%)').as_named() == 'cyan'
    assert Color((240, 248, 255)).as_named() == 'aliceblue'
    with pytest.raises(ValueError) as exc_info:
        Color((1, 2, 3)).as_named()
    assert exc_info.value.args[0] == 'no named color found, use fallback=True, as_hex() or as_rgb()'

    assert Color((1, 2, 3)).as_named(fallback=True) == '#010203'
    assert Color((1, 2, 3, 0.1)).as_named(fallback=True) == '#0102031a'


def test_str_repr():
    assert str(Color('red')) == 'red'
    assert repr(Color('red')) == "Color('red', rgb=(255, 0, 0))"
    assert str(Color((1, 2, 3))) == '#010203'
    assert repr(Color((1, 2, 3))) == "Color('#010203', rgb=(1, 2, 3))"
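# --- Editor's illustrative sketch (not part of the original suite; helper name is hypothetical) ---
# The tests above exercise each conversion separately; this sketch shows the round-trip
# property they imply: the exact textual forms emitted by as_hex() and as_rgb() are
# themselves accepted as input. (as_hsl() rounds percentages, so it is not exact.)
def _example_color_round_trip():
    c = Color((178, 133, 224))
    assert Color(c.as_hex()).as_rgb_tuple() == (178, 133, 224)
    assert Color(c.as_rgb()).as_rgb_tuple() == (178, 133, 224)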
pydantic-1.2/tests/test_construction.py
import pickle
from typing import List

import pytest

from pydantic import BaseModel


class Model(BaseModel):
    a: float
    b: int = 10


def test_simple_construct():
    m = Model.construct(a=3.14)
    assert m.a == 3.14
    assert m.b == 10
    assert m.__fields_set__ == {'a'}
    assert m.dict() == {'a': 3.14, 'b': 10}


def test_construct_misuse():
    m = Model.construct(b='foobar')
    assert m.b == 'foobar'
    assert m.dict() == {'b': 'foobar'}
    with pytest.raises(AttributeError, match="'Model' object has no attribute 'a'"):
        print(m.a)


def test_construct_fields_set():
    m = Model.construct(a=3.0, b=-1, _fields_set={'a'})
    assert m.a == 3
    assert m.b == -1
    assert m.__fields_set__ == {'a'}
    assert m.dict() == {'a': 3, 'b': -1}


def test_large_any_str():
    class Model(BaseModel):
        a: bytes
        b: str

    content_bytes = b'x' * (2 ** 16 + 1)
    content_str = 'x' * (2 ** 16 + 1)
    m = Model(a=content_bytes, b=content_str)
    assert m.a == content_bytes
    assert m.b == content_str


def test_simple_copy():
    m = Model(a=24)
    m2 = m.copy()

    assert m.a == m2.a == 24
    assert m.b == m2.b == 10
    assert m == m2
    assert m.__fields__ == m2.__fields__


class ModelTwo(BaseModel):
    a: float
    b: int = 10
    c: str = 'foobar'
    d: Model


def test_deep_copy():
    m = ModelTwo(a=24, d=Model(a='12'))
    m2 = m.copy(deep=True)

    assert m.a == m2.a == 24
    assert m.b == m2.b == 10
    assert m.c == m2.c == 'foobar'
    assert m.d is not m2.d
    assert m == m2
    assert m.__fields__ == m2.__fields__


def test_copy_exclude():
    m = ModelTwo(a=24, d=Model(a='12'))
    m2 = m.copy(exclude={'b'})

    assert m.a == m2.a == 24
    assert isinstance(m2.d, Model)
    assert m2.d.a == 12

    assert hasattr(m2, 'c')
    assert not hasattr(m2, 'b')

    assert set(m.dict().keys()) == {'a', 'b', 'c', 'd'}
    assert set(m2.dict().keys()) == {'a', 'c', 'd'}

    assert m != m2


def test_copy_include():
    m = ModelTwo(a=24, d=Model(a='12'))
    m2 = m.copy(include={'a'})

    assert m.a == m2.a == 24
    assert set(m.dict().keys()) == {'a', 'b', 'c', 'd'}
    assert set(m2.dict().keys()) == {'a'}

    assert m != m2


def test_copy_include_exclude():
    m = ModelTwo(a=24, d=Model(a='12'))
    m2 = m.copy(include={'a', 'b', 'c'}, exclude={'c'})

    assert set(m.dict().keys()) == {'a', 'b', 'c', 'd'}
    assert set(m2.dict().keys()) == {'a', 'b'}


def test_copy_advanced_exclude():
    class SubSubModel(BaseModel):
        a: str
        b: str

    class SubModel(BaseModel):
        c: str
        d: List[SubSubModel]

    class Model(BaseModel):
        e: str
        f: SubModel

    m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')]))
    m2 = m.copy(exclude={'f': {'c': ..., 'd': {-1: {'a'}}}})

    assert hasattr(m.f, 'c')
    assert not hasattr(m2.f, 'c')

    assert m2.dict() == {'e': 'e', 'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]}}

    m2 = m.copy(exclude={'e': ..., 'f': {'d'}})
    assert m2.dict() == {'f': {'c': 'foo'}}


def test_copy_advanced_include():
    class SubSubModel(BaseModel):
        a: str
        b: str

    class SubModel(BaseModel):
        c: str
        d: List[SubSubModel]

    class Model(BaseModel):
        e: str
        f: SubModel

    m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')]))
    m2 = m.copy(include={'f': {'c'}})

    assert hasattr(m.f, 'c')
    assert hasattr(m2.f, 'c')
    assert m2.dict() == {'f': {'c': 'foo'}}

    m2 = m.copy(include={'e': ..., 'f': {'d': {-1}}})
    assert m2.dict() == {'e': 'e', 'f': {'d': [{'a': 'c', 'b': 'e'}]}}
def test_copy_advanced_include_exclude():
    class SubSubModel(BaseModel):
        a: str
        b: str

    class SubModel(BaseModel):
        c: str
        d: List[SubSubModel]

    class Model(BaseModel):
        e: str
        f: SubModel

    m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')]))
    m2 = m.copy(include={'e': ..., 'f': {'d'}}, exclude={'e': ..., 'f': {'d': {0}}})

    assert m2.dict() == {'f': {'d': [{'a': 'c', 'b': 'e'}]}}


def test_copy_update():
    m = ModelTwo(a=24, d=Model(a='12'))
    m2 = m.copy(update={'a': 'different'})

    assert m.a == 24
    assert m2.a == 'different'
    assert set(m.dict().keys()) == set(m2.dict().keys()) == {'a', 'b', 'c', 'd'}

    assert m != m2


def test_copy_set_fields():
    m = ModelTwo(a=24, d=Model(a='12'))
    m2 = m.copy()

    assert m.dict(exclude_unset=True) == {'a': 24.0, 'd': {'a': 12}}
    assert m.dict(exclude_unset=True) == m2.dict(exclude_unset=True)


def test_simple_pickle():
    m = Model(a='24')
    b = pickle.dumps(m)
    m2 = pickle.loads(b)
    assert m.a == m2.a == 24
    assert m.b == m2.b == 10
    assert m == m2
    assert m is not m2
    assert tuple(m) == (('a', 24.0), ('b', 10))
    assert tuple(m2) == (('a', 24.0), ('b', 10))
    assert m.__fields__ == m2.__fields__


def test_recursive_pickle():
    m = ModelTwo(a=24, d=Model(a='123.45'))
    m2 = pickle.loads(pickle.dumps(m))
    assert m == m2

    assert m.d.a == 123.45
    assert m2.d.a == 123.45
    assert m.__fields__ == m2.__fields__


def test_immutable_copy():
    class Model(BaseModel):
        a: int
        b: int

        class Config:
            allow_mutation = False

    m = Model(a=40, b=10)
    assert m == m.copy()

    m2 = m.copy(update={'b': 12})
    assert repr(m2) == 'Model(a=40, b=12)'
    with pytest.raises(TypeError):
        m2.b = 13


def test_pickle_fields_set():
    m = Model(a=24)
    assert m.dict(exclude_unset=True) == {'a': 24}
    m2 = pickle.loads(pickle.dumps(m))
    assert m2.dict(exclude_unset=True) == {'a': 24}


def test_copy_update_exclude():
    class SubModel(BaseModel):
        a: str
        b: str

    class Model(BaseModel):
        c: str
        d: SubModel

    m = Model(c='ex', d=dict(a='ax', b='bx'))
    assert m.dict() == {'c': 'ex', 'd': {'a': 'ax', 'b': 'bx'}}
    assert m.copy(exclude={'c'}).dict() == {'d': {'a': 'ax', 'b': 'bx'}}
    assert m.copy(exclude={'c'}, update={'c': 42}).dict() == {'c': 42, 'd': {'a': 'ax', 'b': 'bx'}}

    assert m._calculate_keys(exclude={'x'}, include=None, exclude_unset=False) == {'c', 'd'}
    assert m._calculate_keys(exclude={'x'}, include=None, exclude_unset=False, update={'c': 42}) == {'d'}
pydantic-1.2/tests/test_create_model.py
import pytest

from pydantic import BaseModel, Extra, ValidationError, create_model, errors, validator


def test_create_model():
    model = create_model('FooModel', foo=(str, ...), bar=123)
    assert issubclass(model, BaseModel)
    assert issubclass(model.__config__, BaseModel.Config)
    assert model.__name__ == 'FooModel'
    assert model.__fields__.keys() == {'foo', 'bar'}
    assert model.__validators__ == {}
    assert model.__config__.__name__ == 'Config'


def test_create_model_usage():
    model = create_model('FooModel', foo=(str, ...), bar=123)
    m = model(foo='hello')
    assert m.foo == 'hello'
    assert m.bar == 123
    with pytest.raises(ValidationError):
        model()
    with pytest.raises(ValidationError):
        model(foo='hello', bar='xxx')


def test_invalid_name():
    with pytest.warns(RuntimeWarning):
        model = create_model('FooModel', _foo=(str, ...))
    assert len(model.__fields__) == 0


def test_field_wrong_tuple():
    with pytest.raises(errors.ConfigError):
        create_model('FooModel', foo=(1, 2, 3))


def test_config_and_base():
    with pytest.raises(errors.ConfigError):
        create_model('FooModel', __config__=BaseModel.Config, __base__=BaseModel)
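# --- Editor's illustrative sketch (not part of the original suite; helper name is hypothetical) ---
# Summarising the three field-definition forms the tests above rely on: a plain value
# gives a field with an inferred type and that default, a (type, default) tuple sets
# both explicitly, and (type, ...) marks the field required.
def _example_field_definition_forms():
    model = create_model('ExampleModel', plain=1, typed=(str, 'x'), required=(int, ...))
    m = model(required='42')
    assert (m.plain, m.typed, m.required) == (1, 'x', 42)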
def test_inheritance():
    class BarModel(BaseModel):
        x = 1
        y = 2

    model = create_model('FooModel', foo=(str, ...), bar=(int, 123), __base__=BarModel)
    assert model.__fields__.keys() == {'foo', 'bar', 'x', 'y'}
    m = model(foo='a', x=4)
    assert m.dict() == {'bar': 123, 'foo': 'a', 'x': 4, 'y': 2}


def test_custom_config():
    class Config:
        fields = {'foo': 'api-foo-field'}

    model = create_model('FooModel', foo=(int, ...), __config__=Config)
    assert model(**{'api-foo-field': '987'}).foo == 987
    assert issubclass(model.__config__, BaseModel.Config)
    with pytest.raises(ValidationError):
        model(foo=654)


def test_custom_config_inherits():
    class Config(BaseModel.Config):
        fields = {'foo': 'api-foo-field'}

    model = create_model('FooModel', foo=(int, ...), __config__=Config)
    assert model(**{'api-foo-field': '987'}).foo == 987
    assert issubclass(model.__config__, BaseModel.Config)
    with pytest.raises(ValidationError):
        model(foo=654)


def test_custom_config_extras():
    class Config(BaseModel.Config):
        extra = Extra.forbid

    model = create_model('FooModel', foo=(int, ...), __config__=Config)
    assert model(foo=654)
    with pytest.raises(ValidationError):
        model(bar=654)


def test_inheritance_validators():
    class BarModel(BaseModel):
        @validator('a', check_fields=False)
        def check_a(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    model = create_model('FooModel', a='cake', __base__=BarModel)
    assert model().a == 'cake'
    assert model(a='this is foobar good').a == 'this is foobar good'
    with pytest.raises(ValidationError):
        model(a='something else')


def test_inheritance_validators_always():
    class BarModel(BaseModel):
        @validator('a', check_fields=False, always=True)
        def check_a(cls, v):
            if 'foobar' not in v:
                raise ValueError('"foobar" not found in a')
            return v

    model = create_model('FooModel', a='cake', __base__=BarModel)
    with pytest.raises(ValidationError):
        model()
    assert model(a='this is foobar good').a == 'this is foobar good'
    with pytest.raises(ValidationError):
        model(a='something else')


def test_inheritance_validators_all():
    class BarModel(BaseModel):
        @validator('*')
        def check_all(cls, v):
            return v * 2

    model = create_model('FooModel', a=(int, ...), b=(int, ...), __base__=BarModel)
    assert model(a=2, b=6).dict() == {'a': 4, 'b': 12}


def test_funky_name():
    model = create_model('FooModel', **{'this-is-funky': (int, ...)})
    m = model(**{'this-is-funky': '123'})
    assert m.dict() == {'this-is-funky': 123}
    with pytest.raises(ValidationError) as exc_info:
        model()
    assert exc_info.value.errors() == [
        {'loc': ('this-is-funky',), 'msg': 'field required', 'type': 'value_error.missing'}
    ]


def test_repeat_base_usage():
    class Model(BaseModel):
        a: str

    assert Model.__fields__.keys() == {'a'}

    model = create_model('FooModel', b=1, __base__=Model)

    assert Model.__fields__.keys() == {'a'}
    assert model.__fields__.keys() == {'a', 'b'}

    model2 = create_model('Foo2Model', c=1, __base__=Model)

    assert Model.__fields__.keys() == {'a'}
    assert model.__fields__.keys() == {'a', 'b'}
    assert model2.__fields__.keys() == {'a', 'c'}

    model3 = create_model('Foo2Model', d=1, __base__=model)

    assert Model.__fields__.keys() == {'a'}
    assert model.__fields__.keys() == {'a', 'b'}
    assert model2.__fields__.keys() == {'a', 'c'}
    assert model3.__fields__.keys() == {'a', 'b', 'd'}
pydantic-1.2/tests/test_dataclasses.py
import dataclasses
from datetime import datetime
from pathlib import Path
from typing import ClassVar, Dict, FrozenSet, Optional

import pytest

import pydantic
from pydantic import BaseModel, ValidationError, validator
def test_simple():
    @pydantic.dataclasses.dataclass
    class MyDataclass:
        a: int
        b: float

    d = MyDataclass('1', '2.5')
    assert d.a == 1
    assert d.b == 2.5

    d = MyDataclass(b=10, a=20)
    assert d.a == 20
    assert d.b == 10


def test_value_error():
    @pydantic.dataclasses.dataclass
    class MyDataclass:
        a: int
        b: int

    with pytest.raises(ValidationError) as exc_info:
        MyDataclass(1, 'wrong')

    assert exc_info.value.errors() == [
        {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_frozen():
    @pydantic.dataclasses.dataclass(frozen=True)
    class MyDataclass:
        a: int

    d = MyDataclass(1)
    assert d.a == 1

    with pytest.raises(AttributeError):
        d.a = 7


def test_validate_assignment():
    class Config:
        validate_assignment = True

    @pydantic.dataclasses.dataclass(config=Config)
    class MyDataclass:
        a: int

    d = MyDataclass(1)
    assert d.a == 1

    d.a = '7'
    assert d.a == 7


def test_validate_assignment_error():
    class Config:
        validate_assignment = True

    @pydantic.dataclasses.dataclass(config=Config)
    class MyDataclass:
        a: int

    d = MyDataclass(1)

    with pytest.raises(ValidationError) as exc_info:
        d.a = 'xxx'
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_not_validate_assignment():
    @pydantic.dataclasses.dataclass
    class MyDataclass:
        a: int

    d = MyDataclass(1)
    assert d.a == 1

    d.a = '7'
    assert d.a == '7'


def test_validate_assignment_value_change():
    class Config:
        validate_assignment = True

    @pydantic.dataclasses.dataclass(config=Config, frozen=False)
    class MyDataclass:
        a: int

        @validator('a')
        def double_a(cls, v):
            return v * 2

    d = MyDataclass(2)
    assert d.a == 4

    d.a = 3
    assert d.a == 6


def test_validate_assignment_extra():
    class Config:
        validate_assignment = True

    @pydantic.dataclasses.dataclass(config=Config, frozen=False)
    class MyDataclass:
        a: int

    d = MyDataclass(1)
    assert d.a == 1

    d.extra_field = 1.23
    assert d.extra_field == 1.23

    d.extra_field = 'bye'
    assert d.extra_field == 'bye'


def test_post_init():
    post_init_called = False

    @pydantic.dataclasses.dataclass
    class MyDataclass:
        a: int

        def __post_init__(self):
            nonlocal post_init_called
            post_init_called = True

    d = MyDataclass('1')
    assert d.a == 1
    assert post_init_called


def test_post_init_inheritance_chain():
    parent_post_init_called = False
    post_init_called = False

    @pydantic.dataclasses.dataclass
    class ParentDataclass:
        a: int

        def __post_init__(self):
            nonlocal parent_post_init_called
            parent_post_init_called = True

    @pydantic.dataclasses.dataclass
    class MyDataclass(ParentDataclass):
        b: int

        def __post_init__(self):
            super().__post_init__()
            nonlocal post_init_called
            post_init_called = True

    d = MyDataclass(a=1, b=2)
    assert d.a == 1
    assert d.b == 2
    assert parent_post_init_called
    assert post_init_called


def test_post_init_post_parse():
    post_init_post_parse_called = False

    @pydantic.dataclasses.dataclass
    class MyDataclass:
        a: int

        def __post_init_post_parse__(self):
            nonlocal post_init_post_parse_called
            post_init_post_parse_called = True

    d = MyDataclass('1')
    assert d.a == 1
    assert post_init_post_parse_called


def test_post_init_post_parse_types():
    @pydantic.dataclasses.dataclass
    class CustomType(object):
        b: int

    @pydantic.dataclasses.dataclass
    class MyDataclass:
        a: CustomType

        def __post_init__(self):
            assert type(self.a) == dict

        def __post_init_post_parse__(self):
            assert type(self.a) == CustomType

    d = MyDataclass(**{'a': {'b': 1}})
    assert d.a.b == 1
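# --- Editor's illustrative sketch (not part of the original suite; helper name is hypothetical) ---
# test_post_init_post_parse_types above encodes the ordering guarantee: __post_init__
# runs before pydantic validation (fields still hold the raw input), while
# __post_init_post_parse__ runs after it (fields hold converted values). A minimal
# sketch of the same contract:
def _example_post_init_ordering():
    seen = []

    @pydantic.dataclasses.dataclass
    class Example:
        a: int

        def __post_init__(self):
            seen.append(('pre', type(self.a)))

        def __post_init_post_parse__(self):
            seen.append(('post', type(self.a)))

    Example('1')
    assert seen == [('pre', str), ('post', int)]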
def test_post_init_assignment():
    from dataclasses import field

    # Based on: https://docs.python.org/3/library/dataclasses.html#post-init-processing
    @pydantic.dataclasses.dataclass
    class C:
        a: float
        b: float
        c: float = field(init=False)

        def __post_init__(self):
            self.c = self.a + self.b

    c = C(0.1, 0.2)
    assert c.a == 0.1
    assert c.b == 0.2
    assert c.c == 0.30000000000000004


def test_inheritance():
    @pydantic.dataclasses.dataclass
    class A:
        a: str = None

    @pydantic.dataclasses.dataclass
    class B(A):
        b: int = None

    b = B(a='a', b=12)
    assert b.a == 'a'
    assert b.b == 12

    with pytest.raises(ValidationError):
        B(a='a', b='b')


def test_validate_long_string_error():
    class Config:
        max_anystr_length = 3

    @pydantic.dataclasses.dataclass(config=Config)
    class MyDataclass:
        a: str

    with pytest.raises(ValidationError) as exc_info:
        MyDataclass('xxxx')

    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': 'ensure this value has at most 3 characters',
            'type': 'value_error.any_str.max_length',
            'ctx': {'limit_value': 3},
        }
    ]


def test_validate_assignment_long_string_error():
    class Config:
        max_anystr_length = 3
        validate_assignment = True

    @pydantic.dataclasses.dataclass(config=Config)
    class MyDataclass:
        a: str

    d = MyDataclass('xxx')
    with pytest.raises(ValidationError) as exc_info:
        d.a = 'xxxx'

    assert issubclass(MyDataclass.__pydantic_model__.__config__, BaseModel.Config)
    assert exc_info.value.errors() == [
        {
            'loc': ('a',),
            'msg': 'ensure this value has at most 3 characters',
            'type': 'value_error.any_str.max_length',
            'ctx': {'limit_value': 3},
        }
    ]


def test_no_validate_assignment_long_string_error():
    class Config:
        max_anystr_length = 3
        validate_assignment = False

    @pydantic.dataclasses.dataclass(config=Config)
    class MyDataclass:
        a: str

    d = MyDataclass('xxx')
    d.a = 'xxxx'

    assert d.a == 'xxxx'


def test_nested_dataclass():
    @pydantic.dataclasses.dataclass
    class Nested:
        number: int

    @pydantic.dataclasses.dataclass
    class Outer:
        n: Nested

    navbar = Outer(n=Nested(number='1'))
    assert isinstance(navbar.n, Nested)
    assert navbar.n.number == 1

    navbar = Outer(n=('2',))
    assert isinstance(navbar.n, Nested)
    assert navbar.n.number == 2

    navbar = Outer(n={'number': '3'})
    assert isinstance(navbar.n, Nested)
    assert navbar.n.number == 3

    with pytest.raises(ValidationError) as exc_info:
        Outer(n='not nested')
    assert exc_info.value.errors() == [
        {
            'loc': ('n',),
            'msg': 'instance of Nested, tuple or dict expected',
            'type': 'type_error.dataclass',
            'ctx': {'class_name': 'Nested'},
        }
    ]

    with pytest.raises(ValidationError) as exc_info:
        Outer(n=('x',))
    assert exc_info.value.errors() == [
        {'loc': ('n', 'number'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_arbitrary_types_allowed():
    @dataclasses.dataclass
    class Button:
        href: str

    class Config:
        arbitrary_types_allowed = True

    @pydantic.dataclasses.dataclass(config=Config)
    class Navbar:
        button: Button

    btn = Button(href='a')
    navbar = Navbar(button=btn)
    assert navbar.button.href == 'a'

    with pytest.raises(ValidationError) as exc_info:
        Navbar(button=('b',))
    assert exc_info.value.errors() == [
        {
            'loc': ('button',),
            'msg': 'instance of Button expected',
            'type': 'type_error.arbitrary_type',
            'ctx': {'expected_arbitrary_type': 'Button'},
        }
    ]


def test_nested_dataclass_model():
    @pydantic.dataclasses.dataclass
    class Nested:
        number: int

    class Outer(BaseModel):
        n: Nested

    navbar = Outer(n=Nested(number='1'))
    assert navbar.n.number == 1


def test_fields():
    @pydantic.dataclasses.dataclass
    class User:
        id: int
        name: str = 'John Doe'
        signup_ts: datetime = None

    user = User(id=123)
    fields = user.__pydantic_model__.__fields__

    assert fields['id'].required is True
    assert fields['id'].default is None

    assert fields['name'].required is False
    assert fields['name'].default == 'John Doe'

    assert fields['signup_ts'].required is False
    assert fields['signup_ts'].default is None
def test_default_factory_field():
    @pydantic.dataclasses.dataclass
    class User:
        id: int
        aliases: Dict[str, str] = dataclasses.field(default_factory=lambda: {'John': 'Joey'})

    user = User(id=123)
    fields = user.__pydantic_model__.__fields__

    assert fields['id'].required is True
    assert fields['id'].default is None

    assert fields['aliases'].required is False
    assert fields['aliases'].default == {'John': 'Joey'}


def test_schema():
    @pydantic.dataclasses.dataclass
    class User:
        id: int
        name: str = 'John Doe'
        aliases: Dict[str, str] = dataclasses.field(default_factory=lambda: {'John': 'Joey'})
        signup_ts: datetime = None

    user = User(id=123)
    assert user.__pydantic_model__.schema() == {
        'title': 'User',
        'type': 'object',
        'properties': {
            'id': {'title': 'Id', 'type': 'integer'},
            'name': {'title': 'Name', 'default': 'John Doe', 'type': 'string'},
            'aliases': {
                'title': 'Aliases',
                'default': {'John': 'Joey'},
                'type': 'object',
                'additionalProperties': {'type': 'string'},
            },
            'signup_ts': {'title': 'Signup Ts', 'type': 'string', 'format': 'date-time'},
        },
        'required': ['id'],
    }


def test_nested_schema():
    @pydantic.dataclasses.dataclass
    class Nested:
        number: int

    @pydantic.dataclasses.dataclass
    class Outer:
        n: Nested

    assert Outer.__pydantic_model__.schema() == {
        'title': 'Outer',
        'type': 'object',
        'properties': {'n': {'$ref': '#/definitions/Nested'}},
        'required': ['n'],
        'definitions': {
            'Nested': {
                'title': 'Nested',
                'type': 'object',
                'properties': {'number': {'title': 'Number', 'type': 'integer'}},
                'required': ['number'],
            }
        },
    }


def test_initvar():
    InitVar = dataclasses.InitVar

    @pydantic.dataclasses.dataclass
    class TestInitVar:
        x: int
        y: InitVar

    tiv = TestInitVar(1, 2)
    assert tiv.x == 1
    with pytest.raises(AttributeError):
        tiv.y


def test_derived_field_from_initvar():
    InitVar = dataclasses.InitVar

    @pydantic.dataclasses.dataclass
    class DerivedWithInitVar:
        plusone: int = dataclasses.field(init=False)
        number: InitVar[int]

        def __post_init__(self, number):
            self.plusone = number + 1

    derived = DerivedWithInitVar(1)
    assert derived.plusone == 2
    with pytest.raises(TypeError):
        DerivedWithInitVar('Not A Number')


def test_initvars_post_init():
    @pydantic.dataclasses.dataclass
    class PathDataPostInit:
        path: Path
        base_path: dataclasses.InitVar[Optional[Path]] = None

        def __post_init__(self, base_path):
            if base_path is not None:
                self.path = base_path / self.path

    path_data = PathDataPostInit('world')
    assert 'path' in path_data.__dict__
    assert 'base_path' not in path_data.__dict__
    assert path_data.path == Path('world')

    with pytest.raises(TypeError) as exc_info:
        PathDataPostInit('world', base_path='/hello')
    assert str(exc_info.value) == "unsupported operand type(s) for /: 'str' and 'str'"


def test_initvars_post_init_post_parse():
    @pydantic.dataclasses.dataclass
    class PathDataPostInitPostParse:
        path: Path
        base_path: dataclasses.InitVar[Optional[Path]] = None

        def __post_init_post_parse__(self, base_path):
            if base_path is not None:
                self.path = base_path / self.path

    path_data = PathDataPostInitPostParse('world')
    assert 'path' in path_data.__dict__
    assert 'base_path' not in path_data.__dict__
    assert path_data.path == Path('world')

    assert PathDataPostInitPostParse('world', base_path='/hello').path == Path('/hello/world')


def test_classvar():
    @pydantic.dataclasses.dataclass
    class TestClassVar:
        klassvar: ClassVar = "I'm a Class variable"
        x: int

    tcv = TestClassVar(2)
    assert tcv.klassvar == "I'm a Class variable"
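# --- Editor's illustrative sketch (not part of the original suite; helper name is hypothetical) ---
# As test_default_factory_field above shows, mutable defaults go through
# dataclasses.field(default_factory=...); each instance then gets its own copy
# rather than sharing one mutable object.
def _example_default_factory_isolation():
    @pydantic.dataclasses.dataclass
    class Example:
        tags: Dict[str, str] = dataclasses.field(default_factory=dict)

    a, b = Example(), Example()
    a.tags['k'] = 'v'
    assert b.tags == {}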
def test_frozenset_field():
    @pydantic.dataclasses.dataclass
    class TestFrozenSet:
        set: FrozenSet[int]

    test_set = frozenset({1, 2, 3})
    object_under_test = TestFrozenSet(set=test_set)

    assert object_under_test.set == test_set


def test_inheritance_post_init():
    post_init_called = False

    @pydantic.dataclasses.dataclass
    class Base:
        a: int

        def __post_init__(self):
            nonlocal post_init_called
            post_init_called = True

    @pydantic.dataclasses.dataclass
    class Child(Base):
        b: int

    Child(a=1, b=2)
    assert post_init_called
pydantic-1.2/tests/test_datetime_parse.py
"""
Stolen from https://github.com/django/django/blob/master/tests/utils_tests/test_dateparse.py at
9718fa2e8abe430c3526a9278dd976443d4ae3c6

Changed to:
* use standard pytest layout
* parametrize tests
"""
from datetime import date, datetime, time, timedelta, timezone

import pytest

from pydantic import BaseModel, ValidationError, errors
from pydantic.datetime_parse import parse_date, parse_datetime, parse_duration, parse_time


def create_tz(minutes):
    return timezone(timedelta(minutes=minutes))


@pytest.mark.parametrize(
    'value,result',
    [
        # Valid inputs
        ('1494012444.883309', date(2017, 5, 5)),
        (b'1494012444.883309', date(2017, 5, 5)),
        (1_494_012_444.883_309, date(2017, 5, 5)),
        ('1494012444', date(2017, 5, 5)),
        (1_494_012_444, date(2017, 5, 5)),
        (0, date(1970, 1, 1)),
        ('2012-04-23', date(2012, 4, 23)),
        (b'2012-04-23', date(2012, 4, 23)),
        ('2012-4-9', date(2012, 4, 9)),
        (date(2012, 4, 9), date(2012, 4, 9)),
        (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)),
        # Invalid inputs
        ('x20120423', errors.DateError),
        ('2012-04-56', errors.DateError),
        (19_999_999_999, date(2603, 10, 11)),  # just before watershed
        (20_000_000_001, date(1970, 8, 20)),  # just after watershed
        (1_549_316_052, date(2019, 2, 4)),  # nowish in s
        (1_549_316_052_104, date(2019, 2, 4)),  # nowish in ms
        (1_549_316_052_104_324, date(2019, 2, 4)),  # nowish in μs
        (1_549_316_052_104_324_096, date(2019, 2, 4)),  # nowish in ns
    ],
)
def test_date_parsing(value, result):
    if result == errors.DateError:
        with pytest.raises(errors.DateError):
            parse_date(value)
    else:
        assert parse_date(value) == result


@pytest.mark.parametrize(
    'value,result',
    [
        # Valid inputs
        ('09:15:00', time(9, 15)),
        ('10:10', time(10, 10)),
        ('10:20:30.400', time(10, 20, 30, 400_000)),
        (b'10:20:30.400', time(10, 20, 30, 400_000)),
        ('4:8:16', time(4, 8, 16)),
        (time(4, 8, 16), time(4, 8, 16)),
        (3610, time(1, 0, 10)),
        (3600.5, time(1, 0, 0, 500000)),
        (86400 - 1, time(23, 59, 59)),
        # Invalid inputs
        (86400, errors.TimeError),
        ('xxx', errors.TimeError),
        ('091500', errors.TimeError),
        (b'091500', errors.TimeError),
        ('09:15:90', errors.TimeError),
    ],
)
def test_time_parsing(value, result):
    if result == errors.TimeError:
        with pytest.raises(errors.TimeError):
            parse_time(value)
    else:
        assert parse_time(value) == result


@pytest.mark.parametrize(
    'value,result',
    [
        # Valid inputs
        # values in seconds
        ('1494012444.883309', datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
        (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
        ('1494012444', datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
        (b'1494012444', datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
        (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
        # values in ms
        ('1494012444000.883309', datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)),
        (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
        ('2012-04-23T09:15:00', datetime(2012, 4, 23, 9, 15)),
        ('2012-4-9 4:8:16', datetime(2012, 4, 9, 4, 8, 16)),
        ('2012-04-23T09:15:00Z', datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)),
        ('2012-4-9 4:8:16-0320', datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))),
        ('2012-04-23T10:20:30.400+02:30', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))),
        ('2012-04-23T10:20:30.400+02', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))),
        ('2012-04-23T10:20:30.400-02', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
        (b'2012-04-23T10:20:30.400-02', datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
        (datetime(2017, 5, 5), datetime(2017, 5, 5)),
        (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)),
        # Invalid inputs
        ('x20120423091500', errors.DateTimeError),
        ('2012-04-56T09:15:90', errors.DateTimeError),
        (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)),  # just before watershed
        (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)),  # just after watershed
        (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)),  # nowish in s
        (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)),  # nowish in ms
        (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)),  # nowish in μs
        (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)),  # nowish in ns
    ],
)
def test_datetime_parsing(value, result):
    if result == errors.DateTimeError:
        with pytest.raises(errors.DateTimeError):
            parse_datetime(value)
    else:
        assert parse_datetime(value) == result


@pytest.mark.parametrize(
    'delta',
    [
        timedelta(days=4, minutes=15, seconds=30, milliseconds=100),  # fractions of seconds
        timedelta(hours=10, minutes=15, seconds=30),  # hours, minutes, seconds
        timedelta(days=4, minutes=15, seconds=30),  # multiple days
        timedelta(days=1, minutes=00, seconds=00),  # single day
        timedelta(days=-4, minutes=15, seconds=30),  # negative durations
        timedelta(minutes=15, seconds=30),  # minute & seconds
        timedelta(seconds=30),  # seconds
    ],
)
def test_parse_python_format(delta):
    assert parse_duration(delta) == delta
    assert parse_duration(str(delta)) == delta


@pytest.mark.parametrize(
    'value,result',
    [
        # seconds
        (timedelta(seconds=30), timedelta(seconds=30)),
        ('30', timedelta(seconds=30)),
        (30, timedelta(seconds=30)),
        (30.1, timedelta(seconds=30, milliseconds=100)),
        # minutes seconds
        ('15:30', timedelta(minutes=15, seconds=30)),
        ('5:30', timedelta(minutes=5, seconds=30)),
        # hours minutes seconds
        ('10:15:30', timedelta(hours=10, minutes=15, seconds=30)),
        ('1:15:30', timedelta(hours=1, minutes=15, seconds=30)),
        ('100:200:300', timedelta(hours=100, minutes=200, seconds=300)),
        # days
        ('4 15:30', timedelta(days=4, minutes=15, seconds=30)),
        ('4 10:15:30', timedelta(days=4, hours=10, minutes=15, seconds=30)),
        # fractions of seconds
        ('15:30.1', timedelta(minutes=15, seconds=30, milliseconds=100)),
        ('15:30.01', timedelta(minutes=15, seconds=30, milliseconds=10)),
        ('15:30.001', timedelta(minutes=15, seconds=30, milliseconds=1)),
        ('15:30.0001', timedelta(minutes=15, seconds=30, microseconds=100)),
        ('15:30.00001', timedelta(minutes=15, seconds=30, microseconds=10)),
        ('15:30.000001', timedelta(minutes=15, seconds=30, microseconds=1)),
        (b'15:30.000001', timedelta(minutes=15, seconds=30, microseconds=1)),
        # negative
        ('-4 15:30', timedelta(days=-4, minutes=15, seconds=30)),
        ('-172800', timedelta(days=-2)),
        ('-15:30', timedelta(minutes=-15, seconds=30)),
        ('-1:15:30', timedelta(hours=-1, minutes=15, seconds=30)),
        ('-30.1', timedelta(seconds=-30, milliseconds=-100)),
        # iso_8601
        ('P4Y', errors.DurationError),
        ('P4M', errors.DurationError),
        ('P4W', errors.DurationError),
        ('P4D', timedelta(days=4)),
        ('P0.5D', timedelta(hours=12)),
        ('PT5H', timedelta(hours=5)),
        ('PT5M', timedelta(minutes=5)),
        ('PT5S', timedelta(seconds=5)),
        ('PT0.000005S', timedelta(microseconds=5)),
        (b'PT0.000005S', timedelta(microseconds=5)),
    ],
)
def test_parse_durations(value, result):
    if result == errors.DurationError:
        with pytest.raises(errors.DurationError):
            parse_duration(value)
    else:
        assert parse_duration(value) == result


@pytest.mark.parametrize(
    'field, value, error_message',
    [
        ('dt', [], 'invalid type; expected datetime, string, bytes, int or float'),
        ('dt', {}, 'invalid type; expected datetime, string, bytes, int or float'),
        ('dt', object, 'invalid type; expected datetime, string, bytes, int or float'),
        ('d', [], 'invalid type; expected date, string, bytes, int or float'),
        ('d', {}, 'invalid type; expected date, string, bytes, int or float'),
        ('d', object, 'invalid type; expected date, string, bytes, int or float'),
        ('t', [], 'invalid type; expected time, string, bytes, int or float'),
        ('t', {}, 'invalid type; expected time, string, bytes, int or float'),
        ('t', object, 'invalid type; expected time, string, bytes, int or float'),
        ('td', [], 'invalid type; expected timedelta, string, bytes, int or float'),
        ('td', {}, 'invalid type; expected timedelta, string, bytes, int or float'),
        ('td', object, 'invalid type; expected timedelta, string, bytes, int or float'),
    ],
)
def test_model_type_errors(field, value, error_message):
    class Model(BaseModel):
        dt: datetime = None
        d: date = None
        t: time = None
        td: timedelta = None

    with pytest.raises(ValidationError) as exc_info:
        Model(**{field: value})
    assert len(exc_info.value.errors()) == 1
    error = exc_info.value.errors()[0]
    assert error == {'loc': (field,), 'type': 'type_error', 'msg': error_message}


@pytest.mark.parametrize('field', ['dt', 'd', 't', 'td'])
def test_unicode_decode_error(field):
    class Model(BaseModel):
        dt: datetime = None
        d: date = None
        t: time = None
        td: timedelta = None

    with pytest.raises(ValidationError) as exc_info:
        Model(**{field: b'\x81'})
    assert len(exc_info.value.errors()) == 1
    error = exc_info.value.errors()[0]
    assert error == {
        'loc': (field,),
        'type': 'value_error.unicodedecode',
        'msg': "'utf-8' codec can't decode byte 0x81 in position 0: invalid start byte",
    }
pydantic-1.2/tests/test_edge_cases.py
import re
import sys
from decimal import Decimal
from enum import Enum
from typing import Any, Dict, List, Optional, Set, Tuple, Type, TypeVar, Union

import pytest

from pydantic import (
    BaseConfig,
    BaseModel,
    BaseSettings,
    Extra,
    NoneStrBytes,
    StrBytes,
    ValidationError,
    constr,
    errors,
    validate_model,
    validator,
)
from pydantic.fields import Field, Schema


def test_str_bytes():
    class Model(BaseModel):
        v: StrBytes = ...

    m = Model(v='s')
    assert m.v == 's'
    assert repr(m.__fields__['v']) == "ModelField(name='v', type=Union[str, bytes], required=True)"

    m = Model(v=b'b')
    assert m.v == 'b'

    with pytest.raises(ValidationError) as exc_info:
        Model(v=None)
    assert exc_info.value.errors() == [
        {'loc': ('v',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}
    ]


def test_str_bytes_none():
    class Model(BaseModel):
        v: NoneStrBytes = ...

    m = Model(v='s')
    assert m.v == 's'

    m = Model(v=b'b')
    assert m.v == 'b'

    m = Model(v=None)
    assert m.v is None
def test_union_int_str():
    class Model(BaseModel):
        v: Union[int, str] = ...

    m = Model(v=123)
    assert m.v == 123

    m = Model(v='123')
    assert m.v == 123

    m = Model(v=b'foobar')
    assert m.v == 'foobar'

    # here both validators work and it's impossible to work out which value "closer"
    m = Model(v=12.2)
    assert m.v == 12

    with pytest.raises(ValidationError) as exc_info:
        Model(v=None)
    assert exc_info.value.errors() == [
        {'loc': ('v',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}
    ]


def test_union_priority():
    class ModelOne(BaseModel):
        v: Union[int, str] = ...

    class ModelTwo(BaseModel):
        v: Union[str, int] = ...

    assert ModelOne(v='123').v == 123
    assert ModelTwo(v='123').v == '123'


def test_typed_list():
    class Model(BaseModel):
        v: List[int] = ...

    m = Model(v=[1, 2, '3'])
    assert m.v == [1, 2, 3]

    with pytest.raises(ValidationError) as exc_info:
        Model(v=[1, 'x', 'y'])
    assert exc_info.value.errors() == [
        {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
        {'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
    ]

    with pytest.raises(ValidationError) as exc_info:
        Model(v=1)
    assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}]


def test_typed_set():
    class Model(BaseModel):
        v: Set[int] = ...

    assert Model(v={1, 2, '3'}).v == {1, 2, 3}
    assert Model(v=[1, 2, '3']).v == {1, 2, 3}

    with pytest.raises(ValidationError) as exc_info:
        Model(v=[1, 'x'])
    assert exc_info.value.errors() == [
        {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_dict_dict():
    class Model(BaseModel):
        v: Dict[str, int] = ...

    assert Model(v={'foo': 1}).dict() == {'v': {'foo': 1}}


@pytest.mark.parametrize(
    'value,result',
    [
        ({'a': 2, 'b': 4}, {'a': 2, 'b': 4}),
        ({1: '2', 'b': 4}, {'1': 2, 'b': 4}),
        ([('a', 2), ('b', 4)], {'a': 2, 'b': 4}),
    ],
)
def test_typed_dict(value, result):
    class Model(BaseModel):
        v: Dict[str, int] = ...

    assert Model(v=value).v == result


@pytest.mark.parametrize(
    'value,errors',
    [
        (1, [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}]),
        ({'a': 'b'}, [{'loc': ('v', 'a'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]),
        ([1, 2, 3], [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}]),
    ],
)
def test_typed_dict_error(value, errors):
    class Model(BaseModel):
        v: Dict[str, int] = ...

    with pytest.raises(ValidationError) as exc_info:
        Model(v=value)
    assert exc_info.value.errors() == errors
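# --- Editor's illustrative sketch (not part of the original suite; helper name is hypothetical) ---
# The Dict tests above show that keys and values are coerced independently; this makes
# the direction of each coercion explicit in one line.
def _example_dict_key_value_coercion():
    class Model(BaseModel):
        v: Dict[str, int]

    assert Model(v={1: '2'}).v == {'1': 2}  # key int -> str, value str -> int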
def test_dict_key_error():
    class Model(BaseModel):
        v: Dict[int, int] = ...

    assert Model(v={1: 2, '3': '4'}).v == {1: 2, 3: 4}

    with pytest.raises(ValidationError) as exc_info:
        Model(v={'foo': 2, '3': '4'})
    assert exc_info.value.errors() == [
        {'loc': ('v', '__key__'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_tuple():
    class Model(BaseModel):
        v: Tuple[int, float, bool]

    m = Model(v=[1.2, '2.2', 'true'])
    assert m.v == (1, 2.2, True)


def test_tuple_more():
    class Model(BaseModel):
        simple_tuple: tuple = None
        tuple_of_different_types: Tuple[int, float, str, bool] = None

    m = Model(simple_tuple=[1, 2, 3, 4], tuple_of_different_types=[4, 3, 2, 1])
    assert m.dict() == {'simple_tuple': (1, 2, 3, 4), 'tuple_of_different_types': (4, 3.0, '2', True)}


def test_tuple_length_error():
    class Model(BaseModel):
        v: Tuple[int, float, bool]

    with pytest.raises(ValidationError) as exc_info:
        Model(v=[1, 2])
    assert exc_info.value.errors() == [
        {
            'loc': ('v',),
            'msg': 'wrong tuple length 2, expected 3',
            'type': 'value_error.tuple.length',
            'ctx': {'actual_length': 2, 'expected_length': 3},
        }
    ]


def test_tuple_invalid():
    class Model(BaseModel):
        v: Tuple[int, float, bool]

    with pytest.raises(ValidationError) as exc_info:
        Model(v='xxx')
    assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid tuple', 'type': 'type_error.tuple'}]


def test_tuple_value_error():
    class Model(BaseModel):
        v: Tuple[int, float, Decimal]

    with pytest.raises(ValidationError) as exc_info:
        Model(v=['x', 'y', 'x'])
    assert exc_info.value.errors() == [
        {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
        {'loc': ('v', 1), 'msg': 'value is not a valid float', 'type': 'type_error.float'},
        {'loc': ('v', 2), 'msg': 'value is not a valid decimal', 'type': 'type_error.decimal'},
    ]


def test_recursive_list():
    class SubModel(BaseModel):
        name: str = ...
        count: int = None

    class Model(BaseModel):
        v: List[SubModel] = []

    m = Model(v=[])
    assert m.v == []

    m = Model(v=[{'name': 'testing', 'count': 4}])
    assert repr(m) == "Model(v=[SubModel(name='testing', count=4)])"
    assert m.v[0].name == 'testing'
    assert m.v[0].count == 4
    assert m.dict() == {'v': [{'count': 4, 'name': 'testing'}]}

    with pytest.raises(ValidationError) as exc_info:
        Model(v=['x'])
    assert exc_info.value.errors() == [{'loc': ('v', 0), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}]


def test_recursive_list_error():
    class SubModel(BaseModel):
        name: str = ...
        count: int = None

    class Model(BaseModel):
        v: List[SubModel] = []

    with pytest.raises(ValidationError) as exc_info:
        Model(v=[{}])
    assert exc_info.value.errors() == [
        {'loc': ('v', 0, 'name'), 'msg': 'field required', 'type': 'value_error.missing'}
    ]


def test_list_unions():
    class Model(BaseModel):
        v: List[Union[int, str]] = ...

    assert Model(v=[123, '456', 'foobar']).v == [123, 456, 'foobar']

    with pytest.raises(ValidationError) as exc_info:
        Model(v=[1, 2, None])
    assert exc_info.value.errors() == [
        {'loc': ('v', 2), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}
    ]
def test_recursive_lists():
    class Model(BaseModel):
        v: List[List[Union[int, float]]] = ...

    assert Model(v=[[1, 2], [3, '4', '4.1']]).v == [[1, 2], [3, 4, 4.1]]
    assert Model.__fields__['v'].sub_fields[0].name == '_v'
    assert len(Model.__fields__['v'].sub_fields) == 1
    assert Model.__fields__['v'].sub_fields[0].sub_fields[0].name == '__v'
    assert len(Model.__fields__['v'].sub_fields[0].sub_fields) == 1
    assert Model.__fields__['v'].sub_fields[0].sub_fields[0].sub_fields[1].name == '__v_float'
    assert len(Model.__fields__['v'].sub_fields[0].sub_fields[0].sub_fields) == 2


class StrEnum(str, Enum):
    a = 'a10'
    b = 'b10'


def test_str_enum():
    class Model(BaseModel):
        v: StrEnum = ...

    assert Model(v='a10').v is StrEnum.a

    with pytest.raises(ValidationError):
        Model(v='different')


def test_any_dict():
    class Model(BaseModel):
        v: Dict[int, Any] = ...

    assert Model(v={1: 'foobar'}).dict() == {'v': {1: 'foobar'}}
    assert Model(v={123: 456}).dict() == {'v': {123: 456}}
    assert Model(v={2: [1, 2, 3]}).dict() == {'v': {2: [1, 2, 3]}}


def test_infer_alias():
    class Model(BaseModel):
        a = 'foobar'

        class Config:
            fields = {'a': '_a'}

    assert Model(_a='different').a == 'different'
    assert repr(Model.__fields__['a']) == (
        "ModelField(name='a', type=str, required=False, default='foobar', alias='_a')"
    )


def test_alias_error():
    class Model(BaseModel):
        a = 123

        class Config:
            fields = {'a': '_a'}

    assert Model(_a='123').a == 123

    with pytest.raises(ValidationError) as exc_info:
        Model(_a='foo')
    assert exc_info.value.errors() == [
        {'loc': ('_a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}
    ]


def test_annotation_config():
    class Model(BaseModel):
        b: float
        a: int = 10
        _c: str

        class Config:
            fields = {'b': 'foobar'}

    assert list(Model.__fields__.keys()) == ['b', 'a']
    assert [f.alias for f in Model.__fields__.values()] == ['foobar', 'a']
    assert Model(foobar='123').b == 123.0


def test_success_values_include():
    class Model(BaseModel):
        a: int = 1
        b: int = 2
        c: int = 3

    m = Model()
    assert m.dict() == {'a': 1, 'b': 2, 'c': 3}
    assert m.dict(include={'a'}) == {'a': 1}
    assert m.dict(exclude={'a'}) == {'b': 2, 'c': 3}
    assert m.dict(include={'a', 'b'}, exclude={'a'}) == {'b': 2}


def test_include_exclude_unset():
    class Model(BaseModel):
        a: int
        b: int
        c: int = 3
        d: int = 4
        e: int = 5
        f: int = 6

    m = Model(a=1, b=2, e=5, f=7)
    assert m.dict() == {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 7}
    assert m.__fields_set__ == {'a', 'b', 'e', 'f'}
    assert m.dict(exclude_unset=True) == {'a': 1, 'b': 2, 'e': 5, 'f': 7}

    assert m.dict(include={'a'}, exclude_unset=True) == {'a': 1}
    assert m.dict(include={'c'}, exclude_unset=True) == {}

    assert m.dict(exclude={'a'}, exclude_unset=True) == {'b': 2, 'e': 5, 'f': 7}
    assert m.dict(exclude={'c'}, exclude_unset=True) == {'a': 1, 'b': 2, 'e': 5, 'f': 7}

    assert m.dict(include={'a', 'b', 'c'}, exclude={'b'}, exclude_unset=True) == {'a': 1}
    assert m.dict(include={'a', 'b', 'c'}, exclude={'a', 'c'}, exclude_unset=True) == {'b': 2}
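# --- Editor's illustrative sketch (not part of the original suite; helper name is hypothetical) ---
# exclude_unset keys off __fields_set__, not the values: a field passed explicitly is
# kept even when the given value equals its default, while untouched defaults are
# dropped.
def _example_exclude_unset_vs_default_value():
    class Model(BaseModel):
        a: int = 1
        b: int = 2

    assert Model(a=1).dict(exclude_unset=True) == {'a': 1}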
def test_include_exclude_defaults():
    class Model(BaseModel):
        a: int
        b: int
        c: int = 3
        d: int = 4
        e: int = 5
        f: int = 6

    m = Model(a=1, b=2, e=5, f=7)
    assert m.dict() == {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 7}
    assert m.__fields_set__ == {'a', 'b', 'e', 'f'}
    assert m.dict(exclude_defaults=True) == {'a': 1, 'b': 2, 'f': 7}

    assert m.dict(include={'a'}, exclude_defaults=True) == {'a': 1}
    assert m.dict(include={'c'}, exclude_defaults=True) == {}

    assert m.dict(exclude={'a'}, exclude_defaults=True) == {'b': 2, 'f': 7}
    assert m.dict(exclude={'c'}, exclude_defaults=True) == {'a': 1, 'b': 2, 'f': 7}

    assert m.dict(include={'a', 'b', 'c'}, exclude={'b'}, exclude_defaults=True) == {'a': 1}
    assert m.dict(include={'a', 'b', 'c'}, exclude={'a', 'c'}, exclude_defaults=True) == {'b': 2}

    # abstract set
    assert m.dict(include={'a': 1}.keys()) == {'a': 1}
    assert m.dict(exclude={'a': 1}.keys()) == {'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 7}

    assert m.dict(include={'a': 1}.keys(), exclude_unset=True) == {'a': 1}
    assert m.dict(exclude={'a': 1}.keys(), exclude_unset=True) == {'b': 2, 'e': 5, 'f': 7}


def test_skip_defaults_deprecated():
    class Model(BaseModel):
        x: int
        b: int = 2

    m = Model(x=1)
    match = r'Model.dict\(\): "skip_defaults" is deprecated and replaced by "exclude_unset"'
    with pytest.warns(DeprecationWarning, match=match):
        assert m.dict(skip_defaults=True) == m.dict(exclude_unset=True)
    with pytest.warns(DeprecationWarning, match=match):
        assert m.dict(skip_defaults=False) == m.dict(exclude_unset=False)

    match = r'Model.json\(\): "skip_defaults" is deprecated and replaced by "exclude_unset"'
    with pytest.warns(DeprecationWarning, match=match):
        assert m.json(skip_defaults=True) == m.json(exclude_unset=True)
    with pytest.warns(DeprecationWarning, match=match):
        assert m.json(skip_defaults=False) == m.json(exclude_unset=False)


def test_advanced_exclude():
    class SubSubModel(BaseModel):
        a: str
        b: str

    class SubModel(BaseModel):
        c: str
        d: List[SubSubModel]

    class Model(BaseModel):
        e: str
        f: SubModel

    m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')]))

    assert m.dict(exclude={'f': {'c': ..., 'd': {-1: {'a'}}}}) == {
        'e': 'e',
        'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]},
    }
    assert m.dict(exclude={'e': ..., 'f': {'d'}}) == {'f': {'c': 'foo'}}


def test_advanced_value_include():
    class SubSubModel(BaseModel):
        a: str
        b: str

    class SubModel(BaseModel):
        c: str
        d: List[SubSubModel]

    class Model(BaseModel):
        e: str
        f: SubModel

    m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')]))

    assert m.dict(include={'f'}) == {'f': {'c': 'foo', 'd': [{'a': 'a', 'b': 'b'}, {'a': 'c', 'b': 'e'}]}}
    assert m.dict(include={'e'}) == {'e': 'e'}
    assert m.dict(include={'f': {'d': {0: ..., -1: {'b'}}}}) == {'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]}}


def test_advanced_value_exclude_include():
    class SubSubModel(BaseModel):
        a: str
        b: str

    class SubModel(BaseModel):
        c: str
        d: List[SubSubModel]

    class Model(BaseModel):
        e: str
        f: SubModel

    m = Model(e='e', f=SubModel(c='foo', d=[SubSubModel(a='a', b='b'), SubSubModel(a='c', b='e')]))

    assert m.dict(exclude={'f': {'c': ..., 'd': {-1: {'a'}}}}, include={'f'}) == {
        'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]}
    }
    assert m.dict(exclude={'e': ..., 'f': {'d'}}, include={'e', 'f'}) == {'f': {'c': 'foo'}}

    assert m.dict(exclude={'f': {'d': {-1: {'a'}}}}, include={'f': {'d'}}) == {
        'f': {'d': [{'a': 'a', 'b': 'b'}, {'b': 'e'}]}
    }


def test_field_set_ignore_extra():
    class Model(BaseModel):
        a: int
        b: int
        c: int = 3

        class Config:
            extra = Extra.ignore

    m = Model(a=1, b=2)
    assert m.dict() == {'a': 1, 'b': 2, 'c': 3}
    assert m.__fields_set__ == {'a', 'b'}
    assert m.dict(exclude_unset=True) == {'a': 1, 'b': 2}

    m2 = Model(a=1, b=2, d=4)
    assert m2.dict() == {'a': 1, 'b': 2, 'c': 3}
    assert m2.__fields_set__ == {'a', 'b'}
    assert m2.dict(exclude_unset=True) == {'a': 1, 'b': 2}
def test_field_set_allow_extra():
    class Model(BaseModel):
        a: int
        b: int
        c: int = 3

        class Config:
            extra = Extra.allow

    m = Model(a=1, b=2)
    assert m.dict() == {'a': 1, 'b': 2, 'c': 3}
    assert m.__fields_set__ == {'a', 'b'}
    assert m.dict(exclude_unset=True) == {'a': 1, 'b': 2}

    m2 = Model(a=1, b=2, d=4)
    assert m2.dict() == {'a': 1, 'b': 2, 'c': 3, 'd': 4}
    assert m2.__fields_set__ == {'a', 'b', 'd'}
    assert m2.dict(exclude_unset=True) == {'a': 1, 'b': 2, 'd': 4}


def test_field_set_field_name():
    class Model(BaseModel):
        a: int
        field_set: int
        b: int = 3

    assert Model(a=1, field_set=2).dict() == {'a': 1, 'field_set': 2, 'b': 3}
    assert Model(a=1, field_set=2).dict(exclude_unset=True) == {'a': 1, 'field_set': 2}
    assert Model.construct(a=1, field_set=3).dict() == {'a': 1, 'field_set': 3, 'b': 3}


def test_values_order():
    class Model(BaseModel):
        a: int = 1
        b: int = 2
        c: int = 3

    m = Model(c=30, b=20, a=10)
    assert list(m) == [('a', 10), ('b', 20), ('c', 30)]


def test_inheritance():
    class Foo(BaseModel):
        a: float = ...

    class Bar(Foo):
        x: float = 12.3
        a = 123.0

    assert Bar().dict() == {'x': 12.3, 'a': 123.0}


def test_invalid_type():
    with pytest.raises(RuntimeError) as exc_info:

        class Model(BaseModel):
            x: 43 = 123

    assert 'error checking inheritance of 43 (type: int)' in exc_info.value.args[0]


class CustomStr(str):
    def foobar(self):
        return 7


@pytest.mark.parametrize(
    'value,expected',
    [
        ('a string', 'a string'),
        (b'some bytes', 'some bytes'),
        (bytearray('foobar', encoding='utf8'), 'foobar'),
        (123, '123'),
        (123.45, '123.45'),
        (Decimal('12.45'), '12.45'),
        (True, 'True'),
        (False, 'False'),
        (StrEnum.a, 'a10'),
        (CustomStr('whatever'), 'whatever'),
    ],
)
def test_valid_string_types(value, expected):
    class Model(BaseModel):
        v: str

    assert Model(v=value).v == expected


@pytest.mark.parametrize(
    'value,errors',
    [
        ({'foo': 'bar'}, [{'loc': ('v',), 'msg': 'str type expected', 'type': 'type_error.str'}]),
        ([1, 2, 3], [{'loc': ('v',), 'msg': 'str type expected', 'type': 'type_error.str'}]),
    ],
)
def test_invalid_string_types(value, errors):
    class Model(BaseModel):
        v: str

    with pytest.raises(ValidationError) as exc_info:
        Model(v=value)
    assert exc_info.value.errors() == errors


def test_inheritance_config():
    class Parent(BaseModel):
        a: int

    class Child(Parent):
        b: str

        class Config:
            fields = {'a': 'aaa', 'b': 'bbb'}

    m = Child(aaa=1, bbb='s')
    assert repr(m) == "Child(a=1, b='s')"


def test_partial_inheritance_config():
    class Parent(BaseModel):
        a: int

        class Config:
            fields = {'a': 'aaa'}

    class Child(Parent):
        b: str

        class Config:
            fields = {'b': 'bbb'}

    m = Child(aaa=1, bbb='s')
    assert repr(m) == "Child(a=1, b='s')"


def test_annotation_inheritance():
    class A(BaseModel):
        integer: int = 1

    class B(A):
        integer = 2

    assert B.__annotations__['integer'] == int
    assert B.__fields__['integer'].type_ == int

    class C(A):
        integer: str = 'G'

    assert C.__annotations__['integer'] == str
    assert C.__fields__['integer'].type_ == str

    with pytest.raises(TypeError) as exc_info:

        class D(A):
            integer = 'G'

    assert str(exc_info.value) == (
        'The type of D.integer differs from the new default value; '
        'if you wish to change the type of this field, please use a type annotation'
    )
def test_string_none():
    class Model(BaseModel):
        a: constr(min_length=20, max_length=1000) = ...

        class Config:
            extra = Extra.ignore

    with pytest.raises(ValidationError) as exc_info:
        Model(a=None)
    assert exc_info.value.errors() == [
        {'loc': ('a',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}
    ]


def test_alias_camel_case():
    class Model(BaseModel):
        one_thing: int
        another_thing: int

        class Config(BaseConfig):
            @classmethod
            def get_field_info(cls, name):
                field_config = super().get_field_info(name) or {}
                if 'alias' not in field_config:
                    field_config['alias'] = re.sub(r'(?:^|_)([a-z])', lambda m: m.group(1).upper(), name)
                return field_config

    v = Model(**{'OneThing': 123, 'AnotherThing': '321'})
    assert v.one_thing == 123
    assert v.another_thing == 321
    assert v == {'one_thing': 123, 'another_thing': 321}


def test_get_field_info_inherit():
    class ModelOne(BaseModel):
        class Config(BaseConfig):
            @classmethod
            def get_field_info(cls, name):
                field_config = super().get_field_info(name) or {}
                if 'alias' not in field_config:
                    field_config['alias'] = re.sub(r'_([a-z])', lambda m: m.group(1).upper(), name)
                return field_config

    class ModelTwo(ModelOne):
        one_thing: int
        another_thing: int
        third_thing: int

        class Config:
            fields = {'third_thing': 'Banana'}

    v = ModelTwo(**{'oneThing': 123, 'anotherThing': '321', 'Banana': 1})
    assert v == {'one_thing': 123, 'another_thing': 321, 'third_thing': 1}


def test_return_errors_ok():
    class Model(BaseModel):
        foo: int
        bar: List[int]

    assert validate_model(Model, {'foo': '123', 'bar': (1, 2, 3)}) == (
        {'foo': 123, 'bar': [1, 2, 3]},
        {'foo', 'bar'},
        None,
    )
    d, f, e = validate_model(Model, {'foo': '123', 'bar': (1, 2, 3)}, False)
    assert d == {'foo': 123, 'bar': [1, 2, 3]}
    assert f == {'foo', 'bar'}
    assert e is None


def test_return_errors_error():
    class Model(BaseModel):
        foo: int
        bar: List[int]

    d, f, e = validate_model(Model, {'foo': '123', 'bar': (1, 2, 'x')}, False)
    assert d == {'foo': 123}
    assert f == {'foo', 'bar'}
    assert e.errors() == [{'loc': ('bar', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]

    d, f, e = validate_model(Model, {'bar': (1, 2, 3)}, False)
    assert d == {'bar': [1, 2, 3]}
    assert f == {'bar'}
    assert e.errors() == [{'loc': ('foo',), 'msg': 'field required', 'type': 'value_error.missing'}]


def test_optional_required():
    class Model(BaseModel):
        bar: Optional[int]

    assert Model(bar=123).dict() == {'bar': 123}
    assert Model().dict() == {'bar': None}
    assert Model(bar=None).dict() == {'bar': None}
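# --- Editor's illustrative sketch (not part of the original suite; helper name is hypothetical) ---
# test_optional_required above relies on Optional[X] implying a default of None in
# pydantic v1, so the field is not required; an explicit default restores the usual
# behaviour while still permitting None as input.
def _example_optional_with_explicit_default():
    class Model(BaseModel):
        a: Optional[int] = 0

    assert Model().a == 0
    assert Model(a=None).a is None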
assert exc_info.value.args[0].startswith('Invalid signature for validator') def test_unable_to_infer(): with pytest.raises(errors.ConfigError) as exc_info: class InvalidDefinitionModel(BaseModel): x = None assert exc_info.value.args[0] == 'unable to infer type for attribute "x"' def test_multiple_errors(): class Model(BaseModel): a: Union[None, int, float, Decimal] with pytest.raises(ValidationError) as exc_info: Model(a='foobar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('a',), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, {'loc': ('a',), 'msg': 'value is not a valid decimal', 'type': 'type_error.decimal'}, ] assert Model().a is None assert Model(a=None).a is None def test_pop_by_alias(): class Model(BaseModel): last_updated_by: Optional[str] = None class Config: extra = Extra.forbid allow_population_by_field_name = True fields = {'last_updated_by': 'lastUpdatedBy'} assert Model(lastUpdatedBy='foo').dict() == {'last_updated_by': 'foo'} assert Model(last_updated_by='foo').dict() == {'last_updated_by': 'foo'} with pytest.raises(ValidationError) as exc_info: Model(lastUpdatedBy='foo', last_updated_by='bar') assert exc_info.value.errors() == [ {'loc': ('last_updated_by',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'} ] def test_validate_all(): class Model(BaseModel): a: int b: int class Config: validate_all = True with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_force_extra(): class Model(BaseModel): foo: int class Config: extra = 'ignore' assert Model.__config__.extra is Extra.ignore def test_illegal_extra_value(): with pytest.raises(ValueError, match='is not a valid value for "extra"'): class Model(BaseModel): foo: int class Config: extra = 'foo' def test_multiple_inheritance_config(): class Parent(BaseModel): class Config: allow_mutation = False extra = Extra.forbid class Mixin(BaseModel): class Config: use_enum_values = True class Child(Mixin, Parent): class Config: allow_population_by_field_name = True assert BaseModel.__config__.allow_mutation is True assert BaseModel.__config__.allow_population_by_field_name is False assert BaseModel.__config__.extra is Extra.ignore assert BaseModel.__config__.use_enum_values is False assert Parent.__config__.allow_mutation is False assert Parent.__config__.allow_population_by_field_name is False assert Parent.__config__.extra is Extra.forbid assert Parent.__config__.use_enum_values is False assert Mixin.__config__.allow_mutation is True assert Mixin.__config__.allow_population_by_field_name is False assert Mixin.__config__.extra is Extra.ignore assert Mixin.__config__.use_enum_values is True assert Child.__config__.allow_mutation is False assert Child.__config__.allow_population_by_field_name is True assert Child.__config__.extra is Extra.forbid assert Child.__config__.use_enum_values is True def test_submodel_different_type(): class Foo(BaseModel): a: int class Bar(BaseModel): b: int class Spam(BaseModel): c: Foo assert Spam(c={'a': '123'}).dict() == {'c': {'a': 123}} with pytest.raises(ValidationError): Spam(c={'b': '123'}) assert Spam(c=Foo(a='123')).dict() == {'c': {'a': 123}} with pytest.raises(ValidationError): Spam(c=Bar(b='123')) def test_self(): class Model(BaseModel): self: str m = Model.parse_obj(dict(self='some 
value')) assert m.dict() == {'self': 'some value'} assert m.self == 'some value' assert m.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'self': {'title': 'Self', 'type': 'string'}}, 'required': ['self'], } @pytest.mark.parametrize('model', [BaseModel, BaseSettings]) def test_self_recursive(model): class SubModel(model): self: int class Model(model): sm: SubModel m = Model.parse_obj({'sm': {'self': '123'}}) assert m.dict() == {'sm': {'self': 123}} @pytest.mark.parametrize('model', [BaseModel, BaseSettings]) def test_nested_init(model): class NestedModel(model): self: str modified_number: int = 1 def __init__(someinit, **kwargs): super().__init__(**kwargs) someinit.modified_number += 1 class TopModel(model): self: str nest: NestedModel m = TopModel.parse_obj(dict(self='Top Model', nest=dict(self='Nested Model', modified_number=0))) assert m.self == 'Top Model' assert m.nest.self == 'Nested Model' assert m.nest.modified_number == 1 def test_values_attr_deprecation(): class Model(BaseModel): foo: int bar: str m = Model(foo=4, bar='baz') with pytest.warns(DeprecationWarning, match='`__values__` attribute is deprecated, use `__dict__` instead'): assert m.__values__ == m.__dict__ def test_init_inspection(): class Foobar(BaseModel): x: int def __init__(self, **data) -> None: with pytest.raises(AttributeError): assert self.x super().__init__(**data) Foobar(x=1) def test_type_on_annotation(): class FooBar: pass class Model(BaseModel): a: int = int b: Type[int] c: Type[int] = int d: FooBar = FooBar e: Type[FooBar] f: Type[FooBar] = FooBar assert Model.__fields__.keys() == {'b', 'c', 'e', 'f'} def test_assign_type(): class Parent: def echo(self): return 'parent' class Child(Parent): def echo(self): return 'child' class Different: def echo(self): return 'different' class Model(BaseModel): v: Type[Parent] = Parent assert Model(v=Parent).v().echo() == 'parent' assert Model().v().echo() == 'parent' assert Model(v=Child).v().echo() == 'child' with pytest.raises(ValidationError) as exc_info: Model(v=Different) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'subclass of Parent expected', 'type': 'type_error.subclass', 'ctx': {'expected_class': 'Parent'}, } ] def test_optional_subfields(): class Model(BaseModel): a: Optional[int] assert Model.__fields__['a'].sub_fields is None assert Model.__fields__['a'].allow_none is True with pytest.raises(ValidationError) as exc_info: Model(a='foobar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert Model().a is None assert Model(a=None).a is None assert Model(a=12).a == 12 def test_not_optional_subfields(): class Model(BaseModel): a: Optional[int] @validator('a') def check_a(cls, v): return v assert Model.__fields__['a'].sub_fields is None # assert Model.__fields__['a'].required is True assert Model.__fields__['a'].allow_none is True with pytest.raises(ValidationError) as exc_info: Model(a='foobar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert Model().a is None assert Model(a=None).a is None assert Model(a=12).a == 12 def test_scheme_deprecated(): with pytest.warns(DeprecationWarning, match='`Schema` is deprecated, use `Field` instead'): class Model(BaseModel): foo: int = Schema(4) def test_population_by_alias(): with pytest.warns(DeprecationWarning, match='"allow_population_by_alias" is deprecated and replaced by'): class Model(BaseModel): a: str class Config: 
allow_population_by_alias = True fields = {'a': {'alias': '_a'}} assert Model.__config__.allow_population_by_field_name is True assert Model(a='different').a == 'different' assert Model(a='different').dict() == {'a': 'different'} assert Model(a='different').dict(by_alias=True) == {'_a': 'different'} def test_fields_deprecated(): class Model(BaseModel): v: str = 'x' with pytest.warns(DeprecationWarning, match='`fields` attribute is deprecated, use `__fields__` instead'): assert Model().fields.keys() == {'v'} assert Model().__fields__.keys() == {'v'} assert Model.__fields__.keys() == {'v'} def test_alias_child_precedence(): class Parent(BaseModel): x: int class Config: fields = {'x': 'x1'} class Child(Parent): y: int class Config: fields = {'y': 'y2', 'x': 'x2'} assert Child.__fields__['y'].alias == 'y2' assert Child.__fields__['x'].alias == 'x2' def test_alias_generator_parent(): class Parent(BaseModel): x: int class Config: allow_population_by_field_name = True @classmethod def alias_generator(cls, f_name): return f_name + '1' class Child(Parent): y: int class Config: @classmethod def alias_generator(cls, f_name): return f_name + '2' assert Child.__fields__['y'].alias == 'y2' assert Child.__fields__['x'].alias == 'x2' def test_optional_field_constraints(): class MyModel(BaseModel): my_int: Optional[int] = Field(..., ge=3) with pytest.raises(ValidationError) as exc_info: MyModel(my_int=2) assert exc_info.value.errors() == [ { 'loc': ('my_int',), 'msg': 'ensure this value is greater than or equal to 3', 'type': 'value_error.number.not_ge', 'ctx': {'limit_value': 3}, } ] def test_field_str_shape(): class Model(BaseModel): a: List[int] assert repr(Model.__fields__['a']) == "ModelField(name='a', type=List[int], required=True)" assert str(Model.__fields__['a']) == "name='a' type=List[int] required=True" @pytest.mark.skipif(sys.version_info < (3, 7), reason='output slightly different for 3.6') @pytest.mark.parametrize( 'type_,expected', [ (int, 'int'), (Optional[int], 'Optional[int]'), (Union[None, int, str], 'Union[NoneType, int, str]'), (Union[int, str, bytes], 'Union[int, str, bytes]'), (List[int], 'List[int]'), (Tuple[int, str, bytes], 'Tuple[int, str, bytes]'), (Union[List[int], Set[bytes]], 'Union[List[int], Set[bytes]]'), (List[Tuple[int, int]], 'List[Tuple[int, int]]'), (Dict[int, str], 'Mapping[int, str]'), (Tuple[int, ...], 'Tuple[int, ...]'), (Optional[List[int]], 'Optional[List[int]]'), ], ) def test_field_type_display(type_, expected): class Model(BaseModel): a: type_ assert Model.__fields__['a']._type_display() == expected def test_any_none(): class MyModel(BaseModel): foo: Any m = MyModel(foo=None) assert dict(m) == {'foo': None} def test_type_var_any(): Foobar = TypeVar('Foobar') class MyModel(BaseModel): foo: Foobar assert MyModel.schema() == {'title': 'MyModel', 'type': 'object', 'properties': {'foo': {'title': 'Foo'}}} assert MyModel(foo=None).foo is None assert MyModel(foo='x').foo == 'x' assert MyModel(foo=123).foo == 123 def test_type_var_constraint(): Foobar = TypeVar('Foobar', int, str) class MyModel(BaseModel): foo: Foobar assert MyModel.schema() == { 'title': 'MyModel', 'type': 'object', 'properties': {'foo': {'title': 'Foo', 'anyOf': [{'type': 'integer'}, {'type': 'string'}]}}, 'required': ['foo'], } with pytest.raises(ValidationError, match='none is not an allowed value'): MyModel(foo=None) with pytest.raises(ValidationError, match='value is not a valid integer'): MyModel(foo=[1, 2, 3]) assert MyModel(foo='x').foo == 'x' assert MyModel(foo=123).foo == 123 def 
test_type_var_bound(): Foobar = TypeVar('Foobar', bound=int) class MyModel(BaseModel): foo: Foobar assert MyModel.schema() == { 'title': 'MyModel', 'type': 'object', 'properties': {'foo': {'title': 'Foo', 'type': 'integer'}}, 'required': ['foo'], } with pytest.raises(ValidationError, match='none is not an allowed value'): MyModel(foo=None) with pytest.raises(ValidationError, match='value is not a valid integer'): MyModel(foo='x') assert MyModel(foo=123).foo == 123 def test_dict_bare(): class MyModel(BaseModel): foo: Dict m = MyModel(foo={'x': 'a', 'y': None}) assert m.foo == {'x': 'a', 'y': None} def test_list_bare(): class MyModel(BaseModel): foo: List m = MyModel(foo=[1, 2, None]) assert m.foo == [1, 2, None] def test_dict_any(): class MyModel(BaseModel): foo: Dict[str, Any] m = MyModel(foo={'x': 'a', 'y': None}) assert m.foo == {'x': 'a', 'y': None} def test_modify_fields(): class Foo(BaseModel): foo: List[List[int]] @validator('foo') def check_something(cls, value): return value class Bar(Foo): pass # output is slightly different for 3.6 if sys.version_info >= (3, 7): assert repr(Foo.__fields__['foo']) == "ModelField(name='foo', type=List[List[int]], required=True)" assert repr(Bar.__fields__['foo']) == "ModelField(name='foo', type=List[List[int]], required=True)" assert Foo(foo=[[0, 1]]).foo == [[0, 1]] assert Bar(foo=[[0, 1]]).foo == [[0, 1]] def test_exclude_none(): class MyModel(BaseModel): a: Optional[int] = None b: int = 2 m = MyModel(a=5) assert m.dict(exclude_none=True) == {'a': 5, 'b': 2} m = MyModel(b=3) assert m.dict(exclude_none=True) == {'b': 3} assert m.json(exclude_none=True) == '{"b": 3}' def test_exclude_none_recursive(): class ModelA(BaseModel): a: Optional[int] = None b: int = 1 class ModelB(BaseModel): c: int d: int = 2 e: ModelA f: Optional[str] = None m = ModelB(c=5, e={'a': 0}) assert m.dict() == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}, 'f': None} assert m.dict(exclude_none=True) == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}} assert dict(m) == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}, 'f': None} m = ModelB(c=5, e={'b': 20}, f='test') assert m.dict() == {'c': 5, 'd': 2, 'e': {'a': None, 'b': 20}, 'f': 'test'} assert m.dict(exclude_none=True) == {'c': 5, 'd': 2, 'e': {'b': 20}, 'f': 'test'} assert dict(m) == {'c': 5, 'd': 2, 'e': {'a': None, 'b': 20}, 'f': 'test'} def test_exclude_none_with_extra(): class MyModel(BaseModel): a: str = 'default' b: Optional[str] = None class Config: extra = 'allow' m = MyModel(a='a', c='c') assert m.dict(exclude_none=True) == {'a': 'a', 'c': 'c'} assert m.dict() == {'a': 'a', 'b': None, 'c': 'c'} m = MyModel(a='a', b='b', c=None) assert m.dict(exclude_none=True) == {'a': 'a', 'b': 'b'} assert m.dict() == {'a': 'a', 'b': 'b', 'c': None} def test_str_method_inheritance(): import pydantic class Foo(pydantic.BaseModel): x: int = 3 y: int = 4 def __str__(self): return str(self.y + self.x) class Bar(Foo): z: bool = False assert str(Foo()) == '7' assert str(Bar()) == '7' def test_repr_method_inheritance(): import pydantic class Foo(pydantic.BaseModel): x: int = 3 y: int = 4 def __repr__(self): return repr(self.y + self.x) class Bar(Foo): z: bool = False assert repr(Foo()) == '7' assert repr(Bar()) == '7' def test_optional_validator(): val_calls = [] class Model(BaseModel): something: Optional[str] @validator('something') def check_something(cls, v): val_calls.append(v) return v assert Model().dict() == {'something': None} assert Model(something=None).dict() == {'something': None} assert Model(something='hello').dict() == {'something': 'hello'} 
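# the default (field omitted) is not validated without always=True, so only the explicit None and 'hello' assignments reached the validator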
assert val_calls == [None, 'hello'] def test_required_optional(): class Model(BaseModel): nullable1: Optional[int] = ... nullable2: Optional[int] = Field(...) with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [ {'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}, ] with pytest.raises(ValidationError) as exc_info: Model(nullable1=1) assert exc_info.value.errors() == [{'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}] with pytest.raises(ValidationError) as exc_info: Model(nullable2=2) assert exc_info.value.errors() == [{'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}] assert Model(nullable1=None, nullable2=None).dict() == {'nullable1': None, 'nullable2': None} assert Model(nullable1=1, nullable2=2).dict() == {'nullable1': 1, 'nullable2': 2} with pytest.raises(ValidationError) as exc_info: Model(nullable1='some text') assert exc_info.value.errors() == [ {'loc': ('nullable1',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_required_any(): class Model(BaseModel): optional1: Any optional2: Any = None nullable1: Any = ... nullable2: Any = Field(...) with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [ {'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}, ] with pytest.raises(ValidationError) as exc_info: Model(nullable1='a') assert exc_info.value.errors() == [{'loc': ('nullable2',), 'msg': 'field required', 'type': 'value_error.missing'}] with pytest.raises(ValidationError) as exc_info: Model(nullable2=False) assert exc_info.value.errors() == [{'loc': ('nullable1',), 'msg': 'field required', 'type': 'value_error.missing'}] assert Model(nullable1=None, nullable2=None).dict() == { 'optional1': None, 'optional2': None, 'nullable1': None, 'nullable2': None, } assert Model(nullable1=1, nullable2='two').dict() == { 'optional1': None, 'optional2': None, 'nullable1': 1, 'nullable2': 'two', } assert Model(optional1='op1', optional2=False, nullable1=1, nullable2='two').dict() == { 'optional1': 'op1', 'optional2': False, 'nullable1': 1, 'nullable2': 'two', } pydantic-1.2/tests/test_errors.py000066400000000000000000000245241357000400300172550ustar00rootroot00000000000000import sys from typing import Dict, List, Optional, Union from uuid import UUID, uuid4 import pytest from pydantic import UUID1, BaseConfig, BaseModel, PydanticTypeError, ValidationError, conint, errors, validator from pydantic.error_wrappers import flatten_errors, get_exc_type from pydantic.typing import Literal def test_pydantic_error(): class TestError(PydanticTypeError): code = 'test_code' msg_template = 'test message template "{test_ctx}"' def __init__(self, *, test_ctx: int) -> None: super().__init__(test_ctx=test_ctx) with pytest.raises(TestError) as exc_info: raise TestError(test_ctx='test_value') assert str(exc_info.value) == 'test message template "test_value"' @pytest.mark.skipif(not Literal, reason='typing_extensions not installed') def test_interval_validation_error(): class Foo(BaseModel): model_type: Literal['foo'] f: int class Bar(BaseModel): model_type: Literal['bar'] b: int class MyModel(BaseModel): foobar: Union[Foo, Bar] @validator('foobar', pre=True) def 
check_action(cls, v): if isinstance(v, dict): model_type = v.get('model_type') if model_type == 'foo': return Foo(**v) if model_type == 'bar': return Bar(**v) raise ValueError('not valid Foo or Bar') m1 = MyModel(foobar={'model_type': 'foo', 'f': '1'}) assert m1.foobar.f == 1 assert isinstance(m1.foobar, Foo) m2 = MyModel(foobar={'model_type': 'bar', 'b': '2'}) assert m2.foobar.b == 2 assert isinstance(m2.foobar, BaseModel) with pytest.raises(ValidationError) as exc_info: MyModel(foobar={'model_type': 'foo', 'f': 'x'}) assert exc_info.value.errors() == [ {'loc': ('foobar', 'f'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] @pytest.mark.skipif(sys.version_info < (3, 7), reason='output slightly different for 3.6') def test_error_on_optional(): class Foobar(BaseModel): foo: Optional[str] = None @validator('foo', always=True, pre=True) def check_foo(cls, v): raise ValueError('custom error') with pytest.raises(ValidationError) as exc_info: Foobar(foo='x') assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'custom error', 'type': 'value_error'}] assert repr(exc_info.value.raw_errors[0]) == "ErrorWrapper(exc=ValueError('custom error'), loc=('foo',))" with pytest.raises(ValidationError) as exc_info: Foobar(foo=None) assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'custom error', 'type': 'value_error'}] @pytest.mark.parametrize( 'result,expected', ( ( 'errors', [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('b', 'x'), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('b', 'z'), 'msg': 'field required', 'type': 'value_error.missing'}, {'loc': ('c', 0, 'x'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('d',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('d',), 'msg': 'value is not a valid uuid', 'type': 'type_error.uuid'}, {'loc': ('e', '__key__'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('f', 0), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}, { 'loc': ('g',), 'msg': 'uuid version 1 expected', 'type': 'value_error.uuid.version', 'ctx': {'required_version': 1}, }, { 'loc': ('h',), 'msg': 'yet another error message template 42', 'type': 'value_error.number.not_gt', 'ctx': {'limit_value': 42}, }, ], ), ( 'json', """\ [ { "loc": [ "a" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "b", "x" ], "msg": "field required", "type": "value_error.missing" }, { "loc": [ "b", "z" ], "msg": "field required", "type": "value_error.missing" }, { "loc": [ "c", 0, "x" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "d" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "d" ], "msg": "value is not a valid uuid", "type": "type_error.uuid" }, { "loc": [ "e", "__key__" ], "msg": "value is not a valid integer", "type": "type_error.integer" }, { "loc": [ "f", 0 ], "msg": "none is not an allowed value", "type": "type_error.none.not_allowed" }, { "loc": [ "g" ], "msg": "uuid version 1 expected", "type": "value_error.uuid.version", "ctx": { "required_version": 1 } }, { "loc": [ "h" ], "msg": "yet another error message template 42", "type": "value_error.number.not_gt", "ctx": { "limit_value": 42 } } ]""", ), ( '__str__', """\ 10 validation errors for Model a value is not a valid integer (type=type_error.integer) b -> x field required (type=value_error.missing) b -> z field 
required (type=value_error.missing) c -> 0 -> x value is not a valid integer (type=type_error.integer) d value is not a valid integer (type=type_error.integer) d value is not a valid uuid (type=type_error.uuid) e -> __key__ value is not a valid integer (type=type_error.integer) f -> 0 none is not an allowed value (type=type_error.none.not_allowed) g uuid version 1 expected (type=value_error.uuid.version; required_version=1) h yet another error message template 42 (type=value_error.number.not_gt; limit_value=42)""", ), ), ) def test_validation_error(result, expected): class SubModel(BaseModel): x: int y: int z: str class Model(BaseModel): a: int b: SubModel c: List[SubModel] d: Union[int, UUID] e: Dict[int, str] f: List[Union[int, str]] g: UUID1 h: conint(gt=42) class Config: error_msg_templates = {'value_error.number.not_gt': 'yet another error message template {limit_value}'} with pytest.raises(ValidationError) as exc_info: Model.parse_obj( { 'a': 'not_int', 'b': {'y': 42}, 'c': [{'x': 'not_int', 'y': 42, 'z': 'string'}], 'd': 'string', 'e': {'not_int': 'string'}, 'f': [None], 'g': uuid4(), 'h': 21, } ) assert getattr(exc_info.value, result)() == expected def test_errors_unknown_error_object(): with pytest.raises(RuntimeError): list(flatten_errors([object], BaseConfig)) @pytest.mark.parametrize( 'exc,type_', ( (TypeError(), 'type_error'), (ValueError(), 'value_error'), (AssertionError(), 'assertion_error'), (errors.DecimalIsNotFiniteError(), 'value_error.decimal.not_finite'), ), ) def test_get_exc_type(exc, type_): if isinstance(type_, str): assert get_exc_type(type(exc)) == type_ else: with pytest.raises(type_) as exc_info: get_exc_type(type(exc)) assert isinstance(exc_info.value, type_) def test_single_error(): class Model(BaseModel): x: int with pytest.raises(ValidationError) as exc_info: Model(x='x') expected = """\ 1 validation error for Model x value is not a valid integer (type=type_error.integer)""" assert str(exc_info.value) == expected assert str(exc_info.value) == expected # to check lru cache doesn't break anything with pytest.raises(ValidationError) as exc_info: Model() assert ( str(exc_info.value) == """\ 1 validation error for Model x field required (type=value_error.missing)""" ) def test_nested_error(): class NestedModel3(BaseModel): x: str class NestedModel2(BaseModel): data2: List[NestedModel3] class NestedModel1(BaseModel): data1: List[NestedModel2] with pytest.raises(ValidationError) as exc_info: NestedModel1(data1=[{'data2': [{'y': 1}]}]) expected = [{'loc': ('data1', 0, 'data2', 0, 'x'), 'msg': 'field required', 'type': 'value_error.missing'}] assert exc_info.value.errors() == expected def test_validate_assignment_error(): class Model(BaseModel): x: int class Config: validate_assignment = True model = Model(x=1) with pytest.raises(ValidationError) as exc_info: model.x = 'a' assert ( str(exc_info.value) == '1 validation error for Model\nx\n value is not a valid integer (type=type_error.integer)' ) def test_submodel_override_validation_error(): class SubmodelA(BaseModel): x: str class SubmodelB(SubmodelA): x: int class Model(BaseModel): submodel: SubmodelB submodel = SubmodelA(x='a') with pytest.raises(ValidationError) as exc_info: Model(submodel=submodel) assert exc_info.value.errors() == [ {'loc': ('submodel', 'x'), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_validation_error_methods(): class Model(BaseModel): x: int with pytest.raises(ValidationError) as exc_info: Model(x='x') e = exc_info.value assert ( str(e) == """\ 1 
validation error for Model x value is not a valid integer (type=type_error.integer)""" ) assert e.errors() == [{'loc': ('x',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}] assert e.json(indent=None) == ( '[{"loc": ["x"], "msg": "value is not a valid integer", "type": "type_error.integer"}]' ) assert repr(e) == ( "ValidationError(model='Model', errors=[{'loc': ('x',), 'msg': 'value is not a valid integer', " "'type': 'type_error.integer'}])" ) pydantic-1.2/tests/test_forward_ref.py000066400000000000000000000246731357000400300202460ustar00rootroot00000000000000import sys import pytest from pydantic import ConfigError, ValidationError skip_not_37 = pytest.mark.skipif(sys.version_info < (3, 7), reason='testing >= 3.7 behaviour only') @skip_not_37 def test_postponed_annotations(create_module): module = create_module( """ from __future__ import annotations from pydantic import BaseModel class Model(BaseModel): a: int """ ) m = module.Model(a='123') assert m.dict() == {'a': 123} @skip_not_37 def test_postponed_annotations_optional(create_module): module = create_module( """ from __future__ import annotations from typing import Optional from pydantic import BaseModel class Model(BaseModel): a: Optional[int] """ ) assert module.Model(a='123').dict() == {'a': 123} assert module.Model().dict() == {'a': None} def test_basic_forward_ref(create_module): module = create_module( """ from typing import Optional from pydantic import BaseModel from pydantic.typing import ForwardRef class Foo(BaseModel): a: int FooRef = ForwardRef('Foo') class Bar(BaseModel): b: Optional[FooRef] """ ) assert module.Bar().dict() == {'b': None} assert module.Bar(b={'a': '123'}).dict() == {'b': {'a': 123}} def test_self_forward_ref_module(create_module): module = create_module( """ from pydantic import BaseModel from pydantic.typing import ForwardRef Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: 'Foo' = None Foo.update_forward_refs() """ ) assert module.Foo().dict() == {'a': 123, 'b': None} assert module.Foo(b={'a': '321'}).dict() == {'a': 123, 'b': {'a': 321, 'b': None}} def test_self_forward_ref_collection(create_module): module = create_module( """ from typing import List, Dict from pydantic import BaseModel from pydantic.typing import ForwardRef Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: Foo = None c: List[Foo] = [] d: Dict[str, Foo] = {} Foo.update_forward_refs() """ ) assert module.Foo().dict() == {'a': 123, 'b': None, 'c': [], 'd': {}} assert module.Foo(b={'a': '321'}, c=[{'a': 234}], d={'bar': {'a': 345}}).dict() == { 'a': 123, 'b': {'a': 321, 'b': None, 'c': [], 'd': {}}, 'c': [{'a': 234, 'b': None, 'c': [], 'd': {}}], 'd': {'bar': {'a': 345, 'b': None, 'c': [], 'd': {}}}, } with pytest.raises(ValidationError) as exc_info: module.Foo(b={'a': '321'}, c=[{'b': 234}], d={'bar': {'a': 345}}) assert exc_info.value.errors() == [ {'loc': ('c', 0, 'b'), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'} ] def test_self_forward_ref_local(create_module): module = create_module( """ from pydantic import BaseModel from pydantic.typing import ForwardRef def main(): Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: Foo = None Foo.update_forward_refs() return Foo """ ) Foo = module.main() assert Foo().dict() == {'a': 123, 'b': None} assert Foo(b={'a': '321'}).dict() == {'a': 123, 'b': {'a': 321, 'b': None}} def test_missing_update_forward_refs(create_module): module = create_module( """ from pydantic import BaseModel from pydantic.typing import 
ForwardRef Foo = ForwardRef('Foo') class Foo(BaseModel): a: int = 123 b: Foo = None """ ) with pytest.raises(ConfigError) as exc_info: module.Foo(b=123) assert str(exc_info.value).startswith('field "b" not yet prepared so type is still a ForwardRef') def test_forward_ref_dataclass(create_module): module = create_module( """ from pydantic import AnyUrl from pydantic.dataclasses import dataclass @dataclass class Dataclass: url: AnyUrl """ ) m = module.Dataclass('http://example.com') assert m.url == 'http://example.com' @skip_not_37 def test_forward_ref_dataclass_with_future_annotations(create_module): module = create_module( """ from __future__ import annotations from pydantic import AnyUrl from pydantic.dataclasses import dataclass @dataclass class Dataclass: url: AnyUrl """ ) m = module.Dataclass('http://example.com') assert m.url == 'http://example.com' def test_forward_ref_sub_types(create_module): module = create_module( """ from typing import Union from pydantic import BaseModel from pydantic.typing import ForwardRef class Leaf(BaseModel): a: str TreeType = Union[ForwardRef('Node'), Leaf] class Node(BaseModel): value: int left: TreeType right: TreeType Node.update_forward_refs() """ ) Node = module.Node Leaf = module.Leaf data = {'value': 3, 'left': {'a': 'foo'}, 'right': {'value': 5, 'left': {'a': 'bar'}, 'right': {'a': 'buzz'}}} node = Node(**data) assert isinstance(node.left, Leaf) assert isinstance(node.right, Node) def test_forward_ref_nested_sub_types(create_module): module = create_module( """ from typing import Tuple, Union from pydantic import BaseModel from pydantic.typing import ForwardRef class Leaf(BaseModel): a: str TreeType = Union[Union[Tuple[ForwardRef('Node'), str], int], Leaf] class Node(BaseModel): value: int left: TreeType right: TreeType Node.update_forward_refs() """ ) Node = module.Node Leaf = module.Leaf data = { 'value': 3, 'left': {'a': 'foo'}, 'right': [{'value': 5, 'left': {'a': 'bar'}, 'right': {'a': 'buzz'}}, 'test'], } node = Node(**data) assert isinstance(node.left, Leaf) assert isinstance(node.right[0], Node) def test_self_reference_json_schema(create_module): module = create_module( """ from typing import List from pydantic import BaseModel class Account(BaseModel): name: str subaccounts: List['Account'] = [] Account.update_forward_refs() """ ) Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name'], } }, } @skip_not_37 def test_self_reference_json_schema_with_future_annotations(create_module): module = create_module( """ from __future__ import annotations from typing import List from pydantic import BaseModel class Account(BaseModel): name: str subaccounts: List[Account] = [] Account.update_forward_refs() """ ) Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name'], } }, } def test_circular_reference_json_schema(create_module): module = create_module( """ from typing import List from pydantic import BaseModel class Owner(BaseModel): account: 'Account' 
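# Owner and Account reference each other, so both call update_forward_refs() below once the two classes exist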
class Account(BaseModel): name: str owner: 'Owner' subaccounts: List['Account'] = [] Account.update_forward_refs() Owner.update_forward_refs() """ ) Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'owner': {'$ref': '#/definitions/Owner'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name', 'owner'], }, 'Owner': { 'title': 'Owner', 'type': 'object', 'properties': {'account': {'$ref': '#/definitions/Account'}}, 'required': ['account'], }, }, } @skip_not_37 def test_circular_reference_json_schema_with_future_annotations(create_module): module = create_module( """ from __future__ import annotations from typing import List from pydantic import BaseModel class Owner(BaseModel): account: Account class Account(BaseModel): name: str owner: Owner subaccounts: List[Account] = [] Account.update_forward_refs() Owner.update_forward_refs() """ ) Account = module.Account assert Account.schema() == { '$ref': '#/definitions/Account', 'definitions': { 'Account': { 'title': 'Account', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'owner': {'$ref': '#/definitions/Owner'}, 'subaccounts': { 'title': 'Subaccounts', 'default': [], 'type': 'array', 'items': {'$ref': '#/definitions/Account'}, }, }, 'required': ['name', 'owner'], }, 'Owner': { 'title': 'Owner', 'type': 'object', 'properties': {'account': {'$ref': '#/definitions/Account'}}, 'required': ['account'], }, }, } def test_forward_ref_with_field(create_module): create_module( """ from typing import List from pydantic import BaseModel, Field from pydantic.typing import ForwardRef Foo = ForwardRef('Foo') try: class Foo(BaseModel): c: List[Foo] = Field(..., gt=0) except ValueError: pass else: raise AssertionError('error not raised') """ ) pydantic-1.2/tests/test_generics.py000066400000000000000000000275341357000400300175440ustar00rootroot00000000000000import sys from enum import Enum from typing import Any, ClassVar, Dict, Generic, List, Optional, Tuple, Type, TypeVar, Union import pytest from pydantic import BaseModel, Field, ValidationError, root_validator, validator from pydantic.generics import GenericModel, _generic_types_cache skip_36 = pytest.mark.skipif(sys.version_info < (3, 7), reason='generics only supported for python 3.7 and above') @skip_36 def test_generic_name(): data_type = TypeVar('data_type') class Result(GenericModel, Generic[data_type]): data: data_type assert Result[List[int]].__name__ == 'Result[typing.List[int]]' @skip_36 def test_double_parameterize_error(): data_type = TypeVar('data_type') class Result(GenericModel, Generic[data_type]): data: data_type with pytest.raises(TypeError) as exc_info: Result[int][int] assert str(exc_info.value) == 'Cannot parameterize a concrete instantiation of a generic model' @skip_36 def test_value_validation(): T = TypeVar('T') class Response(GenericModel, Generic[T]): data: T @validator('data', each_item=True) def validate_value_nonzero(cls, v): if v == 0: raise ValueError('value is zero') return v @root_validator() def validate_sum(cls, values): if sum(values.get('data', {}).values()) > 5: raise ValueError('sum too large') return values assert Response[Dict[int, int]](data={1: '4'}).dict() == {'data': {1: 4}} with pytest.raises(ValidationError) as exc_info: Response[Dict[int, int]](data={1: 'a'}) assert 
exc_info.value.errors() == [ {'loc': ('data', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] with pytest.raises(ValidationError) as exc_info: Response[Dict[int, int]](data={1: 0}) assert exc_info.value.errors() == [{'loc': ('data', 1), 'msg': 'value is zero', 'type': 'value_error'}] with pytest.raises(ValidationError) as exc_info: Response[Dict[int, int]](data={1: 3, 2: 6}) assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'sum too large', 'type': 'value_error'}] @skip_36 def test_methods_are_inherited(): class CustomGenericModel(GenericModel): def method(self): return self.data T = TypeVar('T') class Model(CustomGenericModel, Generic[T]): data: T instance = Model[int](data=1) assert instance.method() == 1 @skip_36 def test_config_is_inherited(): class CustomGenericModel(GenericModel): class Config: allow_mutation = False T = TypeVar('T') class Model(CustomGenericModel, Generic[T]): data: T instance = Model[int](data=1) with pytest.raises(TypeError) as exc_info: instance.data = 2 assert str(exc_info.value) == '"Model[int]" is immutable and does not support item assignment' @skip_36 def test_default_argument(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T other: bool = True result = Result[int](data=1) assert result.other is True @skip_36 def test_default_argument_for_typevar(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T = 4 result = Result[int]() assert result.data == 4 result = Result[float]() assert result.data == 4 result = Result[int](data=1) assert result.data == 1 @skip_36 def test_classvar(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T other: ClassVar[int] = 1 assert Result.other == 1 assert Result[int].other == 1 assert Result[int](data=1).other == 1 assert 'other' not in Result.__fields__ @skip_36 def test_non_annotated_field(): T = TypeVar('T') class Result(GenericModel, Generic[T]): data: T other = True assert 'other' in Result.__fields__ assert 'other' in Result[int].__fields__ result = Result[int](data=1) assert result.other is True @skip_36 def test_must_inherit_from_generic(): with pytest.raises(TypeError) as exc_info: class Result(GenericModel): pass Result[int] assert str(exc_info.value) == 'Type Result must inherit from typing.Generic before being parameterized' @skip_36 def test_parameters_placed_on_generic(): T = TypeVar('T') with pytest.raises(TypeError, match='Type parameters should be placed on typing.Generic, not GenericModel'): class Result(GenericModel[T]): pass @skip_36 def test_parameters_must_be_typevar(): with pytest.raises(TypeError, match='Type GenericModel must inherit from typing.Generic before being '): class Result(GenericModel[int]): pass @skip_36 def test_subclass_can_be_genericized(): T = TypeVar('T') class Result(GenericModel, Generic[T]): pass Result[T] @skip_36 def test_parameter_count(): T = TypeVar('T') S = TypeVar('S') class Model(GenericModel, Generic[T, S]): x: T y: S with pytest.raises(TypeError) as exc_info: Model[int, int, int] assert str(exc_info.value) == 'Too many parameters for Model; actual 3, expected 2' with pytest.raises(TypeError) as exc_info: Model[int] assert str(exc_info.value) == 'Too few parameters for Model; actual 1, expected 2' @skip_36 def test_cover_cache(): cache_size = len(_generic_types_cache) T = TypeVar('T') class Model(GenericModel, Generic[T]): x: T Model[int] # adds both with-tuple and without-tuple version to cache assert len(_generic_types_cache) == cache_size + 2 Model[int] # uses the cache assert 
len(_generic_types_cache) == cache_size + 2 @skip_36 def test_generic_config(): data_type = TypeVar('data_type') class Result(GenericModel, Generic[data_type]): data: data_type class Config: allow_mutation = False result = Result[int](data=1) assert result.data == 1 with pytest.raises(TypeError): result.data = 2 @skip_36 def test_generic_instantiation_error(): with pytest.raises(TypeError) as exc_info: GenericModel() assert str(exc_info.value) == 'Type GenericModel cannot be used without generic parameters, e.g. GenericModel[T]' @skip_36 def test_parameterized_generic_instantiation_error(): data_type = TypeVar('data_type') class Result(GenericModel, Generic[data_type]): data: data_type with pytest.raises(TypeError) as exc_info: Result(data=1) assert str(exc_info.value) == 'Type Result cannot be used without generic parameters, e.g. Result[T]' @skip_36 def test_deep_generic(): T = TypeVar('T') S = TypeVar('S') R = TypeVar('R') class OuterModel(GenericModel, Generic[T, S, R]): a: Dict[R, Optional[List[T]]] b: Optional[Union[S, R]] c: R d: float class InnerModel(GenericModel, Generic[T, R]): c: T d: R class NormalModel(BaseModel): e: int f: str inner_model = InnerModel[int, str] generic_model = OuterModel[inner_model, NormalModel, int] inner_models = [inner_model(c=1, d='a')] generic_model(a={1: inner_models, 2: None}, b=None, c=1, d=1.5) generic_model(a={}, b=NormalModel(e=1, f='a'), c=1, d=1.5) generic_model(a={}, b=1, c=1, d=1.5) @skip_36 def test_enum_generic(): T = TypeVar('T') class MyEnum(Enum): x = 1 y = 2 class Model(GenericModel, Generic[T]): enum: T Model[MyEnum](enum=MyEnum.x) Model[MyEnum](enum=2) @skip_36 def test_generic(): data_type = TypeVar('data_type') error_type = TypeVar('error_type') class Result(GenericModel, Generic[data_type, error_type]): data: Optional[List[data_type]] error: Optional[error_type] positive_number: int @validator('error', always=True) def validate_error(cls, v: Optional[error_type], values: Dict[str, Any]) -> Optional[error_type]: if values.get('data', None) is None and v is None: raise ValueError('Must provide data or error') if values.get('data', None) is not None and v is not None: raise ValueError('Must not provide both data and error') return v @validator('positive_number') def validate_positive_number(cls, v: int) -> int: if v < 0: raise ValueError return v class Error(BaseModel): message: str class Data(BaseModel): number: int text: str success1 = Result[Data, Error](data=[Data(number=1, text='a')], positive_number=1) assert success1.dict() == {'data': [{'number': 1, 'text': 'a'}], 'error': None, 'positive_number': 1} assert repr(success1) == "Result[Data, Error](data=[Data(number=1, text='a')], error=None, positive_number=1)" success2 = Result[Data, Error](error=Error(message='error'), positive_number=1) assert success2.dict() == {'data': None, 'error': {'message': 'error'}, 'positive_number': 1} assert repr(success2) == "Result[Data, Error](data=None, error=Error(message='error'), positive_number=1)" with pytest.raises(ValidationError) as exc_info: Result[Data, Error](error=Error(message='error'), positive_number=-1) assert exc_info.value.errors() == [{'loc': ('positive_number',), 'msg': '', 'type': 'value_error'}] with pytest.raises(ValidationError) as exc_info: 
Result[Data, Error](data=[Data(number=1, text='a')], error=Error(message='error'), positive_number=1) assert exc_info.value.errors() == [ {'loc': ('error',), 'msg': 'Must not provide both data and error', 'type': 'value_error'} ] @skip_36 def test_alongside_concrete_generics(): from pydantic.generics import GenericModel T = TypeVar('T') class MyModel(GenericModel, Generic[T]): item: T metadata: Dict[str, Any] model = MyModel[int](item=1, metadata={}) assert model.item == 1 assert model.metadata == {} @skip_36 def test_complex_nesting(): from pydantic.generics import GenericModel T = TypeVar('T') class MyModel(GenericModel, Generic[T]): item: List[Dict[Union[int, T], str]] item = [{1: 'a', 'a': 'a'}] model = MyModel[str](item=item) assert model.item == item @skip_36 def test_required_value(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): a: int with pytest.raises(ValidationError) as exc_info: MyModel[int]() assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] @skip_36 def test_optional_value(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): a: Optional[int] = 1 model = MyModel[int]() assert model.dict() == {'a': 1} @skip_36 def test_custom_schema(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): a: int = Field(1, description='Custom') schema = MyModel[int].schema() assert schema['properties']['a'].get('description') == 'Custom' @skip_36 def test_custom_generic_naming(): T = TypeVar('T') class MyModel(GenericModel, Generic[T]): value: Optional[T] @classmethod def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str: param_names = [param.__name__ if hasattr(param, '__name__') else str(param) for param in params] title = param_names[0].title() return f'Optional{title}Wrapper' assert repr(MyModel[int](value=1)) == 'OptionalIntWrapper(value=1)' assert repr(MyModel[str](value=None)) == 'OptionalStrWrapper(value=None)' pydantic-1.2/tests/test_json.py000066400000000000000000000136751357000400300167170ustar00rootroot00000000000000import datetime import json import sys from dataclasses import dataclass as vanilla_dataclass from decimal import Decimal from enum import Enum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import List from uuid import UUID import pytest from pydantic import BaseModel, create_model from pydantic.color import Color from pydantic.dataclasses import dataclass as pydantic_dataclass from pydantic.json import pydantic_encoder, timedelta_isoformat from pydantic.types import DirectoryPath, FilePath, SecretBytes, SecretStr class MyEnum(Enum): foo = 'bar' snap = 'crackle' @pytest.mark.parametrize( 'input,output', [ (UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8'), '"ebcdab58-6eb8-46fb-a190-d07a33e9eac8"'), (IPv4Address('192.168.0.1'), '"192.168.0.1"'), (Color('#000'), '"black"'), (Color((1, 12, 123)), '"#010c7b"'), (SecretStr('abcd'), '"**********"'), (SecretStr(''), '""'), (SecretBytes(b'xyz'), '"**********"'), (SecretBytes(b''), '""'), (IPv6Address('::1:0:1'), '"::1:0:1"'), (IPv4Interface('192.168.0.0/24'), '"192.168.0.0/24"'), (IPv6Interface('2001:db00::/120'), '"2001:db00::/120"'), (IPv4Network('192.168.0.0/24'), '"192.168.0.0/24"'), (IPv6Network('2001:db00::/120'), '"2001:db00::/120"'), (datetime.datetime(2032, 1, 1, 1, 1), '"2032-01-01T01:01:00"'), (datetime.datetime(2032, 1, 1, 1, 1, tzinfo=datetime.timezone.utc), '"2032-01-01T01:01:00+00:00"'), (datetime.datetime(2032, 1, 1), 
'"2032-01-01T00:00:00"'), (datetime.time(12, 34, 56), '"12:34:56"'), (datetime.timedelta(days=12, seconds=34, microseconds=56), '1036834.000056'), ({1, 2, 3}, '[1, 2, 3]'), (frozenset([1, 2, 3]), '[1, 2, 3]'), ((v for v in range(4)), '[0, 1, 2, 3]'), (b'this is bytes', '"this is bytes"'), (Decimal('12.34'), '12.34'), (create_model('BarModel', a='b', c='d')(), '{"a": "b", "c": "d"}'), (MyEnum.foo, '"bar"'), ], ) def test_encoding(input, output): assert output == json.dumps(input, default=pydantic_encoder) @pytest.mark.skipif(sys.platform.startswith('win'), reason='paths look different on windows') def test_path_encoding(tmpdir): class PathModel(BaseModel): path: Path file_path: FilePath dir_path: DirectoryPath tmpdir = Path(tmpdir) file_path = tmpdir / 'bar' file_path.touch() dir_path = tmpdir / 'baz' dir_path.mkdir() model = PathModel(path=Path('/path/test/example/'), file_path=file_path, dir_path=dir_path) expected = '{{"path": "/path/test/example", "file_path": "{}", "dir_path": "{}"}}'.format(file_path, dir_path) assert json.dumps(model, default=pydantic_encoder) == expected def test_model_encoding(): class ModelA(BaseModel): x: int y: str class Model(BaseModel): a: float b: bytes c: Decimal d: ModelA m = Model(a=10.2, b='foobar', c=10.2, d={'x': 123, 'y': '123'}) assert m.dict() == {'a': 10.2, 'b': b'foobar', 'c': Decimal('10.2'), 'd': {'x': 123, 'y': '123'}} assert m.json() == '{"a": 10.2, "b": "foobar", "c": 10.2, "d": {"x": 123, "y": "123"}}' assert m.json(exclude={'b'}) == '{"a": 10.2, "c": 10.2, "d": {"x": 123, "y": "123"}}' def test_invalid_model(): class Foo: pass with pytest.raises(TypeError): json.dumps(Foo, default=pydantic_encoder) @pytest.mark.parametrize( 'input,output', [ (datetime.timedelta(days=12, seconds=34, microseconds=56), 'P12DT0H0M34.000056S'), (datetime.timedelta(days=1001, hours=1, minutes=2, seconds=3, microseconds=654_321), 'P1001DT1H2M3.654321S'), ], ) def test_iso_timedelta(input, output): assert output == timedelta_isoformat(input) def test_custom_encoder(): class Model(BaseModel): x: datetime.timedelta y: Decimal z: datetime.date class Config: json_encoders = {datetime.timedelta: lambda v: f'{v.total_seconds():0.3f}s', Decimal: lambda v: 'a decimal'} assert Model(x=123, y=5, z='2032-06-01').json() == '{"x": "123.000s", "y": "a decimal", "z": "2032-06-01"}' def test_custom_iso_timedelta(): class Model(BaseModel): x: datetime.timedelta class Config: json_encoders = {datetime.timedelta: timedelta_isoformat} m = Model(x=123) assert m.json() == '{"x": "P0DT0H2M3.000000S"}' def test_custom_encoder_arg(): class Model(BaseModel): x: datetime.timedelta m = Model(x=123) assert m.json() == '{"x": 123.0}' assert m.json(encoder=lambda v: '__default__') == '{"x": "__default__"}' def test_encode_dataclass(): @vanilla_dataclass class Foo: bar: int spam: str f = Foo(bar=123, spam='apple pie') assert '{"bar": 123, "spam": "apple pie"}' == json.dumps(f, default=pydantic_encoder) def test_encode_pydantic_dataclass(): @pydantic_dataclass class Foo: bar: int spam: str f = Foo(bar=123, spam='apple pie') assert '{"bar": 123, "spam": "apple pie"}' == json.dumps(f, default=pydantic_encoder) def test_encode_custom_root(): class Model(BaseModel): __root__: List[str] assert Model(__root__=['a', 'b']).json() == '["a", "b"]' def test_custom_decode_encode(): load_calls, dump_calls = 0, 0 def custom_loads(s): nonlocal load_calls load_calls += 1 return json.loads(s.strip('$')) def custom_dumps(s, default=None, **kwargs): nonlocal dump_calls dump_calls += 1 return json.dumps(s, 
default=default, indent=2) class Model(BaseModel): a: int b: str class Config: json_loads = custom_loads json_dumps = custom_dumps m = Model.parse_raw('${"a": 1, "b": "foo"}$$') assert m.dict() == {'a': 1, 'b': 'foo'} assert m.json() == '{\n "a": 1,\n "b": "foo"\n}' pydantic-1.2/tests/test_main.py000066400000000000000000000675131357000400300166720ustar00rootroot00000000000000from enum import Enum from typing import Any, ClassVar, List, Mapping, Type import pytest from pydantic import BaseModel, Extra, Field, NoneBytes, NoneStr, Required, ValidationError, constr def test_success(): # same as below but defined here so class definition occurs inside the test class Model(BaseModel): a: float b: int = 10 m = Model(a=10.2) assert m.a == 10.2 assert m.b == 10 class UltraSimpleModel(BaseModel): a: float b: int = 10 def test_ultra_simple_missing(): with pytest.raises(ValidationError) as exc_info: UltraSimpleModel() assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_ultra_simple_failed(): with pytest.raises(ValidationError) as exc_info: UltraSimpleModel(a='x', b='x') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] def test_ultra_simple_repr(): m = UltraSimpleModel(a=10.2) assert str(m) == 'a=10.2 b=10' assert repr(m) == 'UltraSimpleModel(a=10.2, b=10)' assert repr(m.__fields__['a']) == "ModelField(name='a', type=float, required=True)" assert repr(m.__fields__['b']) == "ModelField(name='b', type=int, required=False, default=10)" assert dict(m) == {'a': 10.2, 'b': 10} assert m.dict() == {'a': 10.2, 'b': 10} assert m.json() == '{"a": 10.2, "b": 10}' with pytest.raises(DeprecationWarning, match=r'`model.to_string\(\)` method is deprecated'): assert m.to_string() == 'a=10.2 b=10' def test_comparing(): m = UltraSimpleModel(a=10.2, b='100') assert m == {'a': 10.2, 'b': 100} assert m == UltraSimpleModel(a=10.2, b=100) def test_nullable_strings_success(): class NoneCheckModel(BaseModel): existing_str_value = 'foo' required_str_value: str = ... required_str_none_value: NoneStr = ... existing_bytes_value = b'foo' required_bytes_value: bytes = ... required_bytes_none_value: NoneBytes = ... m = NoneCheckModel( required_str_value='v1', required_str_none_value=None, required_bytes_value='v2', required_bytes_none_value=None ) assert m.required_str_value == 'v1' assert m.required_str_none_value is None assert m.required_bytes_value == b'v2' assert m.required_bytes_none_value is None def test_nullable_strings_fails(): class NoneCheckModel(BaseModel): existing_str_value = 'foo' required_str_value: str = ... required_str_none_value: NoneStr = ... existing_bytes_value = b'foo' required_bytes_value: bytes = ... required_bytes_none_value: NoneBytes = ... with pytest.raises(ValidationError) as exc_info: NoneCheckModel( required_str_value=None, required_str_none_value=None, required_bytes_value=None, required_bytes_none_value=None, ) assert exc_info.value.errors() == [ {'loc': ('required_str_value',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed'}, { 'loc': ('required_bytes_value',), 'msg': 'none is not an allowed value', 'type': 'type_error.none.not_allowed', }, ] class RecursiveModel(BaseModel): grape: bool = ... banana: UltraSimpleModel = ... 
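# RecursiveModel nests UltraSimpleModel: the tests below check that a plain dict is coerced into the nested model (banana={'a': 1} -> UltraSimpleModel(a=1.0, b=10)) and that a non-dict value fails validation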
def test_recursion(): m = RecursiveModel(grape=1, banana={'a': 1}) assert m.grape is True assert m.banana.a == 1.0 assert m.banana.b == 10 assert repr(m) == 'RecursiveModel(grape=True, banana=UltraSimpleModel(a=1.0, b=10))' def test_recursion_fails(): with pytest.raises(ValidationError): RecursiveModel(grape=1, banana=123) def test_not_required(): class Model(BaseModel): a: float = None assert Model(a=12.2).a == 12.2 assert Model().a is None assert Model(a=None).a is None def test_infer_type(): class Model(BaseModel): a = False b = '' c = 0 assert Model().a is False assert Model().b == '' assert Model().c == 0 def test_allow_extra(): class Model(BaseModel): a: float = ... class Config: extra = Extra.allow assert Model(a='10.2', b=12).dict() == {'a': 10.2, 'b': 12} def test_forbidden_extra_success(): class ForbiddenExtra(BaseModel): foo = 'whatever' class Config: extra = Extra.forbid m = ForbiddenExtra() assert m.foo == 'whatever' m = ForbiddenExtra(foo=1) assert m.foo == '1' def test_forbidden_extra_fails(): class ForbiddenExtra(BaseModel): foo = 'whatever' class Config: extra = Extra.forbid with pytest.raises(ValidationError) as exc_info: ForbiddenExtra(foo='ok', bar='wrong', spam='xx') assert exc_info.value.errors() == [ {'loc': ('bar',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'}, {'loc': ('spam',), 'msg': 'extra fields not permitted', 'type': 'value_error.extra'}, ] def test_disallow_mutation(): class Model(BaseModel): a: float model = Model(a=0.2) with pytest.raises(ValueError, match='"Model" object has no field "b"'): model.b = 2 def test_extra_allowed(): class Model(BaseModel): a: float class Config: extra = Extra.allow model = Model(a=0.2, b=0.1) assert model.b == 0.1 assert not hasattr(model, 'c') model.c = 1 assert hasattr(model, 'c') assert model.c == 1 def test_extra_ignored(): class Model(BaseModel): a: float class Config: extra = Extra.ignore model = Model(a=0.2, b=0.1) assert not hasattr(model, 'b') with pytest.raises(ValueError, match='"Model" object has no field "c"'): model.c = 1 def test_set_attr(): m = UltraSimpleModel(a=10.2) assert m.dict() == {'a': 10.2, 'b': 10} m.b = 20 assert m.dict() == {'a': 10.2, 'b': 20} def test_set_attr_invalid(): class UltraSimpleModel(BaseModel): a: float = ... 
b: int = 10 m = UltraSimpleModel(a=10.2) assert m.dict() == {'a': 10.2, 'b': 10} with pytest.raises(ValueError) as exc_info: m.c = 20 assert '"UltraSimpleModel" object has no field "c"' in exc_info.value.args[0] def test_any(): class AnyModel(BaseModel): a: Any = 10 assert AnyModel().a == 10 assert AnyModel(a='foobar').a == 'foobar' def test_alias(): class SubModel(BaseModel): c = 'barfoo' class Config: fields = {'c': {'alias': '_c'}} class Model(BaseModel): a = 'foobar' b: SubModel = SubModel() class Config: fields = {'a': {'alias': '_a'}} assert Model().a == 'foobar' assert Model().b.c == 'barfoo' assert Model().dict() == {'a': 'foobar', 'b': {'c': 'barfoo'}} assert Model(_a='different').a == 'different' assert Model(b={'_c': 'different'}).b.c == 'different' assert Model(_a='different', b={'_c': 'different'}).dict() == {'a': 'different', 'b': {'c': 'different'}} assert Model(_a='different', b={'_c': 'different'}).dict(by_alias=True) == { '_a': 'different', 'b': {'_c': 'different'}, } def test_population_by_field_name(): class Model(BaseModel): a: str class Config: allow_population_by_field_name = True fields = {'a': {'alias': '_a'}} assert Model(a='different').a == 'different' assert Model(a='different').dict() == {'a': 'different'} assert Model(a='different').dict(by_alias=True) == {'_a': 'different'} def test_field_order(): class Model(BaseModel): c: float b: int = 10 a: str d: dict = {} assert list(Model.__fields__.keys()) == ['c', 'b', 'a', 'd'] def test_required(): # same as below but defined here so class definition occurs inside the test class Model(BaseModel): a: float = Required b: int = 10 m = Model(a=10.2) assert m.dict() == dict(a=10.2, b=10) with pytest.raises(ValidationError) as exc_info: Model() assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_not_immutability(): class TestModel(BaseModel): a: int = 10 class Config: allow_mutation = True extra = Extra.forbid m = TestModel() assert m.a == 10 m.a = 11 assert m.a == 11 with pytest.raises(ValueError) as exc_info: m.b = 11 assert '"TestModel" object has no field "b"' in exc_info.value.args[0] def test_immutability(): class TestModel(BaseModel): a: int = 10 class Config: allow_mutation = False extra = Extra.forbid m = TestModel() assert m.a == 10 with pytest.raises(TypeError) as exc_info: m.a = 11 assert '"TestModel" is immutable and does not support item assignment' in exc_info.value.args[0] with pytest.raises(ValueError) as exc_info: m.b = 11 assert '"TestModel" object has no field "b"' in exc_info.value.args[0] def test_const_validates(): class Model(BaseModel): a: int = Field(3, const=True) m = Model(a=3) assert m.a == 3 def test_const_uses_default(): class Model(BaseModel): a: int = Field(3, const=True) m = Model() assert m.a == 3 def test_const_with_wrong_value(): class Model(BaseModel): a: int = Field(3, const=True) with pytest.raises(ValidationError) as exc_info: Model(a=4) assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'unexpected value; permitted: 3', 'type': 'value_error.const', 'ctx': {'given': 4, 'permitted': [3]}, } ] def test_const_list(): class SubModel(BaseModel): b: int class Model(BaseModel): a: List[SubModel] = Field([SubModel(b=1), SubModel(b=2), SubModel(b=3)], const=True) b: List[SubModel] = Field([{'b': 4}, {'b': 5}, {'b': 6}], const=True) m = Model() assert m.a == [SubModel(b=1), SubModel(b=2), SubModel(b=3)] assert m.b == [SubModel(b=4), SubModel(b=5), SubModel(b=6)] assert m.schema() == { 'definitions': { 'SubModel': { 
'properties': {'b': {'title': 'B', 'type': 'integer'}}, 'required': ['b'], 'title': 'SubModel', 'type': 'object', } }, 'properties': { 'a': { 'const': [SubModel(b=1), SubModel(b=2), SubModel(b=3)], 'items': {'$ref': '#/definitions/SubModel'}, 'title': 'A', 'type': 'array', }, 'b': { 'const': [{'b': 4}, {'b': 5}, {'b': 6}], 'items': {'$ref': '#/definitions/SubModel'}, 'title': 'B', 'type': 'array', }, }, 'title': 'Model', 'type': 'object', } def test_const_list_with_wrong_value(): class SubModel(BaseModel): b: int class Model(BaseModel): a: List[SubModel] = Field([SubModel(b=1), SubModel(b=2), SubModel(b=3)], const=True) b: List[SubModel] = Field([{'b': 4}, {'b': 5}, {'b': 6}], const=True) with pytest.raises(ValidationError) as exc_info: Model(a=[{'b': 3}, {'b': 1}, {'b': 2}], b=[{'b': 6}, {'b': 5}]) assert exc_info.value.errors() == [ { 'ctx': { 'given': [{'b': 3}, {'b': 1}, {'b': 2}], 'permitted': [[SubModel(b=1), SubModel(b=2), SubModel(b=3)]], }, 'loc': ('a',), 'msg': 'unexpected value; permitted: [SubModel(b=1), SubModel(b=2), SubModel(b=3)]', 'type': 'value_error.const', }, { 'ctx': {'given': [{'b': 6}, {'b': 5}], 'permitted': [[{'b': 4}, {'b': 5}, {'b': 6}]]}, 'loc': ('b',), 'msg': "unexpected value; permitted: [{'b': 4}, {'b': 5}, {'b': 6}]", 'type': 'value_error.const', }, ] assert exc_info.value.json().startswith('[') with pytest.raises(ValidationError) as exc_info: Model(a=[SubModel(b=3), SubModel(b=1), SubModel(b=2)], b=[SubModel(b=3), SubModel(b=1)]) assert exc_info.value.errors() == [ { 'ctx': { 'given': [SubModel(b=3), SubModel(b=1), SubModel(b=2)], 'permitted': [[SubModel(b=1), SubModel(b=2), SubModel(b=3)]], }, 'loc': ('a',), 'msg': 'unexpected value; permitted: [SubModel(b=1), SubModel(b=2), SubModel(b=3)]', 'type': 'value_error.const', }, { 'ctx': {'given': [SubModel(b=3), SubModel(b=1)], 'permitted': [[{'b': 4}, {'b': 5}, {'b': 6}]]}, 'loc': ('b',), 'msg': "unexpected value; permitted: [{'b': 4}, {'b': 5}, {'b': 6}]", 'type': 'value_error.const', }, ] assert exc_info.value.json().startswith('[') def test_const_validation_json_serializable(): class SubForm(BaseModel): field: int class Form(BaseModel): field1: SubForm = Field({'field': 2}, const=True) field2: List[SubForm] = Field([{'field': 2}], const=True) with pytest.raises(ValidationError) as exc_info: # Fails Form(field1={'field': 1}, field2=[{'field': 1}]) # This should not raise an Json error exc_info.value.json() class ValidateAssignmentModel(BaseModel): a: int = 2 b: constr(min_length=1) class Config: validate_assignment = True def test_validating_assignment_pass(): p = ValidateAssignmentModel(a=5, b='hello') p.a = 2 assert p.a == 2 assert p.dict() == {'a': 2, 'b': 'hello'} p.b = 'hi' assert p.b == 'hi' assert p.dict() == {'a': 2, 'b': 'hi'} def test_validating_assignment_fail(): p = ValidateAssignmentModel(a=5, b='hello') with pytest.raises(ValidationError) as exc_info: p.a = 'b' assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] with pytest.raises(ValidationError) as exc_info: p.b = '' assert exc_info.value.errors() == [ { 'loc': ('b',), 'msg': 'ensure this value has at least 1 characters', 'type': 'value_error.any_str.min_length', 'ctx': {'limit_value': 1}, } ] def test_enum_values(): FooEnum = Enum('FooEnum', {'foo': 'foo', 'bar': 'bar'}) class Model(BaseModel): foo: FooEnum = None class Config: use_enum_values = True m = Model(foo='foo') # this is the actual value, so has not "values" field assert not isinstance(m.foo, FooEnum) assert 
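# --- editor's sketch (illustrative only, not part of the original test file) ---
# With `Config.validate_assignment = True` (as used by ValidateAssignmentModel above),
# attribute assignment re-runs validation instead of storing the raw value:
from pydantic import BaseModel, ValidationError

class Job(BaseModel):
    retries: int = 0
    class Config:
        validate_assignment = True

job = Job()
job.retries = '2'  # coerced to an int on assignment
assert job.retries == 2
try:
    job.retries = 'lots'
except ValidationError as exc:
    assert exc.errors()[0]['type'] == 'type_error.integer'
# --- end editor's sketch ---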
m.foo == 'foo' def test_enum_raw(): FooEnum = Enum('FooEnum', {'foo': 'foo', 'bar': 'bar'}) class Model(BaseModel): foo: FooEnum = None m = Model(foo='foo') assert isinstance(m.foo, FooEnum) assert m.foo != 'foo' assert m.foo.value == 'foo' def test_set_tuple_values(): class Model(BaseModel): foo: set bar: tuple m = Model(foo=['a', 'b'], bar=['c', 'd']) assert m.foo == {'a', 'b'} assert m.bar == ('c', 'd') assert m.dict() == {'foo': {'a', 'b'}, 'bar': ('c', 'd')} def test_default_copy(): class User(BaseModel): friends: List[int] = [] u1 = User() u2 = User() assert u1.friends is not u2.friends class ArbitraryType: pass def test_arbitrary_type_allowed_validation_success(): class ArbitraryTypeAllowedModel(BaseModel): t: ArbitraryType class Config: arbitrary_types_allowed = True arbitrary_type_instance = ArbitraryType() m = ArbitraryTypeAllowedModel(t=arbitrary_type_instance) assert m.t == arbitrary_type_instance def test_arbitrary_type_allowed_validation_fails(): class ArbitraryTypeAllowedModel(BaseModel): t: ArbitraryType class Config: arbitrary_types_allowed = True class C: pass with pytest.raises(ValidationError) as exc_info: ArbitraryTypeAllowedModel(t=C()) assert exc_info.value.errors() == [ { 'loc': ('t',), 'msg': 'instance of ArbitraryType expected', 'type': 'type_error.arbitrary_type', 'ctx': {'expected_arbitrary_type': 'ArbitraryType'}, } ] def test_arbitrary_types_not_allowed(): with pytest.raises(RuntimeError) as exc_info: class ArbitraryTypeNotAllowedModel(BaseModel): t: ArbitraryType assert exc_info.value.args[0].startswith('no validator found for') def test_type_type_validation_success(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] arbitrary_type_class = ArbitraryType m = ArbitraryClassAllowedModel(t=arbitrary_type_class) assert m.t == arbitrary_type_class def test_type_type_subclass_validation_success(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] class ArbitrarySubType(ArbitraryType): pass arbitrary_type_class = ArbitrarySubType m = ArbitraryClassAllowedModel(t=arbitrary_type_class) assert m.t == arbitrary_type_class def test_type_type_validation_fails_for_instance(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] class C: pass with pytest.raises(ValidationError) as exc_info: ArbitraryClassAllowedModel(t=C) assert exc_info.value.errors() == [ { 'loc': ('t',), 'msg': 'subclass of ArbitraryType expected', 'type': 'type_error.subclass', 'ctx': {'expected_class': 'ArbitraryType'}, } ] def test_type_type_validation_fails_for_basic_type(): class ArbitraryClassAllowedModel(BaseModel): t: Type[ArbitraryType] with pytest.raises(ValidationError) as exc_info: ArbitraryClassAllowedModel(t=1) assert exc_info.value.errors() == [ { 'loc': ('t',), 'msg': 'subclass of ArbitraryType expected', 'type': 'type_error.subclass', 'ctx': {'expected_class': 'ArbitraryType'}, } ] def test_bare_type_type_validation_success(): class ArbitraryClassAllowedModel(BaseModel): t: Type arbitrary_type_class = ArbitraryType m = ArbitraryClassAllowedModel(t=arbitrary_type_class) assert m.t == arbitrary_type_class def test_bare_type_type_validation_fails(): class ArbitraryClassAllowedModel(BaseModel): t: Type arbitrary_type = ArbitraryType() with pytest.raises(ValidationError) as exc_info: ArbitraryClassAllowedModel(t=arbitrary_type) assert exc_info.value.errors() == [{'loc': ('t',), 'msg': 'a class is expected', 'type': 'type_error.class'}] def test_annotation_field_name_shadows_attribute(): with pytest.raises(NameError): # When defining a model 
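# --- editor's sketch (illustrative only, not part of the original test file) ---
# `Config.arbitrary_types_allowed = True` lets a model use a class pydantic has no
# validator for; validation degrades to a plain isinstance() check. Hypothetical names:
from pydantic import BaseModel

class Engine:  # a plain class pydantic knows nothing about
    pass

class Car(BaseModel):
    engine: Engine
    class Config:
        arbitrary_types_allowed = True

assert isinstance(Car(engine=Engine()).engine, Engine)
# without the config flag, defining Car raises the "no validator found" RuntimeError
# shown in the tests above
# --- end editor's sketch ---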
that has an attribute with the name of a built-in attribute, an exception is raised class BadModel(BaseModel): schema: str # This conflicts with the BaseModel's schema() class method def test_value_field_name_shadows_attribute(): # When defining a model that has an attribute with the name of a built-in attribute, an exception is raised with pytest.raises(NameError): class BadModel(BaseModel): schema = 'abc' # This conflicts with the BaseModel's schema() class method def test_class_var(): class MyModel(BaseModel): a: ClassVar b: ClassVar[int] = 1 c: int = 2 assert list(MyModel.__fields__.keys()) == ['c'] def test_fields_set(): class MyModel(BaseModel): a: int b: int = 2 m = MyModel(a=5) assert m.__fields_set__ == {'a'} m.b = 2 assert m.__fields_set__ == {'a', 'b'} m = MyModel(a=5, b=2) assert m.__fields_set__ == {'a', 'b'} def test_exclude_unset_dict(): class MyModel(BaseModel): a: int b: int = 2 m = MyModel(a=5) assert m.dict(exclude_unset=True) == {'a': 5} m = MyModel(a=5, b=3) assert m.dict(exclude_unset=True) == {'a': 5, 'b': 3} def test_exclude_unset_recursive(): class ModelA(BaseModel): a: int b: int = 1 class ModelB(BaseModel): c: int d: int = 2 e: ModelA m = ModelB(c=5, e={'a': 0}) assert m.dict() == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}} assert m.dict(exclude_unset=True) == {'c': 5, 'e': {'a': 0}} assert dict(m) == {'c': 5, 'd': 2, 'e': {'a': 0, 'b': 1}} def test_dict_exclude_unset_populated_by_alias(): class MyModel(BaseModel): a: str = Field('default', alias='alias_a') b: str = Field('default', alias='alias_b') class Config: allow_population_by_field_name = True m = MyModel(alias_a='a') assert m.dict(exclude_unset=True) == {'a': 'a'} assert m.dict(exclude_unset=True, by_alias=True) == {'alias_a': 'a'} def test_dict_exclude_unset_populated_by_alias_with_extra(): class MyModel(BaseModel): a: str = Field('default', alias='alias_a') b: str = Field('default', alias='alias_b') class Config: extra = 'allow' m = MyModel(alias_a='a', c='c') assert m.dict(exclude_unset=True) == {'a': 'a', 'c': 'c'} assert m.dict(exclude_unset=True, by_alias=True) == {'alias_a': 'a', 'c': 'c'} def test_dir_fields(): class MyModel(BaseModel): attribute_a: int attribute_b: int = 2 m = MyModel(attribute_a=5) assert 'dict' in dir(m) assert 'json' in dir(m) assert 'attribute_a' in dir(m) assert 'attribute_b' in dir(m) def test_dict_with_extra_keys(): class MyModel(BaseModel): a: str = Field(None, alias='alias_a') class Config: extra = Extra.allow m = MyModel(extra_key='extra') assert m.dict() == {'a': None, 'extra_key': 'extra'} assert m.dict(by_alias=True) == {'alias_a': None, 'extra_key': 'extra'} def test_alias_generator(): def to_camel(string: str): return ''.join(x.capitalize() for x in string.split('_')) class MyModel(BaseModel): a: List[str] = None foo_bar: str class Config: alias_generator = to_camel data = {'A': ['foo', 'bar'], 'FooBar': 'foobar'} v = MyModel(**data) assert v.a == ['foo', 'bar'] assert v.foo_bar == 'foobar' assert v.dict(by_alias=True) == data def test_alias_generator_with_field_schema(): def to_upper_case(string: str): return string.upper() class MyModel(BaseModel): my_shiny_field: Any # Alias from Config.fields will be used foo_bar: str # Alias from Config.fields will be used baz_bar: str # Alias will be generated another_field: str # Alias will be generated class Config: alias_generator = to_upper_case fields = {'my_shiny_field': 'MY_FIELD', 'foo_bar': {'alias': 'FOO'}, 'another_field': {'not_alias': 'a'}} data = {'MY_FIELD': ['a'], 'FOO': 'bar', 'BAZ_BAR': 'ok', 'ANOTHER_FIELD': 
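# --- editor's sketch (illustrative only, not part of the original test file) ---
# `__fields_set__` records which fields were supplied explicitly, and
# `.dict(exclude_unset=True)` uses it to omit untouched defaults. Hypothetical names:
from pydantic import BaseModel

class Settings(BaseModel):
    host: str = 'localhost'
    port: int = 8000

s = Settings(port=5432)
assert s.__fields_set__ == {'port'}
assert s.dict(exclude_unset=True) == {'port': 5432}  # the 'host' default is omitted
# --- end editor's sketch ---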
'...'} m = MyModel(**data) assert m.dict(by_alias=True) == data def test_alias_generator_wrong_type_error(): def return_bytes(string): return b'not a string' with pytest.raises(TypeError) as e: class MyModel(BaseModel): bar: Any class Config: alias_generator = return_bytes assert str(e.value) == "Config.alias_generator must return str, not <class 'bytes'>" def test_root(): class MyModel(BaseModel): __root__: str m = MyModel(__root__='a') assert m.dict() == {'__root__': 'a'} assert m.__root__ == 'a' def test_root_list(): class MyModel(BaseModel): __root__: List[str] m = MyModel(__root__=['a']) assert m.dict() == {'__root__': ['a']} assert m.__root__ == ['a'] def test_root_failed(): with pytest.raises(ValueError, match='__root__ cannot be mixed with other fields'): class MyModel(BaseModel): __root__: str a: str def test_root_undefined_failed(): class MyModel(BaseModel): a: List[str] with pytest.raises(ValidationError) as exc_info: MyModel(__root__=['a']) assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_parse_root_as_mapping(): class MyModel(BaseModel): __root__: Mapping[str, str] assert MyModel.parse_obj({1: 2}).__root__ == {'1': '2'} with pytest.raises(ValidationError) as exc_info: MyModel.parse_obj({'__root__': {'1': '2'}}) assert exc_info.value.errors() == [ {'loc': ('__root__', '__root__'), 'msg': 'str type expected', 'type': 'type_error.str'} ] def test_parse_obj_non_mapping_root(): class MyModel(BaseModel): __root__: List[str] assert MyModel.parse_obj(['a']).__root__ == ['a'] assert MyModel.parse_obj({'__root__': ['a']}).__root__ == ['a'] with pytest.raises(ValidationError) as exc_info: MyModel.parse_obj({'__not_root__': ['a']}) assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'value is not a valid list', 'type': 'type_error.list'} ] with pytest.raises(ValidationError): MyModel.parse_obj({'__root__': ['a'], 'other': 1}) assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'value is not a valid list', 'type': 'type_error.list'} ] def test_untouched_types(): from pydantic import BaseModel class _ClassPropertyDescriptor: def __init__(self, getter): self.getter = getter def __get__(self, instance, owner): return self.getter(owner) classproperty = _ClassPropertyDescriptor class Model(BaseModel): class Config: keep_untouched = (classproperty,) @classproperty def class_name(cls) -> str: return cls.__name__ assert Model.class_name == 'Model' assert Model().class_name == 'Model' def test_custom_types_fail_without_keep_untouched(): from pydantic import BaseModel class _ClassPropertyDescriptor: def __init__(self, getter): self.getter = getter def __get__(self, instance, owner): return self.getter(owner) classproperty = _ClassPropertyDescriptor with pytest.raises(RuntimeError) as e: class Model(BaseModel): @classproperty def class_name(cls) -> str: return cls.__name__ Model.class_name assert str(e.value) == ( "no validator found for <class 'tests.test_main.test_custom_types_fail_without_keep_untouched.<locals>."
"_ClassPropertyDescriptor'>, see `arbitrary_types_allowed` in Config" ) class Model(BaseModel): class Config: arbitrary_types_allowed = True @classproperty def class_name(cls) -> str: return cls.__name__ with pytest.raises(AttributeError) as e: Model.class_name assert str(e.value) == "type object 'Model' has no attribute 'class_name'" def test_model_iteration(): class Foo(BaseModel): a: int = 1 b: int = 2 class Bar(BaseModel): c: int d: Foo m = Bar(c=3, d={}) assert m.dict() == {'c': 3, 'd': {'a': 1, 'b': 2}} assert list(m) == [('c', 3), ('d', Foo())] assert dict(m) == {'c': 3, 'd': Foo()} def test_custom_init_subclass_params(): class DerivedModel(BaseModel): def __init_subclass__(cls, something): cls.something = something # if this raises a TypeError, then there is a regression of issue 867: # pydantic.main.MetaModel.__new__ should include **kwargs at the end of the # method definition and pass them on to the super call at the end in order # to allow the special method __init_subclass__ to be defined with custom # parameters on extended BaseModel classes. class NewModel(DerivedModel, something=2): something = 1 assert NewModel.something == 2 pydantic-1.2/tests/test_networks.py000066400000000000000000000334531357000400300176160ustar00rootroot00000000000000import pytest from pydantic import AnyUrl, BaseModel, HttpUrl, PostgresDsn, RedisDsn, ValidationError, stricturl from pydantic.networks import validate_email try: import email_validator except ImportError: email_validator = None @pytest.mark.parametrize( 'value', [ 'http://example.org', 'http://test', 'http://localhost', 'https://example.org/whatever/next/', 'postgres://user:pass@localhost:5432/app', 'postgres://just-user@localhost:5432/app', 'https://example.org', 'http://localhost', 'http://localhost/', 'http://localhost:8000', 'http://localhost:8000/', 'https://foo_bar.example.com/', 'ftp://example.org', 'ftps://example.org', 'http://example.co.jp', 'http://www.example.com/a%C2%B1b', 'http://www.example.com/~username/', 'http://info.example.com?fred', 'http://info.example.com/?fred', 'http://xn--mgbh0fb.xn--kgbechtv/', 'http://example.com/blue/red%3Fand+green', 'http://www.example.com/?array%5Bkey%5D=value', 'http://xn--rsum-bpad.example.org/', 'http://123.45.67.8/', 'http://123.45.67.8:8329/', 'http://[2001:db8::ff00:42]:8329', 'http://[2001::1]:8329', 'http://[2001:db8::1]/', 'http://www.example.com:8000/foo', 'http://www.cwi.nl:80/%7Eguido/Python.html', 'https://www.python.org/путь', 'http://андрей@example.com', AnyUrl('https://example.com', scheme='https', host='example.com'), 'https://exam_ple.com/', ], ) def test_any_url_success(value): class Model(BaseModel): v: AnyUrl assert Model(v=value).v, value @pytest.mark.parametrize( 'value,err_type,err_msg,err_ctx', [ ('http:///example.com/', 'value_error.url.host', 'URL host invalid', None), ('https:///example.com/', 'value_error.url.host', 'URL host invalid', None), ('http://.example.com:8000/foo', 'value_error.url.host', 'URL host invalid', None), ('https://example.org\\', 'value_error.url.host', 'URL host invalid', None), ('https://exampl$e.org', 'value_error.url.host', 'URL host invalid', None), ('http://??', 'value_error.url.host', 'URL host invalid', None), ('http://.', 'value_error.url.host', 'URL host invalid', None), ('http://..', 'value_error.url.host', 'URL host invalid', None), ( 'https://example.org more', 'value_error.url.extra', "URL invalid, extra characters found after valid URL: ' more'", {'extra': ' more'}, ), ('$https://example.org', 'value_error.url.scheme', 
'invalid or missing URL scheme', None), ('../icons/logo.gif', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('abc', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('..', 'value_error.url.scheme', 'invalid or missing URL scheme', None), ('/', 'value_error.url.scheme', 'invalid or missing URL scheme', None), (' ', 'value_error.any_str.min_length', 'ensure this value has at least 1 characters', {'limit_value': 1}), ('', 'value_error.any_str.min_length', 'ensure this value has at least 1 characters', {'limit_value': 1}), (None, 'type_error.none.not_allowed', 'none is not an allowed value', None), ( 'http://2001:db8::ff00:42:8329', 'value_error.url.extra', "URL invalid, extra characters found after valid URL: ':db8::ff00:42:8329'", {'extra': ':db8::ff00:42:8329'}, ), ('http://[192.168.1.1]:8329', 'value_error.url.host', 'URL host invalid', None), ], ) def test_any_url_invalid(value, err_type, err_msg, err_ctx): class Model(BaseModel): v: AnyUrl with pytest.raises(ValidationError) as exc_info: Model(v=value) assert len(exc_info.value.errors()) == 1, exc_info.value.errors() error = exc_info.value.errors()[0] # debug(error) assert error['type'] == err_type, value assert error['msg'] == err_msg, value assert error.get('ctx') == err_ctx, value def test_any_url_obj(): class Model(BaseModel): v: AnyUrl url = Model(v='http://example.org').v assert str(url) == 'http://example.org' assert repr(url) == "AnyUrl('http://example.org', scheme='http', host='example.org', tld='org', host_type='domain')" assert url.scheme == 'http' assert url.host == 'example.org' assert url.tld == 'org' assert url.host_type == 'domain' assert url.port is None assert url == AnyUrl('http://example.org', scheme='https', host='example.org') url2 = Model(v='http://user:password@example.org:1234/the/path/?query=here#fragment=is;this=bit').v assert str(url2) == 'http://user:password@example.org:1234/the/path/?query=here#fragment=is;this=bit' assert repr(url2) == ( "AnyUrl('http://user:password@example.org:1234/the/path/?query=here#fragment=is;this=bit', " "scheme='http', user='user', password='password', host='example.org', tld='org', host_type='domain', " "port='1234', path='/the/path/', query='query=here', fragment='fragment=is;this=bit')" ) assert url2.scheme == 'http' assert url2.user == 'user' assert url2.password == 'password' assert url2.host == 'example.org' assert url.host_type == 'domain' assert url2.port == '1234' assert url2.path == '/the/path/' assert url2.query == 'query=here' assert url2.fragment == 'fragment=is;this=bit' url3 = Model(v='ftp://123.45.67.8:8329/').v assert url3.scheme == 'ftp' assert url3.host == '123.45.67.8' assert url3.host_type == 'ipv4' assert url3.port == '8329' assert url3.user is None assert url3.password is None url4 = Model(v='wss://[2001:db8::ff00:42]:8329').v assert url4.scheme == 'wss' assert url4.host == '[2001:db8::ff00:42]' assert url4.host_type == 'ipv6' assert url4.port == '8329' url5 = Model(v='https://£££.org').v assert url5.host == 'xn--9aaa.org' assert url5.host_type == 'int_domain' assert str(url5) == 'https://xn--9aaa.org' url6 = Model(v='http://example.co.uk').v assert str(url6) == 'http://example.co.uk' assert url6.scheme == 'http' assert url6.host == 'example.co.uk' assert url6.tld == 'uk' # wrong but no better solution assert url6.host_type == 'domain' url7 = Model(v='http://user:@example.org').v assert url7.user == 'user' assert url7.password == '' assert url7.host == 'example.org' @pytest.mark.parametrize( 'value', [ 
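# --- editor's sketch (illustrative only, not part of the original test file) ---
# As test_any_url_obj above shows, a validated AnyUrl is a str subclass carrying the
# parsed parts as attributes; note that in pydantic v1 the port stays a string:
from pydantic import AnyUrl, BaseModel

class Resource(BaseModel):
    url: AnyUrl

u = Resource(url='https://user:pw@example.org:8080/path?q=1#frag').url
assert (u.scheme, u.user, u.host, u.port) == ('https', 'user', 'example.org', '8080')
assert (u.path, u.query, u.fragment) == ('/path', 'q=1', 'frag')
# --- end editor's sketch ---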
'http://example.org', 'http://example.org/foobar', 'http://example.org.', 'http://example.org./foobar', 'HTTP://EXAMPLE.ORG', 'https://example.org', 'https://example.org?a=1&b=2', 'https://example.org#a=3;b=3', 'https://foo_bar.example.com/', 'https://exam_ple.com/', # should perhaps fail? I think it's contrary to the RFC but chrome allows it ], ) def test_http_url_success(value): class Model(BaseModel): v: HttpUrl assert Model(v=value).v == value, value @pytest.mark.parametrize( 'value,err_type,err_msg,err_ctx', [ ( 'ftp://example.com/', 'value_error.url.scheme', 'URL scheme not permitted', {'allowed_schemes': {'https', 'http'}}, ), ('http://foobar/', 'value_error.url.host', 'URL host invalid, top level domain required', None), ('http://localhost/', 'value_error.url.host', 'URL host invalid, top level domain required', None), ( 'x' * 2084, 'value_error.any_str.max_length', 'ensure this value has at most 2083 characters', {'limit_value': 2083}, ), ], ) def test_http_url_invalid(value, err_type, err_msg, err_ctx): class Model(BaseModel): v: HttpUrl with pytest.raises(ValidationError) as exc_info: Model(v=value) assert len(exc_info.value.errors()) == 1, exc_info.value.errors() error = exc_info.value.errors()[0] assert error['type'] == err_type, value assert error['msg'] == err_msg, value assert error.get('ctx') == err_ctx, value @pytest.mark.parametrize( 'input,output', [ (' https://www.example.com \n', 'https://www.example.com'), (b'https://www.example.com', 'https://www.example.com'), # https://www.xudongz.com/blog/2017/idn-phishing/ accepted but converted ('https://www.аррӏе.com/', 'https://www.xn--80ak6aa92e.com/'), ('https://exampl£e.org', 'https://xn--example-gia.org'), ], ) def test_coerse_url(input, output): class Model(BaseModel): v: HttpUrl assert Model(v=input).v == output def test_postgres_dsns(): class Model(BaseModel): a: PostgresDsn assert Model(a='postgres://user:pass@localhost:5432/app').a == 'postgres://user:pass@localhost:5432/app' assert Model(a='postgresql://user:pass@localhost:5432/app').a == 'postgresql://user:pass@localhost:5432/app' with pytest.raises(ValidationError) as exc_info: Model(a='http://example.org') assert exc_info.value.errors()[0]['type'] == 'value_error.url.scheme' assert exc_info.value.json().startswith('[') def test_redis_dsns(): class Model(BaseModel): a: RedisDsn m = Model(a='redis://user:pass@localhost:5432/app') assert m.a == 'redis://user:pass@localhost:5432/app' assert m.a.user == 'user' assert m.a.password == 'pass' with pytest.raises(ValidationError) as exc_info: Model(a='http://example.org') assert exc_info.value.errors()[0]['type'] == 'value_error.url.scheme' with pytest.raises(ValidationError) as exc_info: Model(a='redis://localhost:5432/app') error = exc_info.value.errors()[0] assert error == {'loc': ('a',), 'msg': 'userinfo required in URL but missing', 'type': 'value_error.url.userinfo'} def test_custom_schemes(): class Model(BaseModel): v: stricturl(strip_whitespace=False, allowed_schemes={'ws', 'wss'}) assert Model(v='ws://example.org').v == 'ws://example.org' with pytest.raises(ValidationError): Model(v='http://example.org') with pytest.raises(ValidationError): Model(v='ws://example.org ') @pytest.mark.parametrize( 'kwargs,expected', [ (dict(scheme='ws', user='foo', host='example.net'), 'ws://foo@example.net'), (dict(scheme='ws', user='foo', password='x', host='example.net'), 'ws://foo:x@example.net'), (dict(scheme='ws', host='example.net', query='a=b', fragment='c=d'), 'ws://example.net?a=b#c=d'), (dict(scheme='http', 
host='example.net', port='1234'), 'http://example.net:1234'), ], ) def test_build_url(kwargs, expected): assert AnyUrl(None, **kwargs) == expected def test_json(): class Model(BaseModel): v: HttpUrl m = Model(v='http://foo@example.net') assert m.json() == '{"v": "http://foo@example.net"}' assert m.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'v': {'title': 'V', 'minLength': 1, 'maxLength': 2083, 'type': 'string', 'format': 'uri'}}, 'required': ['v'], } @pytest.mark.skipif(not email_validator, reason='email_validator not installed') @pytest.mark.parametrize( 'value,name,email', [ ('foobar@example.com', 'foobar', 'foobar@example.com'), ('s@muelcolvin.com', 's', 's@muelcolvin.com'), ('Samuel Colvin <s@muelcolvin.com>', 'Samuel Colvin', 's@muelcolvin.com'), ('foobar <foobar@example.com>', 'foobar', 'foobar@example.com'), (' foo.bar@example.com', 'foo.bar', 'foo.bar@example.com'), ('foo.bar@example.com ', 'foo.bar', 'foo.bar@example.com'), ('foo BAR <foobar@example.com >', 'foo BAR', 'foobar@example.com'), ('FOO bar <foobar@example.com> ', 'FOO bar', 'foobar@example.com'), ('<FOOBAR@example.com> ', 'FOOBAR', 'FOOBAR@example.com'), ('ñoñó@example.com', 'ñoñó', 'ñoñó@example.com'), ('我買@example.com', '我買', '我買@example.com'), ('甲斐黒川日本@example.com', '甲斐黒川日本', '甲斐黒川日本@example.com'), ( 'чебурашкаящик-с-апельсинами.рф@example.com', 'чебурашкаящик-с-апельсинами.рф', 'чебурашкаящик-с-апельсинами.рф@example.com', ), ('उदाहरण.परीक्ष@domain.with.idn.tld', 'उदाहरण.परीक्ष', 'उदाहरण.परीक्ष@domain.with.idn.tld'), ('foo.bar@example.com', 'foo.bar', 'foo.bar@example.com'), ('foo.bar@exam-ple.com ', 'foo.bar', 'foo.bar@exam-ple.com'), ('ιωάννης@εεττ.gr', 'ιωάννης', 'ιωάννης@εεττ.gr'), ], ) def test_address_valid(value, name, email): assert validate_email(value) == (name, email) @pytest.mark.skipif(not email_validator, reason='email_validator not installed') @pytest.mark.parametrize( 'value', [ 'f oo.bar@example.com ', 'foo.bar@exam\nple.com ', 'foobar', 'foobar <foobar@example.com', ], ) def test_address_invalid(value): with pytest.raises(ValueError): validate_email(value) @pytest.mark.skipif(email_validator, reason='email_validator is installed') def test_email_validator_not_installed(): with pytest.raises(ImportError): validate_email('s@muelcolvin.com') pydantic-1.2/tests/test_networks_ipaddress.py000066400000000000000000000416731357000400300216530ustar00rootroot00000000000000from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network import pytest from pydantic import BaseModel, IPvAnyAddress, IPvAnyInterface, IPvAnyNetwork, ValidationError # # ipaddress.IPv4Address # ipaddress.IPv6Address # pydantic.IPvAnyAddress # @pytest.mark.parametrize( 'value,cls', [ ('0.0.0.0', IPv4Address), ('1.1.1.1', IPv4Address), ('10.10.10.10', IPv4Address), ('192.168.0.1', IPv4Address), ('255.255.255.255', IPv4Address), ('::1:0:1', IPv6Address), ('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', IPv6Address), (b'\x00\x00\x00\x00', IPv4Address), (b'\x01\x01\x01\x01', IPv4Address), (b'\n\n\n\n', IPv4Address), (b'\xc0\xa8\x00\x01', IPv4Address), (b'\xff\xff\xff\xff', IPv4Address), (b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01', IPv6Address), (b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Address), (0, IPv4Address), (16_843_009, IPv4Address), (168_430_090, IPv4Address), (3_232_235_521, IPv4Address), (4_294_967_295, IPv4Address), (4_294_967_297, IPv6Address), (340_282_366_920_938_463_463_374_607_431_768_211_455, IPv6Address), (IPv4Address('192.168.0.1'), IPv4Address), (IPv6Address('::1:0:1'), IPv6Address), ], ) def test_ipaddress_success(value, cls):
class Model(BaseModel): ip: IPvAnyAddress assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value', [ '0.0.0.0', '1.1.1.1', '10.10.10.10', '192.168.0.1', '255.255.255.255', b'\x00\x00\x00\x00', b'\x01\x01\x01\x01', b'\n\n\n\n', b'\xc0\xa8\x00\x01', b'\xff\xff\xff\xff', 0, 16_843_009, 168_430_090, 3_232_235_521, 4_294_967_295, IPv4Address('0.0.0.0'), IPv4Address('1.1.1.1'), IPv4Address('10.10.10.10'), IPv4Address('192.168.0.1'), IPv4Address('255.255.255.255'), ], ) def test_ipv4address_success(value): class Model(BaseModel): ipv4: IPv4Address assert Model(ipv4=value).ipv4 == IPv4Address(value) @pytest.mark.parametrize( 'value', [ '::1:0:1', 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01', b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', 4_294_967_297, 340_282_366_920_938_463_463_374_607_431_768_211_455, IPv6Address('::1:0:1'), IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'), ], ) def test_ipv6address_success(value): class Model(BaseModel): ipv6: IPv6Address assert Model(ipv6=value).ipv6 == IPv6Address(value) @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ( '192.168.0.1.1.1', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ( -1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ( 2 ** 128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 address', 'type': 'value_error.ipvanyaddress'}], ), ], ) def test_ipaddress_fails(value, errors): class Model(BaseModel): ip: IPvAnyAddress with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}], ), ( '192.168.0.1.1.1', [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}], ), (-1, [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}]), ( 2 ** 32 + 1, [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}], ), ( IPv6Address('::0:1:0'), [{'loc': ('ipv4',), 'msg': 'value is not a valid IPv4 address', 'type': 'value_error.ipv4address'}], ), ], ) def test_ipv4address_fails(value, errors): class Model(BaseModel): ipv4: IPv4Address with pytest.raises(ValidationError) as exc_info: Model(ipv4=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), ( '192.168.0.1.1.1', [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), (-1, [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}]), ( 2 ** 128 + 1, [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), ( IPv4Address('192.168.0.1'), [{'loc': ('ipv6',), 'msg': 'value is not a valid IPv6 address', 'type': 'value_error.ipv6address'}], ), ], ) def test_ipv6address_fails(value, errors): class Model(BaseModel): ipv6: IPv6Address with pytest.raises(ValidationError) as exc_info: Model(ipv6=value) assert 
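# --- editor's sketch (illustrative only, not part of the original test file) ---
# The IPvAny* types accept either address family and return the matching stdlib
# `ipaddress` object; strings, ints, bytes and tuples are all coerced. Hypothetical names:
from ipaddress import IPv4Address, IPv4Network, IPv6Address
from pydantic import BaseModel, IPvAnyAddress, IPvAnyNetwork

class Host(BaseModel):
    ip: IPvAnyAddress
    net: IPvAnyNetwork = None

h = Host(ip='192.168.0.1', net='192.168.0.0/24')
assert h.ip == IPv4Address('192.168.0.1')
assert h.net == IPv4Network('192.168.0.0/24')
assert Host(ip='::1').ip == IPv6Address('::1')  # the same field accepts IPv6
# --- end editor's sketch ---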
exc_info.value.errors() == errors # # ipaddress.IPv4Network # ipaddress.IPv6Network # pydantic.IPvAnyNetwork # @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Network), ('192.168.128.0/30', IPv4Network), ('2001:db00::0/120', IPv6Network), (2 ** 32 - 1, IPv4Network), # no mask equals to mask /32 (20_282_409_603_651_670_423_947_251_286_015, IPv6Network), # /128 (b'\xff\xff\xff\xff', IPv4Network), # /32 (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Network), (('192.168.0.0', 24), IPv4Network), (('2001:db00::0', 120), IPv6Network), (IPv4Network('192.168.0.0/24'), IPv4Network), ], ) def test_ipnetwork_success(value, cls): class Model(BaseModel): ip: IPvAnyNetwork = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Network), ('192.168.128.0/30', IPv4Network), (2 ** 32 - 1, IPv4Network), # no mask equals to mask /32 (b'\xff\xff\xff\xff', IPv4Network), # /32 (('192.168.0.0', 24), IPv4Network), (IPv4Network('192.168.0.0/24'), IPv4Network), ], ) def test_ip_v4_network_success(value, cls): class Model(BaseModel): ip: IPv4Network = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('2001:db00::0/120', IPv6Network), (20_282_409_603_651_670_423_947_251_286_015, IPv6Network), # /128 (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Network), (('2001:db00::0', 120), IPv6Network), (IPv6Network('2001:db00::0/120'), IPv6Network), ], ) def test_ip_v6_network_success(value, cls): class Model(BaseModel): ip: IPv6Network = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 'value_error.ipvanynetwork'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 'value_error.ipvanynetwork'}], ), ( -1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 'value_error.ipvanynetwork'}], ), ( 2 ** 128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 network', 'type': 'value_error.ipvanynetwork'}], ), ], ) def test_ipnetwork_fails(value, errors): class Model(BaseModel): ip: IPvAnyNetwork = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), (-1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}]), ( 2 ** 128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), ( '2001:db00::1/120', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 network', 'type': 'value_error.ipv4network'}], ), ], ) def test_ip_v4_network_fails(value, errors): class Model(BaseModel): ip: IPv4Network = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), (-1, [{'loc': ('ip',), 'msg': 'value 
is not a valid IPv6 network', 'type': 'value_error.ipv6network'}]), ( 2 ** 128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), ( '192.168.0.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 network', 'type': 'value_error.ipv6network'}], ), ], ) def test_ip_v6_network_fails(value, errors): class Model(BaseModel): ip: IPv6Network = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors # # ipaddress.IPv4Interface # ipaddress.IPv6Interface # pydantic.IPvAnyInterface # @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Interface), ('192.168.0.1/24', IPv4Interface), ('192.168.128.0/30', IPv4Interface), ('192.168.128.1/30', IPv4Interface), ('2001:db00::0/120', IPv6Interface), ('2001:db00::1/120', IPv6Interface), (2 ** 32 - 1, IPv4Interface), # no mask equals to mask /32 (2 ** 32 - 1, IPv4Interface), # so ``strict`` has no effect (20_282_409_603_651_670_423_947_251_286_015, IPv6Interface), # /128 (20_282_409_603_651_670_423_947_251_286_014, IPv6Interface), (b'\xff\xff\xff\xff', IPv4Interface), # /32 (b'\xff\xff\xff\xff', IPv4Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (('192.168.0.0', 24), IPv4Interface), (('192.168.0.1', 24), IPv4Interface), (('2001:db00::0', 120), IPv6Interface), (('2001:db00::1', 120), IPv6Interface), (IPv4Interface('192.168.0.0/24'), IPv4Interface), (IPv4Interface('192.168.0.1/24'), IPv4Interface), (IPv6Interface('2001:db00::0/120'), IPv6Interface), (IPv6Interface('2001:db00::1/120'), IPv6Interface), ], ) def test_ipinterface_success(value, cls): class Model(BaseModel): ip: IPvAnyInterface = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('192.168.0.0/24', IPv4Interface), ('192.168.0.1/24', IPv4Interface), ('192.168.128.0/30', IPv4Interface), ('192.168.128.1/30', IPv4Interface), (2 ** 32 - 1, IPv4Interface), # no mask equals to mask /32 (2 ** 32 - 1, IPv4Interface), # so ``strict`` has no effect (b'\xff\xff\xff\xff', IPv4Interface), # /32 (b'\xff\xff\xff\xff', IPv4Interface), (('192.168.0.0', 24), IPv4Interface), (('192.168.0.1', 24), IPv4Interface), (IPv4Interface('192.168.0.0/24'), IPv4Interface), (IPv4Interface('192.168.0.1/24'), IPv4Interface), ], ) def test_ip_v4_interface_success(value, cls): class Model(BaseModel): ip: IPv4Interface assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,cls', [ ('2001:db00::0/120', IPv6Interface), ('2001:db00::1/120', IPv6Interface), (20_282_409_603_651_670_423_947_251_286_015, IPv6Interface), # /128 (20_282_409_603_651_670_423_947_251_286_014, IPv6Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (b'\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff', IPv6Interface), (('2001:db00::0', 120), IPv6Interface), (('2001:db00::1', 120), IPv6Interface), (IPv6Interface('2001:db00::0/120'), IPv6Interface), (IPv6Interface('2001:db00::1/120'), IPv6Interface), ], ) def test_ip_v6_interface_success(value, cls): class Model(BaseModel): ip: IPv6Interface = None assert Model(ip=value).ip == cls(value) @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ( '192.168.0.1.1.1/24', [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or 
IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ( -1, [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ( 2 ** 128 + 1, [ { 'loc': ('ip',), 'msg': 'value is not a valid IPv4 or IPv6 interface', 'type': 'value_error.ipvanyinterface', } ], ), ], ) def test_ipinterface_fails(value, errors): class Model(BaseModel): ip: IPvAnyInterface = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}], ), (-1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}]), ( 2 ** 128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv4 interface', 'type': 'value_error.ipv4interface'}], ), ], ) def test_ip_v4_interface_fails(value, errors): class Model(BaseModel): ip: IPv4Interface = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'value,errors', [ ( 'hello,world', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}], ), ( '192.168.0.1.1.1/24', [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}], ), (-1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}]), ( 2 ** 128 + 1, [{'loc': ('ip',), 'msg': 'value is not a valid IPv6 interface', 'type': 'value_error.ipv6interface'}], ), ], ) def test_ip_v6_interface_fails(value, errors): class Model(BaseModel): ip: IPv6Interface = None with pytest.raises(ValidationError) as exc_info: Model(ip=value) assert exc_info.value.errors() == errors pydantic-1.2/tests/test_orm_mode.py000066400000000000000000000135271357000400300175430ustar00rootroot00000000000000from typing import Any, List import pytest from pydantic import BaseModel, ConfigError, ValidationError, root_validator from pydantic.utils import GetterDict def test_getdict(): class TestCls: a = 1 b: int def __init__(self): self.c = 3 @property def d(self): return 4 def __getattr__(self, key): if key == 'e': return 5 else: raise AttributeError() t = TestCls() gd = GetterDict(t) assert gd.keys() == ['a', 'c', 'd'] assert gd.get('a') == 1 assert gd['a'] == 1 with pytest.raises(KeyError): assert gd['foobar'] assert gd.get('b', None) is None assert gd.get('b', 1234) == 1234 assert gd.get('c', None) == 3 assert gd.get('d', None) == 4 assert gd.get('e', None) == 5 assert gd.get('f', 'missing') == 'missing' assert list(gd.values()) == [1, 3, 4] assert list(gd.items()) == [('a', 1), ('c', 3), ('d', 4)] assert list(gd) == ['a', 'c', 'd'] assert gd == {'a': 1, 'c': 3, 'd': 4} assert 'a' in gd assert len(gd) == 3 assert str(gd) == "{'a': 1, 'c': 3, 'd': 4}" assert repr(gd) == "GetterDict[TestCls]({'a': 1, 'c': 3, 'd': 4})" def test_orm_mode(): class PetCls: def __init__(self, *, name: str, species: str): self.name = name self.species = species class PersonCls: def __init__(self, *, name: str, age: float = None, pets: List[PetCls]): self.name = name self.age = age self.pets = pets class Pet(BaseModel): name: str species: str class Config: orm_mode = True class Person(BaseModel): name: str age: float = None pets: List[Pet] 
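# --- editor's sketch (illustrative only, not part of the original test file) ---
# `Config.orm_mode = True` enables `Model.from_orm(obj)`, which reads attributes off an
# arbitrary object (via GetterDict) instead of requiring a mapping. Hypothetical names:
from pydantic import BaseModel

class UserRow:  # stands in for e.g. a SQLAlchemy row object
    def __init__(self):
        self.id = 1
        self.name = 'Anna'

class User(BaseModel):
    id: int
    name: str
    class Config:
        orm_mode = True

assert User.from_orm(UserRow()).dict() == {'id': 1, 'name': 'Anna'}
# --- end editor's sketch ---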
class Config: orm_mode = True bones = PetCls(name='Bones', species='dog') orion = PetCls(name='Orion', species='cat') anna = PersonCls(name='Anna', age=20, pets=[bones, orion]) anna_model = Person.from_orm(anna) assert anna_model.dict() == { 'name': 'Anna', 'pets': [{'name': 'Bones', 'species': 'dog'}, {'name': 'Orion', 'species': 'cat'}], 'age': 20.0, } def test_not_orm_mode(): class Pet(BaseModel): name: str species: str with pytest.raises(ConfigError): Pet.from_orm(None) def test_object_with_getattr(): class FooGetAttr: def __getattr__(self, key: str): if key == 'foo': return 'Foo' else: raise AttributeError class Model(BaseModel): foo: str bar: int = 1 class Config: orm_mode = True class ModelInvalid(BaseModel): foo: str bar: int class Config: orm_mode = True foo = FooGetAttr() model = Model.from_orm(foo) assert model.foo == 'Foo' assert model.bar == 1 assert model.dict(exclude_unset=True) == {'foo': 'Foo'} with pytest.raises(ValidationError): ModelInvalid.from_orm(foo) def test_properties(): class XyProperty: x = 4 @property def y(self): return '5' class Model(BaseModel): x: int y: int class Config: orm_mode = True model = Model.from_orm(XyProperty()) assert model.x == 4 assert model.y == 5 def test_extra_allow(): class TestCls: x = 1 y = 2 class Model(BaseModel): x: int class Config: orm_mode = True extra = 'allow' model = Model.from_orm(TestCls()) assert model.dict() == {'x': 1} def test_extra_forbid(): class TestCls: x = 1 y = 2 class Model(BaseModel): x: int class Config: orm_mode = True extra = 'forbid' model = Model.from_orm(TestCls()) assert model.dict() == {'x': 1} def test_root_validator(): validator_value = None class TestCls: x = 1 y = 2 class Model(BaseModel): x: int y: int z: int @root_validator(pre=True) def change_input_data(cls, value): nonlocal validator_value validator_value = value return {**value, 'z': value['x'] + value['y']} class Config: orm_mode = True model = Model.from_orm(TestCls()) assert model.dict() == {'x': 1, 'y': 2, 'z': 3} assert isinstance(validator_value, GetterDict) assert validator_value == {'x': 1, 'y': 2} def test_custom_getter_dict(): class TestCls: x = 1 y = 2 def custom_getter_dict(obj): assert isinstance(obj, TestCls) return {'x': 42, 'y': 24} class Model(BaseModel): x: int y: int class Config: orm_mode = True getter_dict = custom_getter_dict model = Model.from_orm(TestCls()) assert model.dict() == {'x': 42, 'y': 24} def test_custom_getter_dict_derived_model_class(): class CustomCollection: __custom__ = True def __iter__(self): for elem in range(5): yield elem class Example: def __init__(self, *args, **kwargs): self.col = CustomCollection() self.id = 1 self.name = 'name' class MyGetterDict(GetterDict): def get(self, key: Any, default: Any = None) -> Any: res = getattr(self._obj, key, default) if hasattr(res, '__custom__'): return list(res) return res class ExampleBase(BaseModel): name: str col: List[int] class ExampleOrm(ExampleBase): id: int class Config: orm_mode = True getter_dict = MyGetterDict model = ExampleOrm.from_orm(Example()) assert model.dict() == {'name': 'name', 'col': [0, 1, 2, 3, 4], 'id': 1} pydantic-1.2/tests/test_parse.py000066400000000000000000000071401357000400300170460ustar00rootroot00000000000000import pickle from typing import List, Union import pytest from pydantic import BaseModel, Field, Protocol, ValidationError class Model(BaseModel): a: float b: int = 10 def test_obj(): m = Model.parse_obj(dict(a=10.2)) assert str(m) == 'a=10.2 b=10' def test_parse_obj_fails(): with pytest.raises(ValidationError) as exc_info: 
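# --- editor's sketch (illustrative only, not part of the original test file) ---
# The parse_* classmethods accept pre-structured data (`parse_obj`), raw strings or
# bytes (`parse_raw`, JSON by default) and file paths (`parse_file`). Hypothetical names:
from pydantic import BaseModel

class Point(BaseModel):
    x: float
    y: float = 0

assert Point.parse_obj({'x': 1}) == Point(x=1)
assert Point.parse_raw('{"x": 1, "y": 2}') == Point(x=1, y=2)
# pickle input additionally requires allow_pickle=True, as the tests below show
# --- end editor's sketch ---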
Model.parse_obj([1, 2, 3]) assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'Model expected dict not list', 'type': 'type_error'} ] def test_parse_obj_submodel(): m = Model.parse_obj(Model(a=10.2)) assert m.dict() == {'a': 10.2, 'b': 10} def test_parse_obj_wrong_model(): class Foo(BaseModel): c = 123 with pytest.raises(ValidationError) as exc_info: Model.parse_obj(Foo()) assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_parse_obj_root(): class MyModel(BaseModel): __root__: str m = MyModel.parse_obj('a') assert m.dict() == {'__root__': 'a'} assert m.__root__ == 'a' def test_parse_root_list(): class MyModel(BaseModel): __root__: List[str] m = MyModel.parse_obj(['a']) assert m.dict() == {'__root__': ['a']} assert m.__root__ == ['a'] def test_json(): assert Model.parse_raw('{"a": 12, "b": 8}') == Model(a=12, b=8) def test_json_ct(): assert Model.parse_raw('{"a": 12, "b": 8}', content_type='application/json') == Model(a=12, b=8) def test_pickle_ct(): data = pickle.dumps(dict(a=12, b=8)) assert Model.parse_raw(data, content_type='application/pickle', allow_pickle=True) == Model(a=12, b=8) def test_pickle_proto(): data = pickle.dumps(dict(a=12, b=8)) assert Model.parse_raw(data, proto=Protocol.pickle, allow_pickle=True) == Model(a=12, b=8) def test_pickle_not_allowed(): data = pickle.dumps(dict(a=12, b=8)) with pytest.raises(RuntimeError): Model.parse_raw(data, proto=Protocol.pickle) def test_bad_ct(): with pytest.raises(ValidationError) as exc_info: Model.parse_raw('{"a": 12, "b": 8}', content_type='application/missing') assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'Unknown content-type: application/missing', 'type': 'type_error'} ] def test_bad_proto(): with pytest.raises(ValidationError) as exc_info: Model.parse_raw('{"a": 12, "b": 8}', proto='foobar') assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'Unknown protocol: foobar', 'type': 'type_error'}] def test_file_json(tmpdir): p = tmpdir.join('test.json') p.write('{"a": 12, "b": 8}') assert Model.parse_file(str(p)) == Model(a=12, b=8) def test_file_json_no_ext(tmpdir): p = tmpdir.join('test') p.write('{"a": 12, "b": 8}') assert Model.parse_file(str(p)) == Model(a=12, b=8) def test_file_pickle(tmpdir): p = tmpdir.join('test.pkl') p.write_binary(pickle.dumps(dict(a=12, b=8))) assert Model.parse_file(str(p), allow_pickle=True) == Model(a=12, b=8) def test_file_pickle_no_ext(tmpdir): p = tmpdir.join('test') p.write_binary(pickle.dumps(dict(a=12, b=8))) assert Model.parse_file(str(p), content_type='application/pickle', allow_pickle=True) == Model(a=12, b=8) def test_const_differentiates_union(): class SubModelA(BaseModel): key: str = Field('A', const=True) foo: int class SubModelB(BaseModel): key: str = Field('B', const=True) foo: int class Model(BaseModel): a: Union[SubModelA, SubModelB] m = Model.parse_obj({'a': {'key': 'B', 'foo': 3}}) assert isinstance(m.a, SubModelB) pydantic-1.2/tests/test_schema.py000066400000000000000000001342741357000400300172050ustar00rootroot00000000000000import os import sys import tempfile from datetime import date, datetime, time, timedelta from decimal import Decimal from enum import Enum, IntEnum from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from pathlib import Path from typing import Any, Callable, Dict, List, NewType, Optional, Set, Tuple, Union from uuid import UUID import pytest from pydantic import BaseModel, Extra, Field, 
ValidationError, conlist, validator from pydantic.color import Color from pydantic.networks import AnyUrl, EmailStr, IPvAnyAddress, IPvAnyInterface, IPvAnyNetwork, NameEmail, stricturl from pydantic.schema import ( get_flat_models_from_model, get_flat_models_from_models, get_model_name_map, model_schema, schema, ) from pydantic.types import ( UUID1, UUID3, UUID4, UUID5, ConstrainedBytes, ConstrainedDecimal, ConstrainedFloat, ConstrainedInt, ConstrainedStr, DirectoryPath, FilePath, Json, NegativeFloat, NegativeInt, NoneBytes, NoneStr, NoneStrBytes, PositiveFloat, PositiveInt, PyObject, SecretBytes, SecretStr, StrBytes, StrictBool, StrictStr, conbytes, condecimal, confloat, conint, constr, ) from pydantic.typing import Literal try: import email_validator except ImportError: email_validator = None try: import typing_extensions except ImportError: typing_extensions = None def test_key(): class ApplePie(BaseModel): """ This is a test. """ a: float b: int = 10 s = { 'type': 'object', 'properties': {'a': {'type': 'number', 'title': 'A'}, 'b': {'type': 'integer', 'title': 'B', 'default': 10}}, 'required': ['a'], 'title': 'ApplePie', 'description': 'This is a test.', } assert True not in ApplePie.__schema_cache__ assert False not in ApplePie.__schema_cache__ assert ApplePie.schema() == s assert True in ApplePie.__schema_cache__ assert False not in ApplePie.__schema_cache__ assert ApplePie.schema() == s def test_by_alias(): class ApplePie(BaseModel): a: float b: int = 10 class Config: title = 'Apple Pie' fields = {'a': 'Snap', 'b': 'Crackle'} s = { 'type': 'object', 'title': 'Apple Pie', 'properties': { 'Snap': {'type': 'number', 'title': 'Snap'}, 'Crackle': {'type': 'integer', 'title': 'Crackle', 'default': 10}, }, 'required': ['Snap'], } assert ApplePie.schema() == s assert list(ApplePie.schema(by_alias=True)['properties'].keys()) == ['Snap', 'Crackle'] assert list(ApplePie.schema(by_alias=False)['properties'].keys()) == ['a', 'b'] def test_sub_model(): class Foo(BaseModel): """hello""" b: float class Bar(BaseModel): a: int b: Foo = None assert Bar.schema() == { 'type': 'object', 'title': 'Bar', 'definitions': { 'Foo': { 'type': 'object', 'title': 'Foo', 'description': 'hello', 'properties': {'b': {'type': 'number', 'title': 'B'}}, 'required': ['b'], } }, 'properties': {'a': {'type': 'integer', 'title': 'A'}, 'b': {'$ref': '#/definitions/Foo'}}, 'required': ['a'], } def test_schema_class(): class Model(BaseModel): foo: int = Field(4, title='Foo is Great') bar: str = Field(..., description='this description of bar') with pytest.raises(ValidationError): Model() m = Model(bar=123) assert m.dict() == {'foo': 4, 'bar': '123'} assert Model.schema() == { 'type': 'object', 'title': 'Model', 'properties': { 'foo': {'type': 'integer', 'title': 'Foo is Great', 'default': 4}, 'bar': {'type': 'string', 'title': 'Bar', 'description': 'this description of bar'}, }, 'required': ['bar'], } def test_schema_repr(): s = Field(4, title='Foo is Great') assert str(s) == "default=4 title='Foo is Great' extra={}" assert repr(s) == "FieldInfo(default=4, title='Foo is Great', extra={})" def test_schema_class_by_alias(): class Model(BaseModel): foo: int = Field(4, alias='foofoo') assert list(Model.schema()['properties'].keys()) == ['foofoo'] assert list(Model.schema(by_alias=False)['properties'].keys()) == ['foo'] def test_choices(): FooEnum = Enum('FooEnum', {'foo': 'f', 'bar': 'b'}) BarEnum = IntEnum('BarEnum', {'foo': 1, 'bar': 2}) class SpamEnum(str, Enum): foo = 'f' bar = 'b' class Model(BaseModel): foo: FooEnum bar: 
BarEnum spam: SpamEnum = Field(None) assert Model.schema() == { 'type': 'object', 'title': 'Model', 'properties': { 'foo': {'title': 'Foo', 'enum': ['f', 'b']}, 'bar': {'type': 'integer', 'title': 'Bar', 'enum': [1, 2]}, 'spam': {'type': 'string', 'title': 'Spam', 'enum': ['f', 'b']}, }, 'required': ['foo', 'bar'], } def test_json_schema(): class Model(BaseModel): a = b'foobar' b = Decimal('12.34') assert Model.schema_json(indent=2) == ( '{\n' ' "title": "Model",\n' ' "type": "object",\n' ' "properties": {\n' ' "a": {\n' ' "title": "A",\n' ' "default": "foobar",\n' ' "type": "string",\n' ' "format": "binary"\n' ' },\n' ' "b": {\n' ' "title": "B",\n' ' "default": 12.34,\n' ' "type": "number"\n' ' }\n' ' }\n' '}' ) def test_list_sub_model(): class Foo(BaseModel): a: float class Bar(BaseModel): b: List[Foo] assert Bar.schema() == { 'title': 'Bar', 'type': 'object', 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'type': 'number', 'title': 'A'}}, 'required': ['a'], } }, 'properties': {'b': {'type': 'array', 'items': {'$ref': '#/definitions/Foo'}, 'title': 'B'}}, 'required': ['b'], } def test_optional(): class Model(BaseModel): a: Optional[str] assert Model.schema() == {'title': 'Model', 'type': 'object', 'properties': {'a': {'type': 'string', 'title': 'A'}}} def test_any(): class Model(BaseModel): a: Any assert Model.schema() == {'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A'}}} def test_set(): class Model(BaseModel): a: Set[int] b: set assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'a': {'title': 'A', 'type': 'array', 'uniqueItems': True, 'items': {'type': 'integer'}}, 'b': {'title': 'B', 'type': 'array', 'items': {}, 'uniqueItems': True}, }, 'required': ['a', 'b'], } def test_const_str(): class Model(BaseModel): a: str = Field('some string', const=True) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'const': 'some string'}}, } def test_const_false(): class Model(BaseModel): a: str = Field('some string', const=False) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'default': 'some string'}}, } @pytest.mark.parametrize( 'field_type,expected_schema', [ (tuple, {}), ( Tuple[str, int, Union[str, int, float], float], [ {'type': 'string'}, {'type': 'integer'}, {'anyOf': [{'type': 'string'}, {'type': 'integer'}, {'type': 'number'}]}, {'type': 'number'}, ], ), (Tuple[str], {'type': 'string'}), ], ) def test_tuple(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'array'}}, 'required': ['a'], } base_schema['properties']['a']['items'] = expected_schema assert Model.schema() == base_schema def test_bool(): class Model(BaseModel): a: bool assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'boolean'}}, 'required': ['a'], } def test_strict_bool(): class Model(BaseModel): a: StrictBool assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'boolean'}}, 'required': ['a'], } def test_dict(): class Model(BaseModel): a: dict assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'object'}}, 'required': ['a'], } def test_list(): class Model(BaseModel): a: list assert Model.schema() == { 'title': 'Model', 'type': 'object', 
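# --- editor's sketch (illustrative only, not part of the original test file) ---
# Titles, descriptions and defaults given via `Field()` flow straight into the JSON
# schema that `.schema()` generates. Hypothetical names:
from pydantic import BaseModel, Field

class Item(BaseModel):
    name: str = Field(..., title='Item name', description='Human readable label')

assert Item.schema()['properties']['name'] == {
    'title': 'Item name',
    'description': 'Human readable label',
    'type': 'string',
}
# --- end editor's sketch ---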
'properties': {'a': {'title': 'A', 'type': 'array', 'items': {}}}, 'required': ['a'], } class Foo(BaseModel): a: float @pytest.mark.parametrize( 'field_type,expected_schema', [ ( Union[int, str], { 'properties': {'a': {'title': 'A', 'anyOf': [{'type': 'integer'}, {'type': 'string'}]}}, 'required': ['a'], }, ), ( List[int], {'properties': {'a': {'title': 'A', 'type': 'array', 'items': {'type': 'integer'}}}, 'required': ['a']}, ), ( Dict[str, Foo], { 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'number'}}, 'required': ['a'], } }, 'properties': { 'a': {'title': 'A', 'type': 'object', 'additionalProperties': {'$ref': '#/definitions/Foo'}} }, 'required': ['a'], }, ), ( Union[None, Foo], { 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'number'}}, 'required': ['a'], } }, 'properties': {'a': {'$ref': '#/definitions/Foo'}}, }, ), (Dict[str, Any], {'properties': {'a': {'title': 'A', 'type': 'object'}}, 'required': ['a']}), ], ) def test_list_union_dict(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object'} base_schema.update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (datetime, {'type': 'string', 'format': 'date-time'}), (date, {'type': 'string', 'format': 'date'}), (time, {'type': 'string', 'format': 'time'}), (timedelta, {'type': 'number', 'format': 'time-delta'}), ], ) def test_date_types(field_type, expected_schema): class Model(BaseModel): a: field_type attribute_schema = {'title': 'A'} attribute_schema.update(expected_schema) base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': attribute_schema}, 'required': ['a']} assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (NoneStr, {'properties': {'a': {'title': 'A', 'type': 'string'}}}), (NoneBytes, {'properties': {'a': {'title': 'A', 'type': 'string', 'format': 'binary'}}}), ( StrBytes, { 'properties': { 'a': {'title': 'A', 'anyOf': [{'type': 'string'}, {'type': 'string', 'format': 'binary'}]} }, 'required': ['a'], }, ), ( NoneStrBytes, { 'properties': { 'a': {'title': 'A', 'anyOf': [{'type': 'string'}, {'type': 'string', 'format': 'binary'}]} } }, ), ], ) def test_str_basic_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object'} base_schema.update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (StrictStr, {'title': 'A', 'type': 'string'}), (ConstrainedStr, {'title': 'A', 'type': 'string'}), ( constr(min_length=3, max_length=5, regex='^text$'), {'title': 'A', 'type': 'string', 'minLength': 3, 'maxLength': 5, 'pattern': '^text$'}, ), ], ) def test_str_constrained_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': {}}, 'required': ['a']} base_schema['properties']['a'] = expected_schema assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (AnyUrl, {'title': 'A', 'type': 'string', 'format': 'uri', 'minLength': 1, 'maxLength': 2 ** 16}), ( stricturl(min_length=5, max_length=10), {'title': 'A', 'type': 'string', 'format': 'uri', 'minLength': 5, 'maxLength': 10}, ), ], ) def test_special_str_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = 
{'title': 'Model', 'type': 'object', 'properties': {'a': {}}, 'required': ['a']} base_schema['properties']['a'] = expected_schema assert Model.schema() == base_schema @pytest.mark.skipif(not email_validator, reason='email_validator not installed') @pytest.mark.parametrize('field_type,expected_schema', [(EmailStr, 'email'), (NameEmail, 'name-email')]) def test_email_str_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], } base_schema['properties']['a']['format'] = expected_schema assert Model.schema() == base_schema @pytest.mark.parametrize('field_type,inner_type', [(SecretBytes, 'string'), (SecretStr, 'string')]) def test_secret_types(field_type, inner_type): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': inner_type, 'writeOnly': True}}, 'required': ['a'], } assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (ConstrainedInt, {}), (conint(gt=5, lt=10), {'exclusiveMinimum': 5, 'exclusiveMaximum': 10}), (conint(ge=5, le=10), {'minimum': 5, 'maximum': 10}), (conint(multiple_of=5), {'multipleOf': 5}), (PositiveInt, {'exclusiveMinimum': 0}), (NegativeInt, {'exclusiveMaximum': 0}), ], ) def test_special_int_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'integer'}}, 'required': ['a'], } base_schema['properties']['a'].update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [ (ConstrainedFloat, {}), (confloat(gt=5, lt=10), {'exclusiveMinimum': 5, 'exclusiveMaximum': 10}), (confloat(ge=5, le=10), {'minimum': 5, 'maximum': 10}), (confloat(multiple_of=5), {'multipleOf': 5}), (PositiveFloat, {'exclusiveMinimum': 0}), (NegativeFloat, {'exclusiveMaximum': 0}), (ConstrainedDecimal, {}), (condecimal(gt=5, lt=10), {'exclusiveMinimum': 5, 'exclusiveMaximum': 10}), (condecimal(ge=5, le=10), {'minimum': 5, 'maximum': 10}), (condecimal(multiple_of=5), {'multipleOf': 5}), ], ) def test_special_float_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'number'}}, 'required': ['a'], } base_schema['properties']['a'].update(expected_schema) assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [(UUID, 'uuid'), (UUID1, 'uuid1'), (UUID3, 'uuid3'), (UUID4, 'uuid4'), (UUID5, 'uuid5')], ) def test_uuid_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'format': ''}}, 'required': ['a'], } base_schema['properties']['a']['format'] = expected_schema assert Model.schema() == base_schema @pytest.mark.parametrize( 'field_type,expected_schema', [(FilePath, 'file-path'), (DirectoryPath, 'directory-path'), (Path, 'path')] ) def test_path_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'format': ''}}, 'required': ['a'], } base_schema['properties']['a']['format'] = expected_schema assert Model.schema() == base_schema def test_json_type(): class Model(BaseModel): a: Json 
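# A `Json` field accepts a JSON-encoded string and decodes it during validation; as the assertion below shows, the schema reports it as a string with format 'json-string' and omits it from 'required'.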
model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string', 'format': 'json-string'}}, } def test_ipv4address_type(): class Model(BaseModel): ip_address: IPv4Address model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_address': {'title': 'Ip Address', 'type': 'string', 'format': 'ipv4'}}, 'required': ['ip_address'], } def test_ipv6address_type(): class Model(BaseModel): ip_address: IPv6Address model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_address': {'title': 'Ip Address', 'type': 'string', 'format': 'ipv6'}}, 'required': ['ip_address'], } def test_ipvanyaddress_type(): class Model(BaseModel): ip_address: IPvAnyAddress model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_address': {'title': 'Ip Address', 'type': 'string', 'format': 'ipvanyaddress'}}, 'required': ['ip_address'], } def test_ipv4interface_type(): class Model(BaseModel): ip_interface: IPv4Interface model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_interface': {'title': 'Ip Interface', 'type': 'string', 'format': 'ipv4interface'}}, 'required': ['ip_interface'], } def test_ipv6interface_type(): class Model(BaseModel): ip_interface: IPv6Interface model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_interface': {'title': 'Ip Interface', 'type': 'string', 'format': 'ipv6interface'}}, 'required': ['ip_interface'], } def test_ipvanyinterface_type(): class Model(BaseModel): ip_interface: IPvAnyInterface model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_interface': {'title': 'Ip Interface', 'type': 'string', 'format': 'ipvanyinterface'}}, 'required': ['ip_interface'], } def test_ipv4network_type(): class Model(BaseModel): ip_network: IPv4Network model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_network': {'title': 'Ip Network', 'type': 'string', 'format': 'ipv4network'}}, 'required': ['ip_network'], } def test_ipv6network_type(): class Model(BaseModel): ip_network: IPv6Network model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_network': {'title': 'Ip Network', 'type': 'string', 'format': 'ipv6network'}}, 'required': ['ip_network'], } def test_ipvanynetwork_type(): class Model(BaseModel): ip_network: IPvAnyNetwork model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'ip_network': {'title': 'Ip Network', 'type': 'string', 'format': 'ipvanynetwork'}}, 'required': ['ip_network'], } @pytest.mark.parametrize('annotation', [Callable, Callable[[int], int]]) def test_callable_type(annotation): class Model(BaseModel): callback: annotation foo: int with pytest.warns(UserWarning): model_schema = Model.schema() assert 'callback' not in model_schema['properties'] def test_error_non_supported_types(): class Model(BaseModel): a: PyObject with pytest.raises(ValueError): Model.schema() def create_testing_submodules(): base_path = Path(tempfile.mkdtemp()) mod_root_path = base_path / 'pydantic_schema_test' os.makedirs(mod_root_path, exist_ok=True) open(mod_root_path / '__init__.py', 'w').close() for mod in ['a', 'b', 'c']: module_name = 'module' + mod 
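# each generated package gets an __init__.py plus a model file, e.g. pydantic_schema_test/modulea/modela.py defining a `Model` class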
model_name = 'model' + mod + '.py' os.makedirs(mod_root_path / module_name, exist_ok=True) open(mod_root_path / module_name / '__init__.py', 'w').close() with open(mod_root_path / module_name / model_name, 'w') as f: f.write('from pydantic import BaseModel\n' 'class Model(BaseModel):\n' ' a: str\n') module_name = 'moduled' model_name = 'modeld.py' os.makedirs(mod_root_path / module_name, exist_ok=True) open(mod_root_path / module_name / '__init__.py', 'w').close() with open(mod_root_path / module_name / model_name, 'w') as f: f.write('from ..moduleb.modelb import Model') sys.path.insert(0, str(base_path)) def test_flat_models_unique_models(): create_testing_submodules() from pydantic_schema_test.modulea.modela import Model as ModelA from pydantic_schema_test.moduleb.modelb import Model as ModelB from pydantic_schema_test.moduled.modeld import Model as ModelD flat_models = get_flat_models_from_models([ModelA, ModelB, ModelD]) assert flat_models == set([ModelA, ModelB]) def test_flat_models_with_submodels(): class Foo(BaseModel): a: str class Bar(BaseModel): b: List[Foo] class Baz(BaseModel): c: Dict[str, Bar] flat_models = get_flat_models_from_model(Baz) assert flat_models == set([Foo, Bar, Baz]) def test_flat_models_with_submodels_from_sequence(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Ingredient(BaseModel): name: str class Pizza(BaseModel): name: str ingredients: List[Ingredient] flat_models = get_flat_models_from_models([Bar, Pizza]) assert flat_models == set([Foo, Bar, Ingredient, Pizza]) def test_model_name_maps(): create_testing_submodules() from pydantic_schema_test.modulea.modela import Model as ModelA from pydantic_schema_test.moduleb.modelb import Model as ModelB from pydantic_schema_test.modulec.modelc import Model as ModelC from pydantic_schema_test.moduled.modeld import Model as ModelD class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar flat_models = get_flat_models_from_models([Baz, ModelA, ModelB, ModelC, ModelD]) model_name_map = get_model_name_map(flat_models) assert model_name_map == { Foo: 'Foo', Bar: 'Bar', Baz: 'Baz', ModelA: 'pydantic_schema_test__modulea__modela__Model', ModelB: 'pydantic_schema_test__moduleb__modelb__Model', ModelC: 'pydantic_schema_test__modulec__modelc__Model', } def test_schema_overrides(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo = Foo(a='foo') class Baz(BaseModel): c: Optional[Bar] class Model(BaseModel): d: Baz model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'definitions': { 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], }, 'Bar': { 'title': 'Bar', 'type': 'object', 'properties': {'b': {'title': 'B', 'default': {'a': 'foo'}, 'allOf': [{'$ref': '#/definitions/Foo'}]}}, }, 'Baz': {'title': 'Baz', 'type': 'object', 'properties': {'c': {'$ref': '#/definitions/Bar'}}}, }, 'properties': {'d': {'$ref': '#/definitions/Baz'}}, 'required': ['d'], } def test_schema_from_models(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar class Model(BaseModel): d: Baz class Ingredient(BaseModel): name: str class Pizza(BaseModel): name: str ingredients: List[Ingredient] model_schema = schema( [Model, Pizza], title='Multi-model schema', description='Single JSON Schema with multiple definitions' ) assert model_schema == { 'title': 'Multi-model schema', 'description': 'Single JSON Schema with multiple definitions', 'definitions': { 'Pizza': { 
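# definitions here are keyed by plain class name; clashing names from different modules would instead use module-qualified keys, as test_model_name_maps above demonstrates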
'title': 'Pizza', 'type': 'object', 'properties': { 'name': {'title': 'Name', 'type': 'string'}, 'ingredients': { 'title': 'Ingredients', 'type': 'array', 'items': {'$ref': '#/definitions/Ingredient'}, }, }, 'required': ['name', 'ingredients'], }, 'Ingredient': { 'title': 'Ingredient', 'type': 'object', 'properties': {'name': {'title': 'Name', 'type': 'string'}}, 'required': ['name'], }, 'Model': { 'title': 'Model', 'type': 'object', 'properties': {'d': {'$ref': '#/definitions/Baz'}}, 'required': ['d'], }, 'Baz': { 'title': 'Baz', 'type': 'object', 'properties': {'c': {'$ref': '#/definitions/Bar'}}, 'required': ['c'], }, 'Bar': { 'title': 'Bar', 'type': 'object', 'properties': {'b': {'$ref': '#/definitions/Foo'}}, 'required': ['b'], }, 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], }, }, } def test_schema_with_ref_prefix(): class Foo(BaseModel): a: str class Bar(BaseModel): b: Foo class Baz(BaseModel): c: Bar model_schema = schema([Bar, Baz], ref_prefix='#/components/schemas/') # OpenAPI style assert model_schema == { 'definitions': { 'Baz': { 'title': 'Baz', 'type': 'object', 'properties': {'c': {'$ref': '#/components/schemas/Bar'}}, 'required': ['c'], }, 'Bar': { 'title': 'Bar', 'type': 'object', 'properties': {'b': {'$ref': '#/components/schemas/Foo'}}, 'required': ['b'], }, 'Foo': { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], }, } } def test_schema_no_definitions(): model_schema = schema([], title='Schema without definitions') assert model_schema == {'title': 'Schema without definitions'} def test_list_default(): class UserModel(BaseModel): friends: List[int] = [1] assert UserModel.schema() == { 'title': 'UserModel', 'type': 'object', 'properties': {'friends': {'title': 'Friends', 'default': [1], 'type': 'array', 'items': {'type': 'integer'}}}, } def test_dict_default(): class UserModel(BaseModel): friends: Dict[str, float] = {'a': 1.1, 'b': 2.2} assert UserModel.schema() == { 'title': 'UserModel', 'type': 'object', 'properties': { 'friends': { 'title': 'Friends', 'default': {'a': 1.1, 'b': 2.2}, 'type': 'object', 'additionalProperties': {'type': 'number'}, } }, } @pytest.mark.parametrize( 'kwargs,type_,expected_extra', [ ({'max_length': 5}, str, {'type': 'string', 'maxLength': 5}), ({}, constr(max_length=6), {'type': 'string', 'maxLength': 6}), ({'min_length': 2}, str, {'type': 'string', 'minLength': 2}), ({'max_length': 5}, bytes, {'type': 'string', 'maxLength': 5, 'format': 'binary'}), ({'regex': '^foo$'}, str, {'type': 'string', 'pattern': '^foo$'}), ({'gt': 2}, int, {'type': 'integer', 'exclusiveMinimum': 2}), ({'lt': 5}, int, {'type': 'integer', 'exclusiveMaximum': 5}), ({'ge': 2}, int, {'type': 'integer', 'minimum': 2}), ({'le': 5}, int, {'type': 'integer', 'maximum': 5}), ({'multiple_of': 5}, int, {'type': 'integer', 'multipleOf': 5}), ({'gt': 2}, float, {'type': 'number', 'exclusiveMinimum': 2}), ({'lt': 5}, float, {'type': 'number', 'exclusiveMaximum': 5}), ({'ge': 2}, float, {'type': 'number', 'minimum': 2}), ({'le': 5}, float, {'type': 'number', 'maximum': 5}), ({'multiple_of': 5}, float, {'type': 'number', 'multipleOf': 5}), ({'gt': 2}, Decimal, {'type': 'number', 'exclusiveMinimum': 2}), ({'lt': 5}, Decimal, {'type': 'number', 'exclusiveMaximum': 5}), ({'ge': 2}, Decimal, {'type': 'number', 'minimum': 2}), ({'le': 5}, Decimal, {'type': 'number', 'maximum': 5}), ({'multiple_of': 5}, Decimal, {'type': 'number', 'multipleOf': 5}), ], ) def 
test_constraints_schema(kwargs, type_, expected_extra): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) expected_schema = { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'title': 'A title', 'description': 'A description', 'default': 'foo'}}, } expected_schema['properties']['a'].update(expected_extra) assert Foo.schema() == expected_schema @pytest.mark.parametrize( 'kwargs,type_', [ ({'max_length': 5}, int), ({'min_length': 2}, float), ({'max_length': 5}, Decimal), ({'regex': '^foo$'}, int), ({'gt': 2}, str), ({'lt': 5}, bytes), ({'ge': 2}, str), ({'le': 5}, bool), ({'gt': 0}, Callable), ({'gt': 0}, Callable[[int], int]), ({'gt': 0}, conlist(int, min_items=4)), ], ) def test_unenforced_constraints_schema(kwargs, type_): with pytest.raises(ValueError, match='On field "a" the following field constraints are set but not enforced'): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) @pytest.mark.parametrize( 'kwargs,type_,value', [ ({'max_length': 5}, str, 'foo'), ({'min_length': 2}, str, 'foo'), ({'max_length': 5}, bytes, b'foo'), ({'regex': '^foo$'}, str, 'foo'), ({'gt': 2}, int, 3), ({'lt': 5}, int, 3), ({'ge': 2}, int, 3), ({'ge': 2}, int, 2), ({'gt': 2}, int, '3'), ({'le': 5}, int, 3), ({'le': 5}, int, 5), ({'gt': 2}, float, 3.0), ({'gt': 2}, float, 2.1), ({'lt': 5}, float, 3.0), ({'lt': 5}, float, 4.9), ({'ge': 2}, float, 3.0), ({'ge': 2}, float, 2.0), ({'le': 5}, float, 3.0), ({'le': 5}, float, 5.0), ({'gt': 2}, float, 3), ({'gt': 2}, float, '3'), ({'gt': 2}, Decimal, Decimal(3)), ({'lt': 5}, Decimal, Decimal(3)), ({'ge': 2}, Decimal, Decimal(3)), ({'ge': 2}, Decimal, Decimal(2)), ({'le': 5}, Decimal, Decimal(3)), ({'le': 5}, Decimal, Decimal(5)), ], ) def test_constraints_schema_validation(kwargs, type_, value): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) assert Foo(a=value) @pytest.mark.parametrize( 'kwargs,type_,value', [ ({'max_length': 5}, str, 'foobar'), ({'min_length': 2}, str, 'f'), ({'regex': '^foo$'}, str, 'bar'), ({'gt': 2}, int, 2), ({'lt': 5}, int, 5), ({'ge': 2}, int, 1), ({'le': 5}, int, 6), ({'gt': 2}, float, 2.0), ({'lt': 5}, float, 5.0), ({'ge': 2}, float, 1.9), ({'le': 5}, float, 5.1), ({'gt': 2}, Decimal, Decimal(2)), ({'lt': 5}, Decimal, Decimal(5)), ({'ge': 2}, Decimal, Decimal(1)), ({'le': 5}, Decimal, Decimal(6)), ], ) def test_constraints_schema_validation_raises(kwargs, type_, value): class Foo(BaseModel): a: type_ = Field('foo', title='A title', description='A description', **kwargs) with pytest.raises(ValidationError): Foo(a=value) def test_schema_kwargs(): class Foo(BaseModel): a: str = Field('foo', examples=['bar']) assert Foo.schema() == { 'title': 'Foo', 'type': 'object', 'properties': {'a': {'type': 'string', 'title': 'A', 'default': 'foo', 'examples': ['bar']}}, } def test_schema_dict_constr(): regex_str = r'^([a-zA-Z_][a-zA-Z0-9_]*)$' ConStrType = constr(regex=regex_str) ConStrKeyDict = Dict[ConStrType, str] class Foo(BaseModel): a: ConStrKeyDict = {} assert Foo.schema() == { 'title': 'Foo', 'type': 'object', 'properties': { 'a': {'type': 'object', 'title': 'A', 'default': {}, 'patternProperties': {regex_str: {'type': 'string'}}} }, } @pytest.mark.parametrize( 'field_type,expected_schema', [ (ConstrainedBytes, {'title': 'A', 'type': 'string', 'format': 'binary'}), ( conbytes(min_length=3, max_length=5), {'title': 'A', 'type': 'string', 'format': 'binary', 'minLength': 3, 'maxLength': 
5}, ), ], ) def test_bytes_constrained_types(field_type, expected_schema): class Model(BaseModel): a: field_type base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': {}}, 'required': ['a']} base_schema['properties']['a'] = expected_schema assert Model.schema() == base_schema def test_optional_dict(): class Model(BaseModel): something: Optional[Dict[str, Any]] assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'something': {'title': 'Something', 'type': 'object'}}, } assert Model().dict() == {'something': None} assert Model(something={'foo': 'Bar'}).dict() == {'something': {'foo': 'Bar'}} def test_optional_validator(): class Model(BaseModel): something: Optional[str] @validator('something', always=True) def check_something(cls, v): assert v is None or 'x' not in v, 'should not contain x' return v assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'something': {'title': 'Something', 'type': 'string'}}, } assert Model().dict() == {'something': None} assert Model(something=None).dict() == {'something': None} assert Model(something='hello').dict() == {'something': 'hello'} def test_field_with_validator(): class Model(BaseModel): something: Optional[int] = None @validator('something') def check_field(cls, v, *, values, config, field): return v assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'something': {'type': 'integer', 'title': 'Something'}}, } def test_unparameterized_schema_generation(): class FooList(BaseModel): d: List class BarList(BaseModel): d: list assert model_schema(FooList) == { 'title': 'FooList', 'type': 'object', 'properties': {'d': {'items': {}, 'title': 'D', 'type': 'array'}}, 'required': ['d'], } foo_list_schema = model_schema(FooList) bar_list_schema = model_schema(BarList) bar_list_schema['title'] = 'FooList' # to check for equality assert foo_list_schema == bar_list_schema class FooDict(BaseModel): d: Dict class BarDict(BaseModel): d: dict model_schema(Foo) assert model_schema(FooDict) == { 'title': 'FooDict', 'type': 'object', 'properties': {'d': {'title': 'D', 'type': 'object'}}, 'required': ['d'], } foo_dict_schema = model_schema(FooDict) bar_dict_schema = model_schema(BarDict) bar_dict_schema['title'] = 'FooDict' # to check for equality assert foo_dict_schema == bar_dict_schema def test_known_model_optimization(): class Dep(BaseModel): number: int class Model(BaseModel): dep: Dep dep_l: List[Dep] expected = { 'title': 'Model', 'type': 'object', 'properties': { 'dep': {'$ref': '#/definitions/Dep'}, 'dep_l': {'title': 'Dep L', 'type': 'array', 'items': {'$ref': '#/definitions/Dep'}}, }, 'required': ['dep', 'dep_l'], 'definitions': { 'Dep': { 'title': 'Dep', 'type': 'object', 'properties': {'number': {'title': 'Number', 'type': 'integer'}}, 'required': ['number'], } }, } assert Model.schema() == expected def test_root(): class Model(BaseModel): __root__: str assert Model.schema() == {'title': 'Model', 'type': 'string'} def test_root_list(): class Model(BaseModel): __root__: List[str] assert Model.schema() == {'title': 'Model', 'type': 'array', 'items': {'type': 'string'}} def test_root_nested_model(): class NestedModel(BaseModel): a: str class Model(BaseModel): __root__: List[NestedModel] assert Model.schema() == { 'title': 'Model', 'type': 'array', 'items': {'$ref': '#/definitions/NestedModel'}, 'definitions': { 'NestedModel': { 'title': 'NestedModel', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], } }, } def 
test_new_type_schema(): a_type = NewType('a_type', int) b_type = NewType('b_type', a_type) c_type = NewType('c_type', str) class Model(BaseModel): a: a_type b: b_type c: c_type assert Model.schema() == { 'properties': { 'a': {'title': 'A', 'type': 'integer'}, 'b': {'title': 'B', 'type': 'integer'}, 'c': {'title': 'C', 'type': 'string'}, }, 'required': ['a', 'b', 'c'], 'title': 'Model', 'type': 'object', } @pytest.mark.skipif(not typing_extensions, reason='typing_extensions not installed') def test_literal_schema(): class Model(BaseModel): a: Literal[1] b: Literal['a'] c: Literal['a', 1] assert Model.schema() == { 'properties': { 'a': {'title': 'A', 'type': 'integer', 'const': 1}, 'b': {'title': 'B', 'type': 'string', 'const': 'a'}, 'c': {'anyOf': [{'type': 'string', 'const': 'a'}, {'type': 'integer', 'const': 1}], 'title': 'C'}, }, 'required': ['a', 'b', 'c'], 'title': 'Model', 'type': 'object', } def test_color_type(): class Model(BaseModel): color: Color model_schema = Model.schema() assert model_schema == { 'title': 'Model', 'type': 'object', 'properties': {'color': {'title': 'Color', 'type': 'string', 'format': 'color'}}, 'required': ['color'], } def test_model_with_schema_extra(): class Model(BaseModel): a: str class Config: schema_extra = {'examples': [{'a': 'Foo'}]} assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], 'examples': [{'a': 'Foo'}], } def test_model_with_extra_forbidden(): class Model(BaseModel): a: str class Config: extra = Extra.forbid assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': {'a': {'title': 'A', 'type': 'string'}}, 'required': ['a'], 'additionalProperties': False, } @pytest.mark.parametrize( 'annotation,kwargs,field_schema', [ (int, dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'integer'}), (Optional[int], dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'integer'}), ( Tuple[int, ...], dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'array', 'items': {'exclusiveMinimum': 0, 'type': 'integer'}}, ), ( Tuple[int, int, int], dict(gt=0), { 'title': 'A', 'type': 'array', 'items': [ {'exclusiveMinimum': 0, 'type': 'integer'}, {'exclusiveMinimum': 0, 'type': 'integer'}, {'exclusiveMinimum': 0, 'type': 'integer'}, ], }, ), ( Union[int, float], dict(gt=0), { 'title': 'A', 'anyOf': [{'exclusiveMinimum': 0, 'type': 'integer'}, {'exclusiveMinimum': 0, 'type': 'number'}], }, ), ( List[int], dict(gt=0), {'title': 'A', 'exclusiveMinimum': 0, 'type': 'array', 'items': {'exclusiveMinimum': 0, 'type': 'integer'}}, ), ( Dict[str, int], dict(gt=0), { 'title': 'A', 'exclusiveMinimum': 0, 'type': 'object', 'additionalProperties': {'exclusiveMinimum': 0, 'type': 'integer'}, }, ), ( Union[str, int], dict(gt=0, max_length=5), {'title': 'A', 'anyOf': [{'maxLength': 5, 'type': 'string'}, {'exclusiveMinimum': 0, 'type': 'integer'}]}, ), ], ) def test_enforced_constraints(annotation, kwargs, field_schema): class Model(BaseModel): a: annotation = Field(..., **kwargs) schema = Model.schema() # debug(schema['properties']['a']) assert schema['properties']['a'] == field_schema def test_real_vs_phony_constraints(): class Model1(BaseModel): foo: int = Field(..., gt=123) class Config: title = 'Test Model' class Model2(BaseModel): foo: int = Field(..., exclusiveMinimum=123) class Config: title = 'Test Model' with pytest.raises(ValidationError, match='ensure this value is greater than 123'): Model1(foo=122) assert Model2(foo=122).dict() == {'foo': 122} 
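# both models emit identical JSON Schema: gt=123 is enforced at validation time *and* exported as exclusiveMinimum, whereas a raw exclusiveMinimum=123 keyword is passed through to the schema but never validated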
assert ( Model1.schema() == Model2.schema() == { 'title': 'Test Model', 'type': 'object', 'properties': {'foo': {'title': 'Foo', 'exclusiveMinimum': 123, 'type': 'integer'}}, 'required': ['foo'], } ) def test_conlist(): class Model(BaseModel): foo: List[int] = Field(..., min_items=2, max_items=4) assert Model(foo=[1, 2]).dict() == {'foo': [1, 2]} with pytest.raises(ValidationError, match='ensure this value has at least 2 items'): Model(foo=[1]) with pytest.raises(ValidationError, match='ensure this value has at most 4 items'): Model(foo=list(range(5))) assert Model.schema() == { 'title': 'Model', 'type': 'object', 'properties': { 'foo': {'title': 'Foo', 'type': 'array', 'items': {'type': 'integer'}, 'minItems': 2, 'maxItems': 4} }, 'required': ['foo'], } with pytest.raises(ValidationError) as exc_info: Model(foo=[1, 'x', 'y']) assert exc_info.value.errors() == [ {'loc': ('foo', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('foo', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] with pytest.raises(ValidationError) as exc_info: Model(foo=1) assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_subfield_field_info(): class MyModel(BaseModel): entries: Dict[str, List[int]] assert MyModel.schema() == { 'title': 'MyModel', 'type': 'object', 'properties': { 'entries': { 'title': 'Entries', 'type': 'object', 'additionalProperties': {'type': 'array', 'items': {'type': 'integer'}}, } }, 'required': ['entries'], } pydantic-1.2/tests/test_settings.py000066400000000000000000000245471357000400300176060ustar00rootroot00000000000000import os from typing import Dict, List, Set import pytest from pydantic import BaseModel, BaseSettings, Field, NoneStr, ValidationError, dataclasses from pydantic.env_settings import SettingsError class SimpleSettings(BaseSettings): apple: str def test_sub_env(env): env.set('apple', 'hello') s = SimpleSettings() assert s.apple == 'hello' def test_sub_env_override(env): env.set('apple', 'hello') s = SimpleSettings(apple='goodbye') assert s.apple == 'goodbye' def test_sub_env_missing(): with pytest.raises(ValidationError) as exc_info: SimpleSettings() assert exc_info.value.errors() == [{'loc': ('apple',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_other_setting(): with pytest.raises(ValidationError): SimpleSettings(apple='a', foobar=42) def test_with_prefix(env): class Settings(BaseSettings): apple: str class Config: env_prefix = 'foobar_' with pytest.raises(ValidationError): Settings() env.set('foobar_apple', 'has_prefix') s = Settings() assert s.apple == 'has_prefix' def test_nested_env_with_basemodel(env): class TopValue(BaseModel): apple: str banana: str class Settings(BaseSettings): top: TopValue with pytest.raises(ValidationError): Settings() env.set('top', '{"banana": "secret_value"}') s = Settings(top={'apple': 'value'}) assert s.top == {'apple': 'value', 'banana': 'secret_value'} def test_nested_env_with_dict(env): class Settings(BaseSettings): top: Dict[str, str] with pytest.raises(ValidationError): Settings() env.set('top', '{"banana": "secret_value"}') s = Settings(top={'apple': 'value'}) assert s.top == {'apple': 'value', 'banana': 'secret_value'} class DateModel(BaseModel): pips: bool = False class ComplexSettings(BaseSettings): apples: List[str] = [] bananas: Set[int] = set() carrots: dict = {} date: DateModel = DateModel() def test_list(env): env.set('apples', '["russet", "granny smith"]') s = 
ComplexSettings() assert s.apples == ['russet', 'granny smith'] assert s.date.pips is False def test_set_dict_model(env): env.set('bananas', '[1, 2, 3, 3]') env.set('CARROTS', '{"a": null, "b": 4}') env.set('daTE', '{"pips": true}') s = ComplexSettings() assert s.bananas == {1, 2, 3} assert s.carrots == {'a': None, 'b': 4} assert s.date.pips is True def test_invalid_json(env): env.set('apples', '["russet", "granny smith",]') with pytest.raises(SettingsError, match='error parsing JSON for "apples"'): ComplexSettings() def test_required_sub_model(env): class Settings(BaseSettings): foobar: DateModel with pytest.raises(ValidationError): Settings() env.set('FOOBAR', '{"pips": "TRUE"}') s = Settings() assert s.foobar.pips is True def test_non_class(env): class Settings(BaseSettings): foobar: NoneStr env.set('FOOBAR', 'xxx') s = Settings() assert s.foobar == 'xxx' def test_env_str(env): class Settings(BaseSettings): apple: str = ... class Config: fields = {'apple': {'env': 'BOOM'}} env.set('BOOM', 'hello') assert Settings().apple == 'hello' def test_env_list(env): class Settings(BaseSettings): foobar: str class Config: fields = {'foobar': {'env': ['different1', 'different2']}} env.set('different1', 'value 1') env.set('different2', 'value 2') s = Settings() assert s.foobar == 'value 1' def test_env_list_field(env): class Settings(BaseSettings): foobar: str = Field(..., env='foobar_env_name') env.set('FOOBAR_ENV_NAME', 'env value') s = Settings() assert s.foobar == 'env value' def test_env_list_last(env): class Settings(BaseSettings): foobar: str class Config: fields = {'foobar': {'env': ['different2']}} env.set('different1', 'value 1') env.set('different2', 'value 2') s = Settings() assert s.foobar == 'value 2' assert Settings(foobar='abc').foobar == 'abc' def test_env_inheritance(env): class SettingsParent(BaseSettings): foobar: str = 'parent default' class Config: fields = {'foobar': {'env': 'different'}} class SettingsChild(SettingsParent): foobar: str = 'child default' assert SettingsParent().foobar == 'parent default' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'child default' assert SettingsChild(foobar='abc').foobar == 'abc' env.set('different', 'env value') assert SettingsParent().foobar == 'env value' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'env value' assert SettingsChild(foobar='abc').foobar == 'abc' def test_env_inheritance_field(env): class SettingsParent(BaseSettings): foobar: str = Field('parent default', env='foobar_env') class SettingsChild(SettingsParent): foobar: str = 'child default' assert SettingsParent().foobar == 'parent default' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'child default' assert SettingsChild(foobar='abc').foobar == 'abc' env.set('foobar_env', 'env value') assert SettingsParent().foobar == 'env value' assert SettingsParent(foobar='abc').foobar == 'abc' assert SettingsChild().foobar == 'child default' assert SettingsChild(foobar='abc').foobar == 'abc' def test_env_invalid(env): with pytest.raises(TypeError, match=r'invalid field env: 123 \(int\); should be string, list or set'): class Settings(BaseSettings): foobar: str class Config: fields = {'foobar': {'env': 123}} def test_env_field(env): with pytest.raises(TypeError, match=r'invalid field env: 123 \(int\); should be string, list or set'): class Settings(BaseSettings): foobar: str = Field(..., env=123) def test_aliases_warning(env): with pytest.warns(FutureWarning, match='aliases 
are no longer used by BaseSettings'): class Settings(BaseSettings): foobar: str = 'default value' class Config: fields = {'foobar': 'foobar_alias'} assert Settings().foobar == 'default value' env.set('foobar_alias', 'xxx') assert Settings().foobar == 'default value' assert Settings(foobar_alias='42').foobar == '42' def test_aliases_no_warning(env): class Settings(BaseSettings): foobar: str = 'default value' class Config: fields = {'foobar': {'alias': 'foobar_alias', 'env': 'foobar_env'}} assert Settings().foobar == 'default value' assert Settings(foobar_alias='42').foobar == '42' env.set('foobar_alias', 'xxx') assert Settings().foobar == 'default value' env.set('foobar_env', 'xxx') assert Settings().foobar == 'xxx' assert Settings(foobar_alias='42').foobar == '42' def test_case_sensitive(monkeypatch): class Settings(BaseSettings): foo: str class Config: case_sensitive = True # Need to patch os.environ to get build to work on Windows, where os.environ is case insensitive monkeypatch.setattr(os, 'environ', value={'Foo': 'foo'}) with pytest.raises(ValidationError) as exc_info: Settings() assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'field required', 'type': 'value_error.missing'}] def test_case_insensitive(monkeypatch): class Settings1(BaseSettings): foo: str with pytest.warns(DeprecationWarning, match='Settings2: "case_insensitive" is deprecated on BaseSettings'): class Settings2(BaseSettings): foo: str class Config: case_insensitive = False assert Settings1.__config__.case_sensitive is False assert Settings2.__config__.case_sensitive is True def test_nested_dataclass(env): @dataclasses.dataclass class MyDataclass: foo: int bar: str class Settings(BaseSettings): n: MyDataclass env.set('N', '[123, "bar value"]') s = Settings() assert isinstance(s.n, MyDataclass) assert s.n.foo == 123 assert s.n.bar == 'bar value' def test_env_takes_precedence(env): class Settings(BaseSettings): foo: int bar: str def _build_values(self, init_kwargs): return {**init_kwargs, **self._build_environ()} env.set('BAR', 'env setting') s = Settings(foo='123', bar='argument') assert s.foo == 123 assert s.bar == 'env setting' def test_config_file_settings_nornir(env): """ See https://github.com/samuelcolvin/pydantic/pull/341#issuecomment-450378771 """ class Settings(BaseSettings): a: str b: str c: str def _build_values(self, init_kwargs): config_settings = init_kwargs.pop('__config_settings__') return {**config_settings, **init_kwargs, **self._build_environ()} env.set('C', 'env setting c') config = {'a': 'config a', 'b': 'config b', 'c': 'config c'} s = Settings(__config_settings__=config, b='argument b', c='argument c') assert s.a == 'config a' assert s.b == 'argument b' assert s.c == 'env setting c' def test_alias_set(env): class Settings(BaseSettings): foo: str = 'default foo' bar: str = 'bar default' class Config: fields = {'foo': {'env': 'foo_env'}} assert Settings.__fields__['bar'].name == 'bar' assert Settings.__fields__['bar'].alias == 'bar' assert Settings.__fields__['foo'].name == 'foo' assert Settings.__fields__['foo'].alias == 'foo' class SubSettings(Settings): spam: str = 'spam default' assert SubSettings.__fields__['bar'].name == 'bar' assert SubSettings.__fields__['bar'].alias == 'bar' assert SubSettings.__fields__['foo'].name == 'foo' assert SubSettings.__fields__['foo'].alias == 'foo' assert SubSettings().dict() == {'foo': 'default foo', 'bar': 'bar default', 'spam': 'spam default'} env.set('foo_env', 'fff') assert SubSettings().dict() == {'foo': 'fff', 'bar': 'bar default', 'spam': 'spam 
default'} env.set('bar', 'bbb') assert SubSettings().dict() == {'foo': 'fff', 'bar': 'bbb', 'spam': 'spam default'} env.set('spam', 'sss') assert SubSettings().dict() == {'foo': 'fff', 'bar': 'bbb', 'spam': 'sss'} pydantic-1.2/tests/test_tools.py000066400000000000000000000043351357000400300170770ustar00rootroot00000000000000from typing import Dict, List, Mapping import pytest from pydantic import BaseModel, ValidationError from pydantic.dataclasses import dataclass from pydantic.tools import parse_file_as, parse_obj_as @pytest.mark.parametrize('obj,type_,parsed', [('1', int, 1), (['1'], List[int], [1])]) def test_parse_obj(obj, type_, parsed): assert parse_obj_as(type_, obj) == parsed def test_parse_obj_as_model(): class Model(BaseModel): x: int y: bool z: str model_inputs = {'x': '1', 'y': 'true', 'z': 'abc'} assert parse_obj_as(Model, model_inputs) == Model(**model_inputs) def test_parse_obj_preserves_subclasses(): class ModelA(BaseModel): a: Mapping[int, str] class ModelB(ModelA): b: int model_b = ModelB(a={1: 'f'}, b=2) parsed = parse_obj_as(List[ModelA], [model_b]) assert parsed == [model_b] def test_parse_obj_fails(): with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a') assert exc_info.value.errors() == [ {'loc': ('__root__',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert exc_info.value.model.__name__ == 'ParsingModel[int]' def test_parsing_model_naming(): with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a') assert str(exc_info.value).split('\n')[0] == '1 validation error for ParsingModel[int]' with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a', type_name='ParsingModel') assert str(exc_info.value).split('\n')[0] == '1 validation error for ParsingModel' with pytest.raises(ValidationError) as exc_info: parse_obj_as(int, 'a', type_name=lambda type_: type_.__name__) assert str(exc_info.value).split('\n')[0] == '1 validation error for int' def test_parse_as_dataclass(): @dataclass class PydanticDataclass: x: int inputs = {'x': '1'} assert parse_obj_as(PydanticDataclass, inputs) == PydanticDataclass(1) def test_parse_mapping_as(): inputs = {'1': '2'} assert parse_obj_as(Dict[int, int], inputs) == {1: 2} def test_parse_file_as(tmp_path): p = tmp_path / 'test.json' p.write_text('{"1": "2"}') assert parse_file_as(Dict[int, int], p) == {1: 2} pydantic-1.2/tests/test_types.py000066400000000000000000001615621357000400300171110ustar00rootroot00000000000000import os import sys import uuid from collections import OrderedDict from datetime import date, datetime, time, timedelta from decimal import Decimal from enum import Enum, IntEnum from pathlib import Path from typing import Dict, FrozenSet, Iterator, List, MutableSet, NewType, Pattern, Sequence, Set, Tuple from uuid import UUID import pytest from pydantic import ( UUID1, UUID3, UUID4, UUID5, BaseModel, ByteSize, ConfigError, DirectoryPath, EmailStr, FilePath, Json, NameEmail, NegativeFloat, NegativeInt, PositiveFloat, PositiveInt, PyObject, SecretBytes, SecretStr, StrictBool, StrictFloat, StrictInt, StrictStr, ValidationError, conbytes, condecimal, confloat, conint, conlist, constr, create_model, errors, validator, ) try: import email_validator except ImportError: email_validator = None try: import typing_extensions except ImportError: typing_extensions = None class ConBytesModel(BaseModel): v: conbytes(max_length=10) = b'foobar' def foo(): return 42 def test_constrained_bytes_good(): m = ConBytesModel(v=b'short') assert m.v == b'short' def 
test_constrained_bytes_default(): m = ConBytesModel() assert m.v == b'foobar' def test_constrained_bytes_too_long(): with pytest.raises(ValidationError) as exc_info: ConBytesModel(v=b'this is too long') assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 10 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 10}, } ] def test_constrained_list_good(): class ConListModelMax(BaseModel): v: conlist(int) = [] m = ConListModelMax(v=[1, 2, 3]) assert m.v == [1, 2, 3] def test_constrained_list_default(): class ConListModelMax(BaseModel): v: conlist(int) = [] m = ConListModelMax() assert m.v == [] def test_constrained_list_too_long(): class ConListModelMax(BaseModel): v: conlist(int, max_items=10) = [] with pytest.raises(ValidationError) as exc_info: ConListModelMax(v=list(str(i) for i in range(11))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 10 items', 'type': 'value_error.list.max_items', 'ctx': {'limit_value': 10}, } ] def test_constrained_list_too_short(): class ConListModelMin(BaseModel): v: conlist(int, min_items=1) with pytest.raises(ValidationError) as exc_info: ConListModelMin(v=[]) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at least 1 items', 'type': 'value_error.list.min_items', 'ctx': {'limit_value': 1}, } ] def test_constrained_list_constraints(): class ConListModelBoth(BaseModel): v: conlist(int, min_items=7, max_items=11) m = ConListModelBoth(v=list(range(7))) assert m.v == list(range(7)) m = ConListModelBoth(v=list(range(11))) assert m.v == list(range(11)) with pytest.raises(ValidationError) as exc_info: ConListModelBoth(v=list(range(6))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at least 7 items', 'type': 'value_error.list.min_items', 'ctx': {'limit_value': 7}, } ] with pytest.raises(ValidationError) as exc_info: ConListModelBoth(v=list(range(12))) assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 11 items', 'type': 'value_error.list.max_items', 'ctx': {'limit_value': 11}, } ] with pytest.raises(ValidationError) as exc_info: ConListModelBoth(v=1) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_constrained_list_item_type_fails(): class ConListModel(BaseModel): v: conlist(int) = [] with pytest.raises(ValidationError) as exc_info: ConListModel(v=['a', 'b', 'c']) assert exc_info.value.errors() == [ {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] class ConStringModel(BaseModel): v: constr(max_length=10) = 'foobar' def test_constrained_str_good(): m = ConStringModel(v='short') assert m.v == 'short' def test_constrained_str_default(): m = ConStringModel() assert m.v == 'foobar' def test_constrained_str_too_long(): with pytest.raises(ValidationError) as exc_info: ConStringModel(v='this is too long') assert exc_info.value.errors() == [ { 'loc': ('v',), 'msg': 'ensure this value has at most 10 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 10}, } ] def test_module_import(): class PyObjectModel(BaseModel): module: PyObject = 'os.path' m = PyObjectModel() assert m.module == os.path with pytest.raises(ValidationError) as exc_info: 
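# an import string that cannot be resolved is rejected with a descriptive type_error.pyobject, as asserted below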
PyObjectModel(module='foobar') assert exc_info.value.errors() == [ { 'loc': ('module',), 'msg': 'ensure this value contains valid import path or valid callable: ' '"foobar" doesn\'t look like a module path', 'type': 'type_error.pyobject', 'ctx': {'error_message': '"foobar" doesn\'t look like a module path'}, } ] with pytest.raises(ValidationError) as exc_info: PyObjectModel(module='os.missing') assert exc_info.value.errors() == [ { 'loc': ('module',), 'msg': 'ensure this value contains valid import path or valid callable: ' 'Module "os" does not define a "missing" attribute', 'type': 'type_error.pyobject', 'ctx': {'error_message': 'Module "os" does not define a "missing" attribute'}, } ] with pytest.raises(ValidationError) as exc_info: PyObjectModel(module=[1, 2, 3]) assert exc_info.value.errors() == [ { 'loc': ('module',), 'msg': 'ensure this value contains valid import path or valid callable: ' 'value is neither a valid import path not a valid callable', 'type': 'type_error.pyobject', 'ctx': {'error_message': 'value is neither a valid import path not a valid callable'}, } ] def test_pyobject_none(): class PyObjectModel(BaseModel): module: PyObject = None m = PyObjectModel() assert m.module is None def test_pyobject_callable(): class PyObjectModel(BaseModel): foo: PyObject = foo m = PyObjectModel() assert m.foo is foo assert m.foo() == 42 class CheckModel(BaseModel): bool_check = True str_check = 's' bytes_check = b's' int_check = 1 float_check = 1.0 uuid_check: UUID = UUID('7bd00d58-6485-4ca6-b889-3da6d8df3ee4') decimal_check: Decimal = Decimal('42.24') class Config: anystr_strip_whitespace = True max_anystr_length = 10 class BoolCastable: def __bool__(self) -> bool: return True @pytest.mark.parametrize( 'field,value,result', [ ('bool_check', True, True), ('bool_check', 1, True), ('bool_check', 'y', True), ('bool_check', 'Y', True), ('bool_check', 'yes', True), ('bool_check', 'Yes', True), ('bool_check', 'YES', True), ('bool_check', 'true', True), ('bool_check', 'True', True), ('bool_check', 'TRUE', True), ('bool_check', 'on', True), ('bool_check', 'On', True), ('bool_check', 'ON', True), ('bool_check', '1', True), ('bool_check', 't', True), ('bool_check', 'T', True), ('bool_check', b'TRUE', True), ('bool_check', False, False), ('bool_check', 0, False), ('bool_check', 'n', False), ('bool_check', 'N', False), ('bool_check', 'no', False), ('bool_check', 'No', False), ('bool_check', 'NO', False), ('bool_check', 'false', False), ('bool_check', 'False', False), ('bool_check', 'FALSE', False), ('bool_check', 'off', False), ('bool_check', 'Off', False), ('bool_check', 'OFF', False), ('bool_check', '0', False), ('bool_check', 'f', False), ('bool_check', 'F', False), ('bool_check', b'FALSE', False), ('bool_check', None, ValidationError), ('bool_check', '', ValidationError), ('bool_check', [], ValidationError), ('bool_check', {}, ValidationError), ('bool_check', [1, 2, 3, 4], ValidationError), ('bool_check', {1: 2, 3: 4}, ValidationError), ('bool_check', b'2', ValidationError), ('bool_check', '2', ValidationError), ('bool_check', 2, ValidationError), ('bool_check', b'\x81', ValidationError), ('bool_check', BoolCastable(), ValidationError), ('str_check', 's', 's'), ('str_check', ' s ', 's'), ('str_check', b's', 's'), ('str_check', b' s ', 's'), ('str_check', 1, '1'), ('str_check', 'x' * 11, ValidationError), ('str_check', b'x' * 11, ValidationError), ('bytes_check', 's', b's'), ('bytes_check', ' s ', b's'), ('bytes_check', b's', b's'), ('bytes_check', b' s ', b's'), ('bytes_check', 1, b'1'), 
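# bytes coercion mirrors str coercion: whitespace is stripped (anystr_strip_whitespace is enabled on CheckModel), and ints and bools are stringified before encoding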
('bytes_check', bytearray('xx', encoding='utf8'), b'xx'), ('bytes_check', True, b'True'), ('bytes_check', False, b'False'), ('bytes_check', {}, ValidationError), ('bytes_check', 'x' * 11, ValidationError), ('bytes_check', b'x' * 11, ValidationError), ('int_check', 1, 1), ('int_check', 1.9, 1), ('int_check', '1', 1), ('int_check', '1.9', ValidationError), ('int_check', b'1', 1), ('int_check', 12, 12), ('int_check', '12', 12), ('int_check', b'12', 12), ('float_check', 1, 1.0), ('float_check', 1.0, 1.0), ('float_check', '1.0', 1.0), ('float_check', '1', 1.0), ('float_check', b'1.0', 1.0), ('float_check', b'1', 1.0), ('uuid_check', 'ebcdab58-6eb8-46fb-a190-d07a33e9eac8', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8'), UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', b'ebcdab58-6eb8-46fb-a190-d07a33e9eac8', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', 'ebcdab58-6eb8-46fb-a190-', ValidationError), ('uuid_check', 123, ValidationError), ('decimal_check', 42.24, Decimal('42.24')), ('decimal_check', '42.24', Decimal('42.24')), ('decimal_check', b'42.24', Decimal('42.24')), ('decimal_check', ' 42.24 ', Decimal('42.24')), ('decimal_check', Decimal('42.24'), Decimal('42.24')), ('decimal_check', 'not a valid decimal', ValidationError), ('decimal_check', 'NaN', ValidationError), ], ) def test_default_validators(field, value, result): kwargs = {field: value} if result == ValidationError: with pytest.raises(ValidationError): CheckModel(**kwargs) else: assert CheckModel(**kwargs).dict()[field] == result class StrModel(BaseModel): str_check: str class Config: min_anystr_length = 5 max_anystr_length = 10 def test_string_too_long(): with pytest.raises(ValidationError) as exc_info: StrModel(str_check='x' * 150) assert exc_info.value.errors() == [ { 'loc': ('str_check',), 'msg': 'ensure this value has at most 10 characters', 'type': 'value_error.any_str.max_length', 'ctx': {'limit_value': 10}, } ] def test_string_too_short(): with pytest.raises(ValidationError) as exc_info: StrModel(str_check='x') assert exc_info.value.errors() == [ { 'loc': ('str_check',), 'msg': 'ensure this value has at least 5 characters', 'type': 'value_error.any_str.min_length', 'ctx': {'limit_value': 5}, } ] class DatetimeModel(BaseModel): dt: datetime = ... date_: date = ... time_: time = ... duration: timedelta = ... 
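# DatetimeModel's fields use `= ...` (Ellipsis), pydantic's explicit marker for a required field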
def test_datetime_successful(): m = DatetimeModel(dt='2017-10-5T19:47:07', date_=1_494_012_000, time_='10:20:30.400', duration='15:30.0001') assert m.dt == datetime(2017, 10, 5, 19, 47, 7) assert m.date_ == date(2017, 5, 5) assert m.time_ == time(10, 20, 30, 400_000) assert m.duration == timedelta(minutes=15, seconds=30, microseconds=100) def test_datetime_errors(): with pytest.raises(ValueError) as exc_info: DatetimeModel(dt='2017-13-5T19:47:07', date_='XX1494012000', time_='25:20:30.400', duration='15:30.0001 broken') assert exc_info.value.errors() == [ {'loc': ('dt',), 'msg': 'invalid datetime format', 'type': 'value_error.datetime'}, {'loc': ('date_',), 'msg': 'invalid date format', 'type': 'value_error.date'}, {'loc': ('time_',), 'msg': 'invalid time format', 'type': 'value_error.time'}, {'loc': ('duration',), 'msg': 'invalid duration format', 'type': 'value_error.duration'}, ] class FruitEnum(str, Enum): pear = 'pear' banana = 'banana' class ToolEnum(IntEnum): spanner = 1 wrench = 2 class CookingModel(BaseModel): fruit: FruitEnum = FruitEnum.pear tool: ToolEnum = ToolEnum.spanner def test_enum_successful(): m = CookingModel(tool=2) assert m.fruit == FruitEnum.pear assert m.tool == ToolEnum.wrench assert repr(m.tool) == '<ToolEnum.wrench: 2>' def test_enum_fails(): with pytest.raises(ValueError) as exc_info: CookingModel(tool=3) assert exc_info.value.errors() == [ { 'loc': ('tool',), 'msg': 'value is not a valid enumeration member; permitted: 1, 2', 'type': 'type_error.enum', 'ctx': {'enum_values': [ToolEnum.spanner, ToolEnum.wrench]}, } ] assert len(exc_info.value.json()) == 217 def test_int_enum_successful_for_str_int(): m = CookingModel(tool='2') assert m.tool == ToolEnum.wrench assert repr(m.tool) == '<ToolEnum.wrench: 2>' @pytest.mark.skipif(not email_validator, reason='email_validator not installed') def test_string_success(): class MoreStringsModel(BaseModel): str_strip_enabled: constr(strip_whitespace=True) str_strip_disabled: constr(strip_whitespace=False) str_regex: constr(regex=r'^xxx\d{3}$') = ... str_min_length: constr(min_length=5) = ... str_curtailed: constr(curtail_length=5) = ... str_email: EmailStr = ... name_email: NameEmail = ... m = MoreStringsModel( str_strip_enabled=' xxx123 ', str_strip_disabled=' xxx123 ', str_regex='xxx123', str_min_length='12345', str_curtailed='123456', str_email='foobar@example.com ', name_email='foo bar <foobaR@example.com>', ) assert m.str_strip_enabled == 'xxx123' assert m.str_strip_disabled == ' xxx123 ' assert m.str_regex == 'xxx123' assert m.str_curtailed == '12345' assert m.str_email == 'foobar@example.com' assert repr(m.name_email) == "NameEmail(name='foo bar', email='foobaR@example.com')" assert str(m.name_email) == 'foo bar <foobaR@example.com>' assert m.name_email.name == 'foo bar' assert m.name_email.email == 'foobaR@example.com' @pytest.mark.skipif(not email_validator, reason='email_validator not installed') def test_string_fails(): class MoreStringsModel(BaseModel): str_regex: constr(regex=r'^xxx\d{3}$') = ... str_min_length: constr(min_length=5) = ... str_curtailed: constr(curtail_length=5) = ... str_email: EmailStr = ... name_email: NameEmail = ...
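# str_curtailed never errors: curtail_length truncates over-long input instead of rejecting it, so only the other four fields appear in the expected errors below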
with pytest.raises(ValidationError) as exc_info: MoreStringsModel( str_regex='xxx123xxx', str_min_length='1234', str_curtailed='123', # doesn't fail str_email='foobar<@example.com', name_email='foobar @example.com', ) assert exc_info.value.errors() == [ { 'loc': ('str_regex',), 'msg': 'string does not match regex "^xxx\\d{3}$"', 'type': 'value_error.str.regex', 'ctx': {'pattern': '^xxx\\d{3}$'}, }, { 'loc': ('str_min_length',), 'msg': 'ensure this value has at least 5 characters', 'type': 'value_error.any_str.min_length', 'ctx': {'limit_value': 5}, }, {'loc': ('str_email',), 'msg': 'value is not a valid email address', 'type': 'value_error.email'}, {'loc': ('name_email',), 'msg': 'value is not a valid email address', 'type': 'value_error.email'}, ] @pytest.mark.skipif(email_validator, reason='email_validator is installed') def test_email_validator_not_installed_email_str(): with pytest.raises(ImportError): class Model(BaseModel): str_email: EmailStr = ... @pytest.mark.skipif(email_validator, reason='email_validator is installed') def test_email_validator_not_installed_name_email(): with pytest.raises(ImportError): class Model(BaseModel): str_email: NameEmail = ... def test_dict(): class Model(BaseModel): v: dict assert Model(v={1: 10, 2: 20}).v == {1: 10, 2: 20} assert Model(v=[(1, 2), (3, 4)]).v == {1: 2, 3: 4} with pytest.raises(ValidationError) as exc_info: Model(v=[1, 2, 3]) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}] @pytest.mark.parametrize( 'value,result', ( ([1, 2, '3'], [1, 2, '3']), ((1, 2, '3'), [1, 2, '3']), ({1, 2, '3'}, list({1, 2, '3'})), ((i ** 2 for i in range(5)), [0, 1, 4, 9, 16]), ), ) def test_list_success(value, result): class Model(BaseModel): v: list assert Model(v=value).v == result @pytest.mark.parametrize('value', (123, '123')) def test_list_fails(value): class Model(BaseModel): v: list with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_ordered_dict(): class Model(BaseModel): v: OrderedDict assert Model(v=OrderedDict([(1, 10), (2, 20)])).v == OrderedDict([(1, 10), (2, 20)]) assert Model(v={1: 10, 2: 20}).v in (OrderedDict([(1, 10), (2, 20)]), OrderedDict([(2, 20), (1, 10)])) assert Model(v=[(1, 2), (3, 4)]).v == OrderedDict([(1, 2), (3, 4)]) with pytest.raises(ValidationError) as exc_info: Model(v=[1, 2, 3]) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}] @pytest.mark.parametrize( 'value,result', ( ([1, 2, '3'], (1, 2, '3')), ((1, 2, '3'), (1, 2, '3')), ({1, 2, '3'}, tuple({1, 2, '3'})), ((i ** 2 for i in range(5)), (0, 1, 4, 9, 16)), ), ) def test_tuple_success(value, result): class Model(BaseModel): v: tuple assert Model(v=value).v == result @pytest.mark.parametrize('value', (123, '123')) def test_tuple_fails(value): class Model(BaseModel): v: tuple with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid tuple', 'type': 'type_error.tuple'}] @pytest.mark.parametrize( 'value,cls,result', ( ([1, 2, '3'], int, (1, 2, 3)), ((1, 2, '3'), int, (1, 2, 3)), ((i ** 2 for i in range(5)), int, (0, 1, 4, 9, 16)), (('a', 'b', 'c'), str, ('a', 'b', 'c')), ), ) def test_tuple_variable_len_success(value, cls, result): class Model(BaseModel): v: Tuple[cls, ...] 
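# Tuple[X, ...] is the variable-length homogeneous tuple form: every element is coerced to X, unlike the fixed-length Tuple[int, str] form exercised further down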
assert Model(v=value).v == result @pytest.mark.parametrize( 'value, cls, exc', [ (('a', 'b', [1, 2], 'c'), str, [{'loc': ('v', 2), 'msg': 'str type expected', 'type': 'type_error.str'}]), ( ('a', 'b', [1, 2], 'c', [3, 4]), str, [ {'loc': ('v', 2), 'msg': 'str type expected', 'type': 'type_error.str'}, {'loc': ('v', 4), 'msg': 'str type expected', 'type': 'type_error.str'}, ], ), ], ) def test_tuple_variable_len_fails(value, cls, exc): class Model(BaseModel): v: Tuple[cls, ...] with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == exc @pytest.mark.parametrize( 'value,result', ( ({1, 2, 2, '3'}, {1, 2, '3'}), ((1, 2, 2, '3'), {1, 2, '3'}), ([1, 2, 2, '3'], {1, 2, '3'}), ({i ** 2 for i in range(5)}, {0, 1, 4, 9, 16}), ), ) def test_set_success(value, result): class Model(BaseModel): v: set assert Model(v=value).v == result @pytest.mark.parametrize('value', (123, '123')) def test_set_fails(value): class Model(BaseModel): v: set with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid set', 'type': 'type_error.set'}] def test_list_type_fails(): class Model(BaseModel): v: List[int] with pytest.raises(ValidationError) as exc_info: Model(v='123') assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}] def test_set_type_fails(): class Model(BaseModel): v: Set[int] with pytest.raises(ValidationError) as exc_info: Model(v='123') assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid set', 'type': 'type_error.set'}] @pytest.mark.parametrize( 'cls, value,result', ( (int, [1, 2, 3], [1, 2, 3]), (int, (1, 2, 3), (1, 2, 3)), (float, {1.0, 2.0, 3.0}, {1.0, 2.0, 3.0}), (Set[int], [{1, 2}, {3, 4}, {5, 6}], [{1, 2}, {3, 4}, {5, 6}]), (Tuple[int, str], ((1, 'a'), (2, 'b'), (3, 'c')), ((1, 'a'), (2, 'b'), (3, 'c'))), ), ) def test_sequence_success(cls, value, result): class Model(BaseModel): v: Sequence[cls] assert Model(v=value).v == result @pytest.mark.parametrize( 'cls, value,result', ( (int, (i for i in range(3)), iter([0, 1, 2])), (float, (float(i) for i in range(3)), iter([0.0, 1.0, 2.0])), (str, (str(i) for i in range(3)), iter(['0', '1', '2'])), ), ) def test_sequence_generator_success(cls, value, result): class Model(BaseModel): v: Sequence[cls] validated = Model(v=value).v assert isinstance(validated, Iterator) assert list(validated) == list(result) @pytest.mark.parametrize( 'cls,value,errors', ( ( int, (i for i in ['a', 'b', 'c']), [ {'loc': ('v', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('v', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ], ), ( float, (i for i in ['a', 'b', 'c']), [ {'loc': ('v', 0), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, {'loc': ('v', 1), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, {'loc': ('v', 2), 'msg': 'value is not a valid float', 'type': 'type_error.float'}, ], ), ), ) def test_sequence_generator_fails(cls, value, errors): class Model(BaseModel): v: Sequence[cls] with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize( 'cls,value,errors', ( (int, [1, 'a', 3], [{'loc': ('v', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]), (int, (1, 2, 'a'), [{'loc': ('v', 2), 
'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]), (float, range(10), [{'loc': ('v',), 'msg': 'value is not a valid sequence', 'type': 'type_error.sequence'}]), (float, ('a', 2.2, 3.3), [{'loc': ('v', 0), 'msg': 'value is not a valid float', 'type': 'type_error.float'}]), (float, (1.1, 2.2, 'a'), [{'loc': ('v', 2), 'msg': 'value is not a valid float', 'type': 'type_error.float'}]), ( Set[int], [{1, 2}, {2, 3}, {'d'}], [{'loc': ('v', 2, 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}], ), ( Tuple[int, str], ((1, 'a'), ('a', 'a'), (3, 'c')), [{'loc': ('v', 1, 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}], ), ( List[int], [{'a': 1, 'b': 2}, [1, 2], [2, 3]], [{'loc': ('v', 0), 'msg': 'value is not a valid list', 'type': 'type_error.list'}], ), ), ) def test_sequence_fails(cls, value, errors): class Model(BaseModel): v: Sequence[cls] with pytest.raises(ValidationError) as exc_info: Model(v=value) assert exc_info.value.errors() == errors def test_int_validation(): class Model(BaseModel): a: PositiveInt = None b: NegativeInt = None c: conint(gt=4, lt=10) = None d: conint(ge=0, le=10) = None e: conint(multiple_of=5) = None m = Model(a=5, b=-5, c=5, d=0, e=25) assert m == {'a': 5, 'b': -5, 'c': 5, 'd': 0, 'e': 25} with pytest.raises(ValidationError) as exc_info: Model(a=-5, b=5, c=-5, d=11, e=42) assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'ensure this value is greater than 0', 'type': 'value_error.number.not_gt', 'ctx': {'limit_value': 0}, }, { 'loc': ('b',), 'msg': 'ensure this value is less than 0', 'type': 'value_error.number.not_lt', 'ctx': {'limit_value': 0}, }, { 'loc': ('c',), 'msg': 'ensure this value is greater than 4', 'type': 'value_error.number.not_gt', 'ctx': {'limit_value': 4}, }, { 'loc': ('d',), 'msg': 'ensure this value is less than or equal to 10', 'type': 'value_error.number.not_le', 'ctx': {'limit_value': 10}, }, { 'loc': ('e',), 'msg': 'ensure this value is a multiple of 5', 'type': 'value_error.number.not_multiple', 'ctx': {'multiple_of': 5}, }, ] def test_float_validation(): class Model(BaseModel): a: PositiveFloat = None b: NegativeFloat = None c: confloat(gt=4, lt=12.2) = None d: confloat(ge=0, le=9.9) = None e: confloat(multiple_of=0.5) = None m = Model(a=5.1, b=-5.2, c=5.3, d=9.9, e=2.5) assert m.dict() == {'a': 5.1, 'b': -5.2, 'c': 5.3, 'd': 9.9, 'e': 2.5} with pytest.raises(ValidationError) as exc_info: Model(a=-5.1, b=5.2, c=-5.3, d=9.91, e=4.2) assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'ensure this value is greater than 0', 'type': 'value_error.number.not_gt', 'ctx': {'limit_value': 0}, }, { 'loc': ('b',), 'msg': 'ensure this value is less than 0', 'type': 'value_error.number.not_lt', 'ctx': {'limit_value': 0}, }, { 'loc': ('c',), 'msg': 'ensure this value is greater than 4', 'type': 'value_error.number.not_gt', 'ctx': {'limit_value': 4}, }, { 'loc': ('d',), 'msg': 'ensure this value is less than or equal to 9.9', 'type': 'value_error.number.not_le', 'ctx': {'limit_value': 9.9}, }, { 'loc': ('e',), 'msg': 'ensure this value is a multiple of 0.5', 'type': 'value_error.number.not_multiple', 'ctx': {'multiple_of': 0.5}, }, ] def test_strict_str(): class Model(BaseModel): v: StrictStr assert Model(v='foobar').v == 'foobar' with pytest.raises(ValidationError): Model(v=123) with pytest.raises(ValidationError): Model(v=b'foobar') def test_strict_bool(): class Model(BaseModel): v: StrictBool assert Model(v=True).v is True assert Model(v=False).v is False with 
pytest.raises(ValidationError): Model(v=1) with pytest.raises(ValidationError): Model(v='1') with pytest.raises(ValidationError): Model(v=b'1') def test_strict_int(): class Model(BaseModel): v: StrictInt assert Model(v=123456).v == 123456 with pytest.raises(ValidationError, match='value is not a valid int'): Model(v='123456') with pytest.raises(ValidationError, match='value is not a valid int'): Model(v=3.14159) def test_strict_float(): class Model(BaseModel): v: StrictFloat assert Model(v=3.14159).v == 3.14159 with pytest.raises(ValidationError, match='value is not a valid float'): Model(v='3.14159') with pytest.raises(ValidationError, match='value is not a valid float'): Model(v=123456) def test_bool_unhashable_fails(): class Model(BaseModel): v: bool with pytest.raises(ValidationError) as exc_info: Model(v={}) assert exc_info.value.errors() == [ {'loc': ('v',), 'msg': 'value could not be parsed to a boolean', 'type': 'type_error.bool'} ] def test_uuid_error(): class Model(BaseModel): v: UUID with pytest.raises(ValidationError) as exc_info: Model(v='ebcdab58-6eb8-46fb-a190-d07a3') assert exc_info.value.errors() == [{'loc': ('v',), 'msg': 'value is not a valid uuid', 'type': 'type_error.uuid'}] with pytest.raises(ValidationError): Model(v=None) class UUIDModel(BaseModel): a: UUID1 b: UUID3 c: UUID4 d: UUID5 def test_uuid_validation(): a = uuid.uuid1() b = uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org') c = uuid.uuid4() d = uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org') m = UUIDModel(a=a, b=b, c=c, d=d) assert m.dict() == {'a': a, 'b': b, 'c': c, 'd': d} with pytest.raises(ValidationError) as exc_info: UUIDModel(a=d, b=c, c=b, d=a) assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': 'uuid version 1 expected', 'type': 'value_error.uuid.version', 'ctx': {'required_version': 1}, }, { 'loc': ('b',), 'msg': 'uuid version 3 expected', 'type': 'value_error.uuid.version', 'ctx': {'required_version': 3}, }, { 'loc': ('c',), 'msg': 'uuid version 4 expected', 'type': 'value_error.uuid.version', 'ctx': {'required_version': 4}, }, { 'loc': ('d',), 'msg': 'uuid version 5 expected', 'type': 'value_error.uuid.version', 'ctx': {'required_version': 5}, }, ] def test_anystr_strip_whitespace_enabled(): class Model(BaseModel): str_check: str bytes_check: bytes class Config: anystr_strip_whitespace = True m = Model(str_check=' 123 ', bytes_check=b' 456 ') assert m.str_check == '123' assert m.bytes_check == b'456' def test_anystr_strip_whitespace_disabled(): class Model(BaseModel): str_check: str bytes_check: bytes class Config: anystr_strip_whitespace = False m = Model(str_check=' 123 ', bytes_check=b' 456 ') assert m.str_check == ' 123 ' assert m.bytes_check == b' 456 ' @pytest.mark.parametrize( 'type_,value,result', [ (condecimal(gt=Decimal('42.24')), Decimal('43'), Decimal('43')), ( condecimal(gt=Decimal('42.24')), Decimal('42'), [ { 'loc': ('foo',), 'msg': 'ensure this value is greater than 42.24', 'type': 'value_error.number.not_gt', 'ctx': {'limit_value': Decimal('42.24')}, } ], ), (condecimal(lt=Decimal('42.24')), Decimal('42'), Decimal('42')), ( condecimal(lt=Decimal('42.24')), Decimal('43'), [ { 'loc': ('foo',), 'msg': 'ensure this value is less than 42.24', 'type': 'value_error.number.not_lt', 'ctx': {'limit_value': Decimal('42.24')}, } ], ), (condecimal(ge=Decimal('42.24')), Decimal('43'), Decimal('43')), (condecimal(ge=Decimal('42.24')), Decimal('42.24'), Decimal('42.24')), ( condecimal(ge=Decimal('42.24')), Decimal('42'), [ { 'loc': ('foo',), 'msg': 'ensure this value is greater than or equal to 
42.24', 'type': 'value_error.number.not_ge', 'ctx': {'limit_value': Decimal('42.24')}, } ], ), (condecimal(le=Decimal('42.24')), Decimal('42'), Decimal('42')), (condecimal(le=Decimal('42.24')), Decimal('42.24'), Decimal('42.24')), ( condecimal(le=Decimal('42.24')), Decimal('43'), [ { 'loc': ('foo',), 'msg': 'ensure this value is less than or equal to 42.24', 'type': 'value_error.number.not_le', 'ctx': {'limit_value': Decimal('42.24')}, } ], ), (condecimal(max_digits=2, decimal_places=2), Decimal('0.99'), Decimal('0.99')), ( condecimal(max_digits=2, decimal_places=1), Decimal('0.99'), [ { 'loc': ('foo',), 'msg': 'ensure that there are no more than 1 decimal places', 'type': 'value_error.decimal.max_places', 'ctx': {'decimal_places': 1}, } ], ), ( condecimal(max_digits=3, decimal_places=1), Decimal('999'), [ { 'loc': ('foo',), 'msg': 'ensure that there are no more than 2 digits before the decimal point', 'type': 'value_error.decimal.whole_digits', 'ctx': {'whole_digits': 2}, } ], ), (condecimal(max_digits=4, decimal_places=1), Decimal('999'), Decimal('999')), (condecimal(max_digits=20, decimal_places=2), Decimal('742403889818000000'), Decimal('742403889818000000')), (condecimal(max_digits=20, decimal_places=2), Decimal('7.42403889818E+17'), Decimal('7.42403889818E+17')), ( condecimal(max_digits=20, decimal_places=2), Decimal('7424742403889818000000'), [ { 'loc': ('foo',), 'msg': 'ensure that there are no more than 20 digits in total', 'type': 'value_error.decimal.max_digits', 'ctx': {'max_digits': 20}, } ], ), (condecimal(max_digits=5, decimal_places=2), Decimal('7304E-1'), Decimal('7304E-1')), ( condecimal(max_digits=5, decimal_places=2), Decimal('7304E-3'), [ { 'loc': ('foo',), 'msg': 'ensure that there are no more than 2 decimal places', 'type': 'value_error.decimal.max_places', 'ctx': {'decimal_places': 2}, } ], ), (condecimal(max_digits=5, decimal_places=5), Decimal('70E-5'), Decimal('70E-5')), ( condecimal(max_digits=5, decimal_places=5), Decimal('70E-6'), [ { 'loc': ('foo',), 'msg': 'ensure that there are no more than 5 digits in total', 'type': 'value_error.decimal.max_digits', 'ctx': {'max_digits': 5}, } ], ), *[ ( condecimal(decimal_places=2, max_digits=10), value, [{'loc': ('foo',), 'msg': 'value is not a valid decimal', 'type': 'value_error.decimal.not_finite'}], ) for value in ( 'NaN', '-NaN', '+NaN', 'sNaN', '-sNaN', '+sNaN', 'Inf', '-Inf', '+Inf', 'Infinity', '-Infinity', '+Infinity', ) ], *[ ( condecimal(decimal_places=2, max_digits=10), Decimal(value), [{'loc': ('foo',), 'msg': 'value is not a valid decimal', 'type': 'value_error.decimal.not_finite'}], ) for value in ( 'NaN', '-NaN', '+NaN', 'sNaN', '-sNaN', '+sNaN', 'Inf', '-Inf', '+Inf', 'Infinity', '-Infinity', '+Infinity', ) ], ( condecimal(multiple_of=Decimal('5')), Decimal('42'), [ { 'loc': ('foo',), 'msg': 'ensure this value is a multiple of 5', 'type': 'value_error.number.not_multiple', 'ctx': {'multiple_of': Decimal('5')}, } ], ), ], ) def test_decimal_validation(type_, value, result): model = create_model('DecimalModel', foo=(type_, ...)) if not isinstance(result, Decimal): with pytest.raises(ValidationError) as exc_info: model(foo=value) assert exc_info.value.errors() == result assert exc_info.value.json().startswith('[') else: assert model(foo=value).foo == result @pytest.mark.parametrize('value,result', (('/test/path', Path('/test/path')), (Path('/test/path'), Path('/test/path')))) def test_path_validation_success(value, result): class Model(BaseModel): foo: Path assert Model(foo=value).foo == result def
test_path_validation_fails(): class Model(BaseModel): foo: Path with pytest.raises(ValidationError) as exc_info: Model(foo=123) assert exc_info.value.errors() == [{'loc': ('foo',), 'msg': 'value is not a valid path', 'type': 'type_error.path'}] @pytest.mark.parametrize( 'value,result', (('tests/test_types.py', Path('tests/test_types.py')), (Path('tests/test_types.py'), Path('tests/test_types.py'))), ) def test_file_path_validation_success(value, result): class Model(BaseModel): foo: FilePath assert Model(foo=value).foo == result @pytest.mark.parametrize( 'value,errors', ( ( 'nonexistentfile', [ { 'loc': ('foo',), 'msg': 'file or directory at path "nonexistentfile" does not exist', 'type': 'value_error.path.not_exists', 'ctx': {'path': 'nonexistentfile'}, } ], ), ( Path('nonexistentfile'), [ { 'loc': ('foo',), 'msg': 'file or directory at path "nonexistentfile" does not exist', 'type': 'value_error.path.not_exists', 'ctx': {'path': 'nonexistentfile'}, } ], ), ( 'tests', [ { 'loc': ('foo',), 'msg': 'path "tests" does not point to a file', 'type': 'value_error.path.not_a_file', 'ctx': {'path': 'tests'}, } ], ), ( Path('tests'), [ { 'loc': ('foo',), 'msg': 'path "tests" does not point to a file', 'type': 'value_error.path.not_a_file', 'ctx': {'path': 'tests'}, } ], ), ), ) def test_file_path_validation_fails(value, errors): class Model(BaseModel): foo: FilePath with pytest.raises(ValidationError) as exc_info: Model(foo=value) assert exc_info.value.errors() == errors @pytest.mark.parametrize('value,result', (('tests', Path('tests')), (Path('tests'), Path('tests')))) def test_directory_path_validation_success(value, result): class Model(BaseModel): foo: DirectoryPath assert Model(foo=value).foo == result @pytest.mark.skipif(sys.platform.startswith('win'), reason='paths look different on windows') @pytest.mark.parametrize( 'value,errors', ( ( 'nonexistentdirectory', [ { 'loc': ('foo',), 'msg': 'file or directory at path "nonexistentdirectory" does not exist', 'type': 'value_error.path.not_exists', 'ctx': {'path': 'nonexistentdirectory'}, } ], ), ( Path('nonexistentdirectory'), [ { 'loc': ('foo',), 'msg': 'file or directory at path "nonexistentdirectory" does not exist', 'type': 'value_error.path.not_exists', 'ctx': {'path': 'nonexistentdirectory'}, } ], ), ( 'tests/test_types.py', [ { 'loc': ('foo',), 'msg': 'path "tests/test_types.py" does not point to a directory', 'type': 'value_error.path.not_a_directory', 'ctx': {'path': 'tests/test_types.py'}, } ], ), ( Path('tests/test_types.py'), [ { 'loc': ('foo',), 'msg': 'path "tests/test_types.py" does not point to a directory', 'type': 'value_error.path.not_a_directory', 'ctx': {'path': 'tests/test_types.py'}, } ], ), ), ) def test_directory_path_validation_fails(value, errors): class Model(BaseModel): foo: DirectoryPath with pytest.raises(ValidationError) as exc_info: Model(foo=value) assert exc_info.value.errors() == errors base_message = r'.*ensure this value is {msg} \(type=value_error.number.not_{ty}; limit_value={value}\).*' def test_number_gt(): class Model(BaseModel): a: conint(gt=-1) = 0 assert Model(a=0).dict() == {'a': 0} message = base_message.format(msg='greater than -1', ty='gt', value=-1) with pytest.raises(ValidationError, match=message): Model(a=-1) def test_number_ge(): class Model(BaseModel): a: conint(ge=0) = 0 assert Model(a=0).dict() == {'a': 0} message = base_message.format(msg='greater than or equal to 0', ty='ge', value=0) with pytest.raises(ValidationError, match=message): Model(a=-1) def test_number_lt(): class 
Model(BaseModel): a: conint(lt=5) = 0 assert Model(a=4).dict() == {'a': 4} message = base_message.format(msg='less than 5', ty='lt', value=5) with pytest.raises(ValidationError, match=message): Model(a=5) def test_number_le(): class Model(BaseModel): a: conint(le=5) = 0 assert Model(a=5).dict() == {'a': 5} message = base_message.format(msg='less than or equal to 5', ty='le', value=5) with pytest.raises(ValidationError, match=message): Model(a=6) @pytest.mark.parametrize('value', ((10), (100), (20))) def test_number_multiple_of_int_valid(value): class Model(BaseModel): a: conint(multiple_of=5) assert Model(a=value).dict() == {'a': value} @pytest.mark.parametrize('value', ((1337), (23), (6), (14))) def test_number_multiple_of_int_invalid(value): class Model(BaseModel): a: conint(multiple_of=5) multiple_message = base_message.replace('limit_value', 'multiple_of') message = multiple_message.format(msg='a multiple of 5', ty='multiple', value=5) with pytest.raises(ValidationError, match=message): Model(a=value) @pytest.mark.parametrize('value', ((0.2), (0.3), (0.4), (0.5), (1))) def test_number_multiple_of_float_valid(value): class Model(BaseModel): a: confloat(multiple_of=0.1) assert Model(a=value).dict() == {'a': value} @pytest.mark.parametrize('value', ((0.07), (1.27), (1.003))) def test_number_multiple_of_float_invalid(value): class Model(BaseModel): a: confloat(multiple_of=0.1) multiple_message = base_message.replace('limit_value', 'multiple_of') message = multiple_message.format(msg='a multiple of 0.1', ty='multiple', value=0.1) with pytest.raises(ValidationError, match=message): Model(a=value) @pytest.mark.parametrize('fn', [conint, confloat, condecimal]) def test_bounds_config_exceptions(fn): with pytest.raises(ConfigError): fn(gt=0, ge=0) with pytest.raises(ConfigError): fn(lt=0, le=0) def test_new_type_success(): a_type = NewType('a_type', int) b_type = NewType('b_type', a_type) class Model(BaseModel): a: a_type b: b_type m = Model(a=42, b=24) assert m.dict() == {'a': 42, 'b': 24} def test_new_type_fails(): a_type = NewType('a_type', int) b_type = NewType('b_type', a_type) class Model(BaseModel): a: a_type b: b_type with pytest.raises(ValidationError) as exc_info: Model(a='foo', b='bar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('b',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] def test_valid_simple_json(): class JsonModel(BaseModel): json_obj: Json obj = '{"a": 1, "b": [2, 3]}' assert JsonModel(json_obj=obj).dict() == {'json_obj': {'a': 1, 'b': [2, 3]}} def test_invalid_simple_json(): class JsonModel(BaseModel): json_obj: Json obj = '{a: 1, b: [2, 3]}' with pytest.raises(ValidationError) as exc_info: JsonModel(json_obj=obj) assert exc_info.value.errors()[0] == {'loc': ('json_obj',), 'msg': 'Invalid JSON', 'type': 'value_error.json'} def test_valid_simple_json_bytes(): class JsonModel(BaseModel): json_obj: Json obj = b'{"a": 1, "b": [2, 3]}' assert JsonModel(json_obj=obj).dict() == {'json_obj': {'a': 1, 'b': [2, 3]}} def test_valid_detailed_json(): class JsonDetailedModel(BaseModel): json_obj: Json[List[int]] obj = '[1, 2, 3]' assert JsonDetailedModel(json_obj=obj).dict() == {'json_obj': [1, 2, 3]} def test_invalid_detailed_json_value_error(): class JsonDetailedModel(BaseModel): json_obj: Json[List[int]] obj = '(1, 2, 3)' with pytest.raises(ValidationError) as exc_info: JsonDetailedModel(json_obj=obj) assert exc_info.value.errors()[0] == {'loc': ('json_obj',), 'msg': 
'Invalid JSON', 'type': 'value_error.json'} def test_valid_detailed_json_bytes(): class JsonDetailedModel(BaseModel): json_obj: Json[List[int]] obj = b'[1, 2, 3]' assert JsonDetailedModel(json_obj=obj).dict() == {'json_obj': [1, 2, 3]} def test_invalid_detailed_json_type_error(): class JsonDetailedModel(BaseModel): json_obj: Json[List[int]] obj = '["a", "b", "c"]' with pytest.raises(ValidationError) as exc_info: JsonDetailedModel(json_obj=obj) assert exc_info.value.errors() == [ {'loc': ('json_obj', 0), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('json_obj', 1), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, {'loc': ('json_obj', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}, ] def test_json_not_str(): class JsonDetailedModel(BaseModel): json_obj: Json[List[int]] obj = 12 with pytest.raises(ValidationError) as exc_info: JsonDetailedModel(json_obj=obj) assert exc_info.value.errors()[0] == { 'loc': ('json_obj',), 'msg': 'JSON object must be str, bytes or bytearray', 'type': 'type_error.json', } def test_json_pre_validator(): call_count = 0 class JsonModel(BaseModel): json_obj: Json @validator('json_obj', pre=True) def check(cls, v): assert v == '"foobar"' nonlocal call_count call_count += 1 return v assert JsonModel(json_obj='"foobar"').dict() == {'json_obj': 'foobar'} assert call_count == 1 def test_pattern(): class Foobar(BaseModel): pattern: Pattern f = Foobar(pattern=r'^whatev.r\d$') # SRE_Pattern for 3.6, Pattern for 3.7 assert f.pattern.__class__.__name__ in {'SRE_Pattern', 'Pattern'} # check it's really a proper pattern assert f.pattern.match('whatever1') assert not f.pattern.match(' whatever1') def test_pattern_error(): class Foobar(BaseModel): pattern: Pattern with pytest.raises(ValidationError) as exc_info: Foobar(pattern=f'[xx') assert exc_info.value.errors() == [ {'loc': ('pattern',), 'msg': 'Invalid regular expression', 'type': 'value_error.regex_pattern'} ] def test_secretstr(): class Foobar(BaseModel): password: SecretStr empty_password: SecretStr # Initialize the model. f = Foobar(password='1234', empty_password='') # Assert correct types. assert f.password.__class__.__name__ == 'SecretStr' assert f.empty_password.__class__.__name__ == 'SecretStr' # Assert str and repr are correct. assert str(f.password) == '**********' assert str(f.empty_password) == '' assert repr(f.password) == "SecretStr('**********')" assert repr(f.empty_password) == "SecretStr('')" # Assert retrieval of secret value is correct assert f.password.get_secret_value() == '1234' assert f.empty_password.get_secret_value() == '' with pytest.warns(DeprecationWarning, match=r'`secret_str.display\(\)` is deprecated'): assert f.password.display() == '**********' with pytest.warns(DeprecationWarning, match=r'`secret_str.display\(\)` is deprecated'): assert f.empty_password.display() == '' def test_secretstr_error(): class Foobar(BaseModel): password: SecretStr with pytest.raises(ValidationError) as exc_info: Foobar(password=[6, 23, 'abc']) assert exc_info.value.errors() == [{'loc': ('password',), 'msg': 'str type expected', 'type': 'type_error.str'}] def test_secretbytes(): class Foobar(BaseModel): password: SecretBytes empty_password: SecretBytes # Initialize the model. f = Foobar(password=b'wearebytes', empty_password=b'') # Assert correct types. assert f.password.__class__.__name__ == 'SecretBytes' assert f.empty_password.__class__.__name__ == 'SecretBytes' # Assert str and repr are correct. 
assert str(f.password) == '**********' assert str(f.empty_password) == '' assert repr(f.password) == "SecretBytes(b'**********')" assert repr(f.empty_password) == "SecretBytes(b'')" # Assert retrieval of secret value is correct assert f.password.get_secret_value() == b'wearebytes' assert f.empty_password.get_secret_value() == b'' with pytest.warns(DeprecationWarning, match=r'`secret_bytes.display\(\)` is deprecated'): assert f.password.display() == '**********' with pytest.warns(DeprecationWarning, match=r'`secret_bytes.display\(\)` is deprecated'): assert f.empty_password.display() == '' def test_secretbytes_error(): class Foobar(BaseModel): password: SecretBytes with pytest.raises(ValidationError) as exc_info: Foobar(password=[6, 23, 'abc']) assert exc_info.value.errors() == [{'loc': ('password',), 'msg': 'byte type expected', 'type': 'type_error.bytes'}] def test_generic_without_params(): class Model(BaseModel): generic_list: List generic_dict: Dict m = Model(generic_list=[0, 'a'], generic_dict={0: 'a', 'a': 0}) assert m.dict() == {'generic_list': [0, 'a'], 'generic_dict': {0: 'a', 'a': 0}} def test_generic_without_params_error(): class Model(BaseModel): generic_list: List generic_dict: Dict with pytest.raises(ValidationError) as exc_info: Model(generic_list=0, generic_dict=0) assert exc_info.value.errors() == [ {'loc': ('generic_list',), 'msg': 'value is not a valid list', 'type': 'type_error.list'}, {'loc': ('generic_dict',), 'msg': 'value is not a valid dict', 'type': 'type_error.dict'}, ] @pytest.mark.skipif(not typing_extensions, reason='typing_extensions not installed') def test_literal_single(): class Model(BaseModel): a: typing_extensions.Literal['a'] Model(a='a') with pytest.raises(ValidationError) as exc_info: Model(a='b') assert exc_info.value.errors() == [ { 'loc': ('a',), 'msg': "unexpected value; permitted: 'a'", 'type': 'value_error.const', 'ctx': {'given': 'b', 'permitted': ('a',)}, } ] @pytest.mark.skipif(not typing_extensions, reason='typing_extensions not installed') def test_literal_multiple(): class Model(BaseModel): a_or_b: typing_extensions.Literal['a', 'b'] Model(a_or_b='a') Model(a_or_b='b') with pytest.raises(ValidationError) as exc_info: Model(a_or_b='c') assert exc_info.value.errors() == [ { 'loc': ('a_or_b',), 'msg': "unexpected value; permitted: 'a', 'b'", 'type': 'value_error.const', 'ctx': {'given': 'c', 'permitted': ('a', 'b')}, } ] def test_unsupported_field_type(): with pytest.raises(TypeError, match=r'MutableSet(.*)not supported'): class UnsupportedModel(BaseModel): unsupported: MutableSet[int] def test_frozenset_field(): class FrozenSetModel(BaseModel): set: FrozenSet[int] test_set = frozenset({1, 2, 3}) object_under_test = FrozenSetModel(set=test_set) assert object_under_test.set == test_set def test_frozenset_field_conversion(): class FrozenSetModel(BaseModel): set: FrozenSet[int] test_list = [1, 2, 3] test_set = frozenset(test_list) object_under_test = FrozenSetModel(set=test_list) assert object_under_test.set == test_set def test_frozenset_field_not_convertible(): class FrozenSetModel(BaseModel): set: FrozenSet[int] with pytest.raises(ValidationError, match=r'frozenset'): FrozenSetModel(set=42) @pytest.mark.parametrize( 'input_value,output,human_bin,human_dec', ( ('1', 1, '1.0B', '1.0B'), ('1.0', 1, '1.0B', '1.0B'), ('1b', 1, '1.0B', '1.0B'), ('1.5 KB', int(1.5e3), '1.5KiB', '1.5KB'), ('1.5 K', int(1.5e3), '1.5KiB', '1.5KB'), ('1.5 MB', int(1.5e6), '1.4MiB', '1.5MB'), ('1.5 M', int(1.5e6), '1.4MiB', '1.5MB'), ('5.1kib', 5222, '5.1KiB', '5.2KB'), 
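# ByteSize units parse case-insensitively; 'kib' above is the 1024-based KiB, so int(5.1 * 1024) == 5222, while the decimal 'KB'/'MB' forms are 1000-based.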
('6.2EiB', 7148113328562451456, '6.2EiB', '7.1EB'), ), ) def test_bytesize_conversions(input_value, output, human_bin, human_dec): class Model(BaseModel): size: ByteSize m = Model(size=input_value) assert m.size == output assert m.size.human_readable() == human_bin assert m.size.human_readable(decimal=True) == human_dec def test_bytesize_to(): class Model(BaseModel): size: ByteSize m = Model(size='1GiB') assert m.size.to('MiB') == pytest.approx(1024) assert m.size.to('MB') == pytest.approx(1073.741824) assert m.size.to('TiB') == pytest.approx(0.0009765625) def test_bytesize_raises(): class Model(BaseModel): size: ByteSize with pytest.raises(ValidationError, match='parse value'): Model(size='d1MB') with pytest.raises(ValidationError, match='byte unit'): Model(size='1LiB') # 1Gi is not a valid unit unlike 1G with pytest.raises(ValidationError, match='byte unit'): Model(size='1Gi') m = Model(size='1MB') with pytest.raises(errors.InvalidByteSizeUnit, match='byte unit'): m.size.to('bad_unit') pydantic-1.2/tests/test_types_payment_card_number.py000066400000000000000000000056621357000400300232050ustar00rootroot00000000000000from collections import namedtuple from typing import Any import pytest from pydantic import BaseModel, ValidationError from pydantic.errors import InvalidLengthForBrand, LuhnValidationError, NotDigitError from pydantic.types import PaymentCardBrand, PaymentCardNumber VALID_AMEX = '370000000000002' VALID_MC = '5100000000000003' VALID_VISA = '4000000000000002' VALID_OTHER = '2000000000000000008' LUHN_INVALID = '4000000000000000' LEN_INVALID = '40000000000000006' # Mock PaymentCardNumber PCN = namedtuple('PaymentCardNumber', ['card_number', 'brand']) PCN.__len__ = lambda v: len(v.card_number) class PaymentCard(BaseModel): card_number: PaymentCardNumber def test_validate_digits(): digits = '12345' assert PaymentCardNumber.validate_digits(digits) == digits with pytest.raises(NotDigitError): PaymentCardNumber.validate_digits('hello') def test_validate_luhn_check_digit(): assert PaymentCardNumber.validate_luhn_check_digit(VALID_VISA) == VALID_VISA with pytest.raises(LuhnValidationError): PaymentCardNumber.validate_luhn_check_digit(LUHN_INVALID) @pytest.mark.parametrize( 'card_number, brand, valid', [ (VALID_VISA, PaymentCardBrand.visa, True), (VALID_MC, PaymentCardBrand.mastercard, True), (VALID_AMEX, PaymentCardBrand.amex, True), (VALID_OTHER, PaymentCardBrand.other, True), (LEN_INVALID, PaymentCardBrand.visa, False), ], ) def test_length_for_brand(card_number: str, brand: PaymentCardBrand, valid: bool): pcn = PCN(card_number, brand) if valid: assert PaymentCardNumber.validate_length_for_brand(pcn) == pcn else: with pytest.raises(InvalidLengthForBrand): PaymentCardNumber.validate_length_for_brand(pcn) @pytest.mark.parametrize( 'card_number, brand', [ (VALID_AMEX, PaymentCardBrand.amex), (VALID_MC, PaymentCardBrand.mastercard), (VALID_VISA, PaymentCardBrand.visa), (VALID_OTHER, PaymentCardBrand.other), ], ) def test_get_brand(card_number: str, brand: PaymentCardBrand): assert PaymentCardNumber._get_brand(card_number) == brand def test_valid(): card = PaymentCard(card_number=VALID_VISA) assert str(card.card_number) == VALID_VISA assert card.card_number.masked == '400000******0002' @pytest.mark.parametrize( 'card_number, error_message', [ (None, 'type_error.none.not_allowed'), ('1' * 11, 'value_error.any_str.min_length'), ('1' * 20, 'value_error.any_str.max_length'), ('h' * 16, 'value_error.payment_card_number.digits'), (LUHN_INVALID, 'value_error.payment_card_number.luhn_check'), 
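# LEN_INVALID is constructed to pass the earlier digit and Luhn validators, so only the brand/length check fails in the case below.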
(LEN_INVALID, 'value_error.payment_card_number.invalid_length_for_brand'), ], ) def test_error_types(card_number: Any, error_message: str): with pytest.raises(ValidationError, match=error_message) as exc_info: PaymentCard(card_number=card_number) assert exc_info.value.json().startswith('[') pydantic-1.2/tests/test_utils.py000066400000000000000000000163211357000400300170750ustar00rootroot00000000000000import os import string from enum import Enum from typing import NewType, Union import pytest from pydantic import BaseModel from pydantic.color import Color from pydantic.typing import display_as_type, is_new_type, new_type_supertype from pydantic.utils import ValueItems, deep_update, import_string, lenient_issubclass, truncate try: import devtools except ImportError: devtools = None def test_import_module(): assert import_string('os.path') == os.path def test_import_module_invalid(): with pytest.raises(ImportError) as exc_info: import_string('xx') assert exc_info.value.args[0] == '"xx" doesn\'t look like a module path' def test_import_no_attr(): with pytest.raises(ImportError) as exc_info: import_string('os.foobar') assert exc_info.value.args[0] == 'Module "os" does not define a "foobar" attribute' @pytest.mark.parametrize('value,expected', ((str, 'str'), ('string', 'str'), (Union[str, int], 'Union[str, int]'))) def test_display_as_type(value, expected): assert display_as_type(value) == expected def test_display_as_type_enum(): class SubField(Enum): a = 1 b = 'b' displayed = display_as_type(SubField) assert displayed == 'enum' def test_display_as_type_enum_int(): class SubField(int, Enum): a = 1 b = 2 displayed = display_as_type(SubField) assert displayed == 'int' def test_display_as_type_enum_str(): class SubField(str, Enum): a = 'a' b = 'b' displayed = display_as_type(SubField) assert displayed == 'str' def test_lenient_issubclass(): class A(str): pass assert lenient_issubclass(A, str) is True def test_lenient_issubclass_is_lenient(): assert lenient_issubclass('a', 'a') is False @pytest.mark.parametrize( 'input_value,output', [ (object, "<class 'object'>"), (string.ascii_lowercase, "'abcdefghijklmnopq…'"), (list(range(20)), '[0, 1, 2, 3, 4, 5, …'), ], ) def test_truncate(input_value, output): with pytest.warns(DeprecationWarning, match='`truncate` is no-longer used by pydantic and is deprecated'): assert truncate(input_value, max_len=20) == output def test_value_items(): v = ['a', 'b', 'c'] vi = ValueItems(v, {0, -1}) assert vi.is_excluded(2) assert [v_ for i, v_ in enumerate(v) if not vi.is_excluded(i)] == ['b'] assert vi.is_included(2) assert [v_ for i, v_ in enumerate(v) if vi.is_included(i)] == ['a', 'c'] v2 = {'a': v, 'b': {'a': 1, 'b': (1, 2)}, 'c': 1} vi = ValueItems(v2, {'a': {0, -1}, 'b': {'a': ..., 'b': -1}}) assert not vi.is_excluded('a') assert vi.is_included('a') assert not vi.is_excluded('c') assert not vi.is_included('c') assert str(vi) == "{'a': {0, -1}, 'b': {'a': Ellipsis, 'b': -1}}" assert repr(vi) == "ValueItems({'a': {0, -1}, 'b': {'a': Ellipsis, 'b': -1}})" excluded = {k_: v_ for k_, v_ in v2.items() if not vi.is_excluded(k_)} assert excluded == {'a': v, 'b': {'a': 1, 'b': (1, 2)}, 'c': 1} included = {k_: v_ for k_, v_ in v2.items() if vi.is_included(k_)} assert included == {'a': v, 'b': {'a': 1, 'b': (1, 2)}} sub_v = included['a'] sub_vi = ValueItems(sub_v, vi.for_element('a')) assert repr(sub_vi) == 'ValueItems({0, 2})' assert sub_vi.is_excluded(2) assert [v_ for i, v_ in enumerate(sub_v) if not sub_vi.is_excluded(i)] == ['b'] assert sub_vi.is_included(2) assert [v_ for i, v_ in
enumerate(sub_v) if sub_vi.is_included(i)] == ['a', 'c'] def test_value_items_error(): with pytest.raises(TypeError) as e: ValueItems(1, (1, 2, 3)) assert str(e.value) == "Unexpected type of exclude value <class 'tuple'>" def test_is_new_type(): new_type = NewType('new_type', str) new_new_type = NewType('new_new_type', new_type) assert is_new_type(new_type) assert is_new_type(new_new_type) assert not is_new_type(str) def test_new_type_supertype(): new_type = NewType('new_type', str) new_new_type = NewType('new_new_type', new_type) assert new_type_supertype(new_type) == str assert new_type_supertype(new_new_type) == str def test_pretty(): class MyTestModel(BaseModel): a = 1 b = [1, 2, 3] m = MyTestModel() assert m.__repr_name__() == 'MyTestModel' assert str(m) == 'a=1 b=[1, 2, 3]' assert repr(m) == 'MyTestModel(a=1, b=[1, 2, 3])' assert list(m.__pretty__(lambda x: f'fmt: {x!r}')) == [ 'MyTestModel(', 1, 'a=', 'fmt: 1', ',', 0, 'b=', 'fmt: [1, 2, 3]', ',', 0, -1, ')', ] def test_pretty_color(): c = Color('red') assert str(c) == 'red' assert repr(c) == "Color('red', rgb=(255, 0, 0))" assert list(c.__pretty__(lambda x: f'fmt: {x!r}')) == [ 'Color(', 1, "fmt: 'red'", ',', 0, 'rgb=', 'fmt: (255, 0, 0)', ',', 0, -1, ')', ] @pytest.mark.skipif(not devtools, reason='devtools not installed') def test_devtools_output(): class MyTestModel(BaseModel): a = 1 b = [1, 2, 3] assert devtools.pformat(MyTestModel()) == 'MyTestModel(\n a=1,\n b=[1, 2, 3],\n)' @pytest.mark.skipif(not devtools, reason='devtools not installed') def test_devtools_output_validation_error(): class Model(BaseModel): a: int with pytest.raises(ValueError) as exc_info: Model() assert devtools.pformat(exc_info.value) == ( 'ValidationError(\n' " model='Model',\n" ' errors=[\n' ' {\n' " 'loc': ('a',),\n" " 'msg': 'field required',\n" " 'type': 'value_error.missing',\n" ' },\n' ' ],\n' ')' ) @pytest.mark.parametrize( 'mapping, updating_mapping, expected_mapping, msg', [ ( {'key': {'inner_key': 0}}, {'other_key': 1}, {'key': {'inner_key': 0}, 'other_key': 1}, 'extra keys are inserted', ), ( {'key': {'inner_key': 0}, 'other_key': 1}, {'key': [1, 2, 3]}, {'key': [1, 2, 3], 'other_key': 1}, 'values that can not be merged are updated', ), ( {'key': {'inner_key': 0}}, {'key': {'other_key': 1}}, {'key': {'inner_key': 0, 'other_key': 1}}, 'values that have corresponding keys are merged', ), ( {'key': {'inner_key': {'deep_key': 0}}}, {'key': {'inner_key': {'other_deep_key': 1}}}, {'key': {'inner_key': {'deep_key': 0, 'other_deep_key': 1}}}, 'deeply nested values that have corresponding keys are merged', ), ], ) def test_deep_update(mapping, updating_mapping, expected_mapping, msg): assert deep_update(mapping, updating_mapping) == expected_mapping, msg def test_deep_update_is_not_mutating(): mapping = {'key': {'inner_key': {'deep_key': 1}}} updated_mapping = deep_update(mapping, {'key': {'inner_key': {'other_deep_key': 1}}}) assert updated_mapping == {'key': {'inner_key': {'deep_key': 1, 'other_deep_key': 1}}} assert mapping == {'key': {'inner_key': {'deep_key': 1}}} pydantic-1.2/tests/test_validators.py000066400000000000000000000657311357000400300201140ustar00rootroot00000000000000from datetime import datetime from itertools import product from typing import Dict, List, Optional, Tuple import pytest from pydantic import BaseModel, ConfigError, Extra, ValidationError, errors, validator from pydantic.class_validators import make_generic_validator, root_validator def test_simple(): class Model(BaseModel): a: str @validator('a') def check_a(cls, v): if 'foobar' not in
v: raise ValueError('"foobar" not found in a') return v assert Model(a='this is foobar good').a == 'this is foobar good' with pytest.raises(ValidationError) as exc_info: Model(a='snap') assert exc_info.value.errors() == [{'loc': ('a',), 'msg': '"foobar" not found in a', 'type': 'value_error'}] def test_int_validation(): class Model(BaseModel): a: int with pytest.raises(ValidationError) as exc_info: Model(a='snap') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert Model(a=3).a == 3 assert Model(a=True).a == 1 assert Model(a=False).a == 0 assert Model(a=4.5).a == 4 def test_frozenset_validation(): class Model(BaseModel): a: frozenset with pytest.raises(ValidationError) as exc_info: Model(a='snap') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid frozenset', 'type': 'type_error.frozenset'} ] assert Model(a={1, 2, 3}).a == frozenset({1, 2, 3}) assert Model(a=frozenset({1, 2, 3})).a == frozenset({1, 2, 3}) assert Model(a=[4, 5]).a == frozenset({4, 5}) assert Model(a=(6,)).a == frozenset({6}) def test_validate_whole(): class Model(BaseModel): a: List[int] @validator('a', pre=True) def check_a1(cls, v): v.append('123') return v @validator('a') def check_a2(cls, v): v.append(456) return v assert Model(a=[1, 2]).a == [1, 2, 123, 456] def test_validate_kwargs(): class Model(BaseModel): b: int a: List[int] @validator('a', each_item=True) def check_a1(cls, v, values, **kwargs): return v + values['b'] assert Model(a=[1, 2], b=6).a == [7, 8] def test_validate_pre_error(): calls = [] class Model(BaseModel): a: List[int] @validator('a', pre=True) def check_a1(cls, v): calls.append(f'check_a1 {v}') if 1 in v: raise ValueError('a1 broken') v[0] += 1 return v @validator('a') def check_a2(cls, v): calls.append(f'check_a2 {v}') if 10 in v: raise ValueError('a2 broken') return v assert Model(a=[3, 8]).a == [4, 8] assert calls == ['check_a1 [3, 8]', 'check_a2 [4, 8]'] calls = [] with pytest.raises(ValidationError) as exc_info: Model(a=[1, 3]) assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'a1 broken', 'type': 'value_error'}] assert calls == ['check_a1 [1, 3]'] calls = [] with pytest.raises(ValidationError) as exc_info: Model(a=[5, 10]) assert exc_info.value.errors() == [{'loc': ('a',), 'msg': 'a2 broken', 'type': 'value_error'}] assert calls == ['check_a1 [5, 10]', 'check_a2 [6, 10]'] class ValidateAssignmentModel(BaseModel): a: int = 4 b: str = ... 
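# Ellipsis (...) as the assigned value marks b as required; a and c get real defaults.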
c: int = 0 @validator('b') def b_length(cls, v, values, **kwargs): if 'a' in values and len(v) < values['a']: raise ValueError('b too short') return v @validator('c') def double_c(cls, v): return v * 2 class Config: validate_assignment = True extra = Extra.allow def test_validating_assignment_ok(): p = ValidateAssignmentModel(b='hello') assert p.b == 'hello' def test_validating_assignment_fail(): with pytest.raises(ValidationError): ValidateAssignmentModel(a=10, b='hello') p = ValidateAssignmentModel(b='hello') with pytest.raises(ValidationError): p.b = 'x' def test_validating_assignment_value_change(): p = ValidateAssignmentModel(b='hello', c=2) assert p.c == 4 p = ValidateAssignmentModel(b='hello') assert p.c == 0 p.c = 3 assert p.c == 6 def test_validating_assignment_extra(): p = ValidateAssignmentModel(b='hello', extra_field=1.23) assert p.extra_field == 1.23 p = ValidateAssignmentModel(b='hello') p.extra_field = 1.23 assert p.extra_field == 1.23 p.extra_field = 'bye' assert p.extra_field == 'bye' def test_validating_assignment_dict(): with pytest.raises(ValidationError) as exc_info: ValidateAssignmentModel(a='x', b='xx') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] def test_validate_multiple(): # also test TypeError class Model(BaseModel): a: str b: str @validator('a', 'b') def check_a_and_b(cls, v, field, **kwargs): if len(v) < 4: raise TypeError(f'{field.alias} is too short') return v + 'x' assert Model(a='1234', b='5678').dict() == {'a': '1234x', 'b': '5678x'} with pytest.raises(ValidationError) as exc_info: Model(a='x', b='x') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'a is too short', 'type': 'type_error'}, {'loc': ('b',), 'msg': 'b is too short', 'type': 'type_error'}, ] def test_classmethod(): class Model(BaseModel): a: str @validator('a') def check_a(cls, v): assert cls is Model return v m = Model(a='this is foobar good') assert m.a == 'this is foobar good' m.check_a('x') def test_duplicates(): with pytest.raises(errors.ConfigError) as exc_info: class Model(BaseModel): a: str b: str @validator('a') def duplicate_name(cls, v): return v @validator('b') # noqa def duplicate_name(cls, v): # noqa return v assert str(exc_info.value) == ( 'duplicate validator function ' '"tests.test_validators.test_duplicates.<locals>.Model.duplicate_name"; ' 'if this is intended, set `allow_reuse=True`' ) def test_use_bare(): with pytest.raises(errors.ConfigError) as exc_info: class Model(BaseModel): a: str @validator def checker(cls, v): return v assert 'validators should be used with fields' in str(exc_info.value) def test_use_no_fields(): with pytest.raises(errors.ConfigError) as exc_info: class Model(BaseModel): a: str @validator() def checker(cls, v): return v assert 'validator with no fields specified' in str(exc_info.value) def test_validate_always(): check_calls = 0 class Model(BaseModel): a: str = None @validator('a', pre=True, always=True) def check_a(cls, v): nonlocal check_calls check_calls += 1 return v or 'xxx' assert Model().a == 'xxx' assert check_calls == 1 assert Model(a='y').a == 'y' assert check_calls == 2 def test_validate_not_always(): check_calls = 0 class Model(BaseModel): a: str = None @validator('a', pre=True) def check_a(cls, v): nonlocal check_calls check_calls += 1 return v or 'xxx' assert Model().a is None assert check_calls == 0 assert Model(a='y').a == 'y' assert check_calls == 1 def test_wildcard_validators(): calls = [] class Model(BaseModel): a: str b: int @validator('a') def
check_a(cls, v, field, **kwargs): calls.append(('check_a', v, field.name)) return v @validator('*') def check_all(cls, v, field, **kwargs): calls.append(('check_all', v, field.name)) return v assert Model(a='abc', b='123').dict() == dict(a='abc', b=123) assert calls == [('check_a', 'abc', 'a'), ('check_all', 'abc', 'a'), ('check_all', 123, 'b')] def test_wildcard_validator_error(): class Model(BaseModel): a: str b: str @validator('*') def check_all(cls, v, field, **kwargs): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v assert Model(a='foobar a', b='foobar b').b == 'foobar b' with pytest.raises(ValidationError) as exc_info: Model(a='snap') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': '"foobar" not found in a', 'type': 'value_error'}, {'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}, ] def test_invalid_field(): with pytest.raises(errors.ConfigError) as exc_info: class Model(BaseModel): a: str @validator('b') def check_b(cls, v): return v assert str(exc_info.value) == ( "Validators defined with incorrect fields: check_b " # noqa: Q000 "(use check_fields=False if you're inheriting from the model and intended this)" ) def test_validate_child(): class Parent(BaseModel): a: str class Child(Parent): @validator('a') def check_a(cls, v): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v assert Parent(a='this is not a child').a == 'this is not a child' assert Child(a='this is foobar good').a == 'this is foobar good' with pytest.raises(ValidationError): Child(a='snap') def test_validate_child_extra(): class Parent(BaseModel): a: str @validator('a') def check_a_one(cls, v): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v class Child(Parent): @validator('a') def check_a_two(cls, v): return v.upper() assert Parent(a='this is foobar good').a == 'this is foobar good' assert Child(a='this is foobar good').a == 'THIS IS FOOBAR GOOD' with pytest.raises(ValidationError): Child(a='snap') def test_validate_child_all(): class Parent(BaseModel): a: str class Child(Parent): @validator('*') def check_a(cls, v): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v assert Parent(a='this is not a child').a == 'this is not a child' assert Child(a='this is foobar good').a == 'this is foobar good' with pytest.raises(ValidationError): Child(a='snap') def test_validate_parent(): class Parent(BaseModel): a: str @validator('a') def check_a(cls, v): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v class Child(Parent): pass assert Parent(a='this is foobar good').a == 'this is foobar good' assert Child(a='this is foobar good').a == 'this is foobar good' with pytest.raises(ValidationError): Parent(a='snap') with pytest.raises(ValidationError): Child(a='snap') def test_validate_parent_all(): class Parent(BaseModel): a: str @validator('*') def check_a(cls, v): if 'foobar' not in v: raise ValueError('"foobar" not found in a') return v class Child(Parent): pass assert Parent(a='this is foobar good').a == 'this is foobar good' assert Child(a='this is foobar good').a == 'this is foobar good' with pytest.raises(ValidationError): Parent(a='snap') with pytest.raises(ValidationError): Child(a='snap') def test_inheritance_keep(): class Parent(BaseModel): a: int @validator('a') def add_to_a(cls, v): return v + 1 class Child(Parent): pass assert Child(a=0).a == 1 def test_inheritance_replace(): class Parent(BaseModel): a: int @validator('a') def add_to_a(cls, v): return v + 1 
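# Reusing the parent's function name in the subclass replaces the parent validator, so only '+ 5' runs; test_inheritance_new below shows a differently-named validator stacks instead.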
class Child(Parent): @validator('a') def add_to_a(cls, v): return v + 5 assert Child(a=0).a == 5 def test_inheritance_new(): class Parent(BaseModel): a: int @validator('a') def add_one_to_a(cls, v): return v + 1 class Child(Parent): @validator('a') def add_five_to_a(cls, v): return v + 5 assert Child(a=0).a == 6 def test_validation_each_item(): class Model(BaseModel): foobar: Dict[int, int] @validator('foobar', each_item=True) def check_foobar(cls, v): return v + 1 assert Model(foobar={1: 1}).foobar == {1: 2} def test_key_validation(): class Model(BaseModel): foobar: Dict[int, int] @validator('foobar') def check_foobar(cls, value): return {k + 1: v + 1 for k, v in value.items()} assert Model(foobar={1: 1}).foobar == {2: 2} def test_validator_always_optional(): check_calls = 0 class Model(BaseModel): a: Optional[str] = None @validator('a', pre=True, always=True) def check_a(cls, v): nonlocal check_calls check_calls += 1 return v or 'default value' assert Model(a='y').a == 'y' assert check_calls == 1 assert Model().a == 'default value' assert check_calls == 2 def test_validator_always_pre(): check_calls = 0 class Model(BaseModel): a: str = None @validator('a', always=True, pre=True) def check_a(cls, v): nonlocal check_calls check_calls += 1 return v or 'default value' assert Model(a='y').a == 'y' assert Model().a == 'default value' assert check_calls == 2 def test_validator_always_post(): class Model(BaseModel): a: str = None @validator('a', always=True) def check_a(cls, v): return v or 'default value' assert Model(a='y').a == 'y' assert Model().a == 'default value' def test_validator_always_post_optional(): class Model(BaseModel): a: Optional[str] = None @validator('a', always=True, pre=True) def check_a(cls, v): return v or 'default value' assert Model(a='y').a == 'y' assert Model().a == 'default value' def test_datetime_validator(): check_calls = 0 class Model(BaseModel): d: datetime = None @validator('d', pre=True, always=True) def check_d(cls, v): nonlocal check_calls check_calls += 1 return v or datetime(2032, 1, 1) assert Model(d='2023-01-01T00:00:00').d == datetime(2023, 1, 1) assert check_calls == 1 assert Model().d == datetime(2032, 1, 1) assert check_calls == 2 assert Model(d=datetime(2023, 1, 1)).d == datetime(2023, 1, 1) assert check_calls == 3 def test_pre_called_once(): check_calls = 0 class Model(BaseModel): a: Tuple[int, int, int] @validator('a', pre=True) def check_a(cls, v): nonlocal check_calls check_calls += 1 return v assert Model(a=['1', '2', '3']).a == (1, 2, 3) assert check_calls == 1 @pytest.mark.parametrize( 'fields,result', [ (['val'], '_v_'), (['foobar'], '_v_'), (['val', 'field'], '_v_,_field_'), (['val', 'config'], '_v_,_config_'), (['val', 'values'], '_v_,_values_'), (['val', 'field', 'config'], '_v_,_field_,_config_'), (['val', 'field', 'values'], '_v_,_field_,_values_'), (['val', 'config', 'values'], '_v_,_config_,_values_'), (['val', 'field', 'values', 'config'], '_v_,_field_,_values_,_config_'), (['cls', 'val'], '_cls_,_v_'), (['cls', 'foobar'], '_cls_,_v_'), (['cls', 'val', 'field'], '_cls_,_v_,_field_'), (['cls', 'val', 'config'], '_cls_,_v_,_config_'), (['cls', 'val', 'values'], '_cls_,_v_,_values_'), (['cls', 'val', 'field', 'config'], '_cls_,_v_,_field_,_config_'), (['cls', 'val', 'field', 'values'], '_cls_,_v_,_field_,_values_'), (['cls', 'val', 'config', 'values'], '_cls_,_v_,_config_,_values_'), (['cls', 'val', 'field', 'values', 'config'], '_cls_,_v_,_field_,_values_,_config_'), ], ) def test_make_generic_validator(fields, result): exec(f"""def 
testing_function({', '.join(fields)}): return {' + "," + '.join(fields)}""") func = locals()['testing_function'] validator = make_generic_validator(func) assert validator.__qualname__ == 'testing_function' assert validator.__name__ == 'testing_function' # args: cls, v, values, field, config assert validator('_cls_', '_v_', '_values_', '_field_', '_config_') == result def test_make_generic_validator_kwargs(): def test_validator(v, **kwargs): return ', '.join(f'{k}: {v}' for k, v in kwargs.items()) validator = make_generic_validator(test_validator) assert validator.__name__ == 'test_validator' assert validator('_cls_', '_v_', '_vs_', '_f_', '_c_') == 'values: _vs_, field: _f_, config: _c_' def test_make_generic_validator_invalid(): def test_validator(v, foobar): return foobar with pytest.raises(ConfigError) as exc_info: make_generic_validator(test_validator) assert ': (v, foobar), should be: (value, values, config, field)' in str(exc_info.value) def test_make_generic_validator_cls_kwargs(): def test_validator(cls, v, **kwargs): return ', '.join(f'{k}: {v}' for k, v in kwargs.items()) validator = make_generic_validator(test_validator) assert validator.__name__ == 'test_validator' assert validator('_cls_', '_v_', '_vs_', '_f_', '_c_') == 'values: _vs_, field: _f_, config: _c_' def test_make_generic_validator_cls_invalid(): def test_validator(cls, v, foobar): return foobar with pytest.raises(ConfigError) as exc_info: make_generic_validator(test_validator) assert ': (cls, v, foobar), should be: (cls, value, values, config, field)' in str(exc_info.value) def test_make_generic_validator_self(): def test_validator(self, v): return v with pytest.raises(ConfigError) as exc_info: make_generic_validator(test_validator) assert ': (self, v), "self" not permitted as first argument, should be: (cls, value' in str(exc_info.value) def test_assert_raises_validation_error(): class Model(BaseModel): a: str @validator('a') def check_a(cls, v): assert v == 'a', 'invalid a' return v Model(a='a') with pytest.raises(ValidationError) as exc_info: Model(a='snap') injected_by_pytest = "\nassert 'snap' == 'a'\n - snap\n + a" assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': f'invalid a{injected_by_pytest}', 'type': 'assertion_error'} ] def test_whole(): with pytest.warns(DeprecationWarning, match='The "whole" keyword argument is deprecated'): class Model(BaseModel): x: List[int] @validator('x', whole=True) def check_something(cls, v): return v def test_root_validator(): root_val_values = [] class Model(BaseModel): a: int = 1 b: str @validator('b') def repeat_b(cls, v): return v * 2 @root_validator def example_root_validator(cls, values): root_val_values.append(values) if 'snap' in values.get('b', ''): raise ValueError('foobar') return dict(values, b='changed') assert Model(a='123', b='bar').dict() == {'a': 123, 'b': 'changed'} with pytest.raises(ValidationError) as exc_info: Model(b='snap dragon') assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'}] with pytest.raises(ValidationError) as exc_info: Model(a='broken', b='bar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert root_val_values == [{'a': 123, 'b': 'barbar'}, {'a': 1, 'b': 'snap dragonsnap dragon'}, {'b': 'barbar'}] def test_root_validator_pre(): root_val_values = [] class Model(BaseModel): a: int = 1 b: str @validator('b') def repeat_b(cls, v): return v * 2 @root_validator(pre=True) def root_validator(cls, values): 
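# With pre=True this root validator sees the raw, unvalidated input, which is why the assertion further down records a='123' still as a string.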
root_val_values.append(values) if 'snap' in values.get('b', ''): raise ValueError('foobar') return {'a': 42, 'b': 'changed'} assert Model(a='123', b='bar').dict() == {'a': 42, 'b': 'changedchanged'} with pytest.raises(ValidationError) as exc_info: Model(b='snap dragon') assert root_val_values == [{'a': '123', 'b': 'bar'}, {'b': 'snap dragon'}] assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'}] def test_root_validator_repeat(): with pytest.raises(errors.ConfigError, match='duplicate validator function'): class Model(BaseModel): a: int = 1 @root_validator def root_validator_repeated(cls, values): return values @root_validator # noqa: F811 def root_validator_repeated(cls, values): # noqa: F811 return values def test_root_validator_repeat2(): with pytest.raises(errors.ConfigError, match='duplicate validator function'): class Model(BaseModel): a: int = 1 @validator('a') def repeat_validator(cls, v): return v @root_validator(pre=True) # noqa: F811 def repeat_validator(cls, values): # noqa: F811 return values def test_root_validator_self(): with pytest.raises( errors.ConfigError, match=r'Invalid signature for root validator root_validator: \(self, values\)' ): class Model(BaseModel): a: int = 1 @root_validator def root_validator(self, values): return values def test_root_validator_extra(): with pytest.raises(errors.ConfigError) as exc_info: class Model(BaseModel): a: int = 1 @root_validator def root_validator(cls, values, another): return values assert str(exc_info.value) == ( 'Invalid signature for root validator root_validator: (cls, values, another), should be: (cls, values).' ) def test_root_validator_types(): root_val_values = None class Model(BaseModel): a: int = 1 b: str @root_validator def root_validator(cls, values): nonlocal root_val_values root_val_values = cls, values return values class Config: extra = Extra.allow assert Model(b='bar', c='wobble').dict() == {'a': 1, 'b': 'bar', 'c': 'wobble'} assert root_val_values == (Model, {'a': 1, 'b': 'bar', 'c': 'wobble'}) def test_root_validator_inheritance(): calls = [] class Parent(BaseModel): pass @root_validator def root_validator_parent(cls, values): calls.append(f'parent validator: {values}') return {'extra1': 1, **values} class Child(Parent): a: int @root_validator def root_validator_child(cls, values): calls.append(f'child validator: {values}') return {'extra2': 2, **values} assert len(Child.__post_root_validators__) == 2 assert len(Child.__pre_root_validators__) == 0 assert Child(a=123).dict() == {'extra2': 2, 'extra1': 1, 'a': 123} assert calls == ["parent validator: {'a': 123}", "child validator: {'extra1': 1, 'a': 123}"] def reusable_validator(num): return num * 2 def test_reuse_global_validators(): class Model(BaseModel): x: int y: int double_x = validator('x', allow_reuse=True)(reusable_validator) double_y = validator('y', allow_reuse=True)(reusable_validator) assert dict(Model(x=1, y=1)) == {'x': 2, 'y': 2} def declare_with_reused_validators(include_root, allow_1, allow_2, allow_3): class Model(BaseModel): a: str b: str @validator('a', allow_reuse=allow_1) def duplicate_name(cls, v): return v @validator('b', allow_reuse=allow_2) # noqa F811 def duplicate_name(cls, v): # noqa F811 return v if include_root: @root_validator(allow_reuse=allow_3) # noqa F811 def duplicate_name(cls, values): # noqa F811 return values @pytest.fixture def reset_tracked_validators(): from pydantic.class_validators import _FUNCS original_tracked_validators = set(_FUNCS) yield _FUNCS.clear() 
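# Teardown: clear whatever validators the test registered, then restore the snapshot so duplicate-function checks don't leak between tests.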
_FUNCS.update(original_tracked_validators) @pytest.mark.parametrize('include_root,allow_1,allow_2,allow_3', product(*[[True, False]] * 4)) def test_allow_reuse(include_root, allow_1, allow_2, allow_3, reset_tracked_validators): duplication_count = int(not allow_1) + int(not allow_2) + int(include_root and not allow_3) if duplication_count > 1: with pytest.raises(ConfigError) as exc_info: declare_with_reused_validators(include_root, allow_1, allow_2, allow_3) assert str(exc_info.value).startswith('duplicate validator function') else: declare_with_reused_validators(include_root, allow_1, allow_2, allow_3) @pytest.mark.parametrize('validator_classmethod,root_validator_classmethod', product(*[[True, False]] * 2)) def test_root_validator_classmethod(validator_classmethod, root_validator_classmethod, reset_tracked_validators): root_val_values = [] class Model(BaseModel): a: int = 1 b: str def repeat_b(cls, v): return v * 2 if validator_classmethod: repeat_b = classmethod(repeat_b) repeat_b = validator('b')(repeat_b) def example_root_validator(cls, values): root_val_values.append(values) if 'snap' in values.get('b', ''): raise ValueError('foobar') return dict(values, b='changed') if root_validator_classmethod: example_root_validator = classmethod(example_root_validator) example_root_validator = root_validator(example_root_validator) assert Model(a='123', b='bar').dict() == {'a': 123, 'b': 'changed'} with pytest.raises(ValidationError) as exc_info: Model(b='snap dragon') assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'}] with pytest.raises(ValidationError) as exc_info: Model(a='broken', b='bar') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'} ] assert root_val_values == [{'a': 123, 'b': 'barbar'}, {'a': 1, 'b': 'snap dragonsnap dragon'}, {'b': 'barbar'}] pydantic-1.2/tests/test_validators_dataclass.py000077500000000000000000000063751357000400300221370ustar00rootroot00000000000000from dataclasses import asdict, is_dataclass from typing import List import pytest from pydantic import ValidationError, root_validator, validator from pydantic.dataclasses import dataclass def test_simple(): @dataclass class MyDataclass: a: str @validator('a') def change_a(cls, v): return v + ' changed' assert MyDataclass(a='this is foobar good').a == 'this is foobar good changed' def test_validate_pre(): @dataclass class MyDataclass: a: List[int] @validator('a', pre=True) def check_a1(cls, v): v.append('123') return v @validator('a') def check_a2(cls, v): v.append(456) return v assert MyDataclass(a=[1, 2]).a == [1, 2, 123, 456] def test_validate_multiple(): # also test TypeError @dataclass class MyDataclass: a: str b: str @validator('a', 'b') def check_a_and_b(cls, v, field, **kwargs): if len(v) < 4: raise TypeError(f'{field.alias} is too short') return v + 'x' assert asdict(MyDataclass(a='1234', b='5678')) == {'a': '1234x', 'b': '5678x'} with pytest.raises(ValidationError) as exc_info: MyDataclass(a='x', b='x') assert exc_info.value.errors() == [ {'loc': ('a',), 'msg': 'a is too short', 'type': 'type_error'}, {'loc': ('b',), 'msg': 'b is too short', 'type': 'type_error'}, ] def test_classmethod(): @dataclass class MyDataclass: a: str @validator('a') def check_a(cls, v): assert cls is MyDataclass and is_dataclass(MyDataclass) return v m = MyDataclass(a='this is foobar good') assert m.a == 'this is foobar good' m.check_a('x') def test_validate_parent(): @dataclass class Parent: a: str @validator('a') 
def change_a(cls, v): return v + ' changed' @dataclass class Child(Parent): pass assert Parent(a='this is foobar good').a == 'this is foobar good changed' assert Child(a='this is foobar good').a == 'this is foobar good changed' def test_inheritance_replace(): @dataclass class Parent: a: int @validator('a') def add_to_a(cls, v): return v + 1 @dataclass class Child(Parent): @validator('a') def add_to_a(cls, v): return v + 5 assert Child(a=0).a == 5 def test_root_validator(): root_val_values = [] @dataclass class MyDataclass: a: int b: str @validator('b') def repeat_b(cls, v): return v * 2 @root_validator def root_validator(cls, values): root_val_values.append(values) if 'snap' in values.get('b', ''): raise ValueError('foobar') return dict(values, b='changed') assert asdict(MyDataclass(a='123', b='bar')) == {'a': 123, 'b': 'changed'} with pytest.raises(ValidationError) as exc_info: MyDataclass(a=1, b='snap dragon') assert root_val_values == [{'a': 123, 'b': 'barbar'}, {'a': 1, 'b': 'snap dragonsnap dragon'}] assert exc_info.value.errors() == [{'loc': ('__root__',), 'msg': 'foobar', 'type': 'value_error'}] pydantic-1.2/tests/try_assert.py000066400000000000000000000017171357000400300171000ustar00rootroot00000000000000""" This test is executed separately due to pytest's assertion-rewriting """ from pydantic import BaseModel, ValidationError, validator def test_assert_raises_validation_error(): test_name = test_assert_raises_validation_error.__name__ class Model(BaseModel): a: str @validator('a') def check_a(cls, v): assert v == 'a', 'invalid a' return v Model(a='a') expected_errors = [{'loc': ('a',), 'msg': f'invalid a', 'type': 'assertion_error'}] try: Model(a='snap') except ValidationError as exc: actual_errors = exc.errors() if actual_errors != expected_errors: raise RuntimeError(f'{test_name}:\nActual errors: {actual_errors}\nExpected errors: {expected_errors}') else: raise RuntimeError(f'{test_name}: ValidationError was not raised') if __name__ == '__main__': test_assert_raises_validation_error() print('Non-pytest assert tests passed')
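# --- Illustrative sketch, not part of the original suite ---------------------
# The module docstring notes that pytest rewrites bare `assert` statements,
# appending introspection detail to the failure message; run outside pytest,
# a validator's assertion message stays exactly as written. A minimal,
# hypothetical example of the same pattern (names below are invented for
# illustration, using only the imports already present in this module):
def _sketch_plain_assert_message():
    class Example(BaseModel):
        a: str

        @validator('a')
        def check_a(cls, v):
            assert v == 'ok', 'a must be "ok"'
            return v

    try:
        Example(a='nope')
    except ValidationError as exc:
        # Without pytest's assertion rewriting, msg is the bare assertion text.
        assert exc.errors()[0]['msg'] == 'a must be "ok"'
    else:
        raise RuntimeError('ValidationError was not raised')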