pax_global_header00006660000000000000000000000064145160504070014514gustar00rootroot0000000000000052 comment=d4cddecdae86876b1a2f8d9ea6c74047c7a183a7 pydantic-compat-0.1.2/000077500000000000000000000000001451605040700146105ustar00rootroot00000000000000pydantic-compat-0.1.2/.github/000077500000000000000000000000001451605040700161505ustar00rootroot00000000000000pydantic-compat-0.1.2/.github/ISSUE_TEMPLATE.md000066400000000000000000000005061451605040700206560ustar00rootroot00000000000000* pydantic-compat version: * Python version: * Operating System: ### Description Describe what you were trying to get done. Tell us what happened, what went wrong, and what you expected to happen. ### What I Did ``` Paste the command(s) you ran and the output. If there was a crash, please include the traceback here. ``` pydantic-compat-0.1.2/.github/TEST_FAIL_TEMPLATE.md000066400000000000000000000006001451605040700213330ustar00rootroot00000000000000--- title: "{{ env.TITLE }}" labels: [bug] --- The {{ workflow }} workflow failed on {{ date | date("YYYY-MM-DD HH:mm") }} UTC The most recent failing test was on {{ env.PLATFORM }} py{{ env.PYTHON }} with commit: {{ sha }} Full run: https://github.com/{{ repo }}/actions/runs/{{ env.RUN_ID }} (This post will be updated if another test fails, as long as this issue remains open.) pydantic-compat-0.1.2/.github/dependabot.yml000066400000000000000000000004241451605040700210000ustar00rootroot00000000000000# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" commit-message: prefix: "ci(dependabot):" pydantic-compat-0.1.2/.github/workflows/000077500000000000000000000000001451605040700202055ustar00rootroot00000000000000pydantic-compat-0.1.2/.github/workflows/ci.yml000066400000000000000000000047571451605040700213400ustar00rootroot00000000000000name: CI concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true on: push: branches: - main tags: - "v*" pull_request: workflow_dispatch: schedule: - cron: "0 0 * * 0" # every week (for --pre release tests) jobs: check-manifest: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - run: pipx run check-manifest test: name: ${{ matrix.platform }} (${{ matrix.python-version }}) runs-on: ${{ matrix.platform }} strategy: fail-fast: false matrix: python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12-dev"] platform: [ubuntu-latest] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: python -m pip install -U pip hatch - name: Test run: hatch run test:test-cov-xml env: # if github.event_name == 'schedule' we want to test the --pre release PIP_PRE: ${{ github.event_name == 'schedule' }} # If something goes wrong, we can open an issue in the repo - name: Report --pre Failures if: failure() && github.event_name == 'schedule' uses: JasonEtco/create-an-issue@v2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} PLATFORM: ${{ matrix.platform }} PYTHON: ${{ matrix.python-version }} RUN_ID: ${{ github.run_id }} TITLE: "[test-bot] pip install --pre is failing" with: filename: .github/TEST_FAIL_TEMPLATE.md update_existing: true - name: Coverage uses: codecov/codecov-action@v3 deploy: name: Deploy needs: test if: success() && startsWith(github.ref, 'refs/tags/') && github.event_name != 'schedule' runs-on: ubuntu-latest 
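    # NOTE (editor, assumed rationale for the permissions requested just below):
    # `id-token: write` is what PyPI trusted publishing (OIDC) needs for pypa/gh-action-pypi-publish,
    # and `contents: write` lets softprops/action-gh-release create the GitHub release and
    # attach the built distributions.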
permissions: id-token: write contents: write steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v4 with: python-version: "3.x" - name: install run: | python -m pip install build python -m build - name: 🚢 Publish to PyPI uses: pypa/gh-action-pypi-publish@release/v1 - uses: softprops/action-gh-release@v1 with: generate_release_notes: true files: './dist/*' pydantic-compat-0.1.2/.github_changelog_generator000066400000000000000000000004271451605040700221530ustar00rootroot00000000000000user=pyapp-kit project=pydantic-compat issues=false exclude-labels=duplicate,question,invalid,wontfix,hide add-sections={"tests":{"prefix":"**Tests & CI:**","labels":["tests"]}, "documentation":{"prefix":"**Documentation:**", "labels":["documentation"]}} exclude-tags-regex=.*rc pydantic-compat-0.1.2/.gitignore000066400000000000000000000022551451605040700166040ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # dotenv .env # virtualenv .venv venv/ ENV/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ # IDE settings .vscode/ pydantic-compat-0.1.2/.pre-commit-config.yaml000066400000000000000000000012301451605040700210650ustar00rootroot00000000000000ci: autoupdate_schedule: monthly autofix_commit_msg: "style(pre-commit.ci): auto fixes [...]" autoupdate_commit_msg: "ci(pre-commit.ci): autoupdate" repos: - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.0.292 hooks: - id: ruff args: [--fix] - repo: https://github.com/psf/black rev: 23.9.1 hooks: - id: black - repo: https://github.com/abravalheri/validate-pyproject rev: v0.14 hooks: - id: validate-pyproject - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.5.1 hooks: - id: mypy files: "^src/" additional_dependencies: - pydantic pydantic-compat-0.1.2/LICENSE000066400000000000000000000027071451605040700156230ustar00rootroot00000000000000Copyright (c) 2023, Talley Lambert Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. 
Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

pydantic-compat-0.1.2/README.md

# pydantic-compat

[![GitHub](https://img.shields.io/github/license/pyapp-kit/pydantic-compat)](https://github.com/pyapp-kit/pydantic-compat/raw/main/LICENSE)
[![PyPI](https://img.shields.io/pypi/v/pydantic-compat.svg?color=green)](https://pypi.org/project/pydantic-compat)
[![Python Version](https://img.shields.io/pypi/pyversions/pydantic-compat.svg?color=green)](https://python.org)
[![CI](https://github.com/pyapp-kit/pydantic-compat/actions/workflows/ci.yml/badge.svg)](https://github.com/pyapp-kit/pydantic-compat/actions/workflows/ci.yml)
[![codecov](https://codecov.io/gh/pyapp-kit/pydantic-compat/branch/main/graph/badge.svg)](https://codecov.io/gh/pyapp-kit/pydantic-compat)

## Motivation

Pydantic 2 was a major release that completely changed the pydantic API. For applications, this is not a big deal, as they can pin to whatever version of pydantic they need. But for libraries that want to exist in a broader environment, pinning to a specific version of pydantic is not always an option (as it limits the ability to co-exist with other libraries).

This package provides (unofficial) compatibility mixins and function adaptors for pydantic v1-v2 cross compatibility. It allows you to use either v1 or v2 API names, regardless of the pydantic version installed. (Prefer using v2 names when possible). Tests are run on Pydantic v1.8 and up.

The API conversion is not exhaustive, but suffices for many of the use cases I have come across. It is in use by the following libraries:

- [ome-types](https://github.com/tlambert03/ome-types)
- [psygnal](https://github.com/pyapp-kit/psygnal)
- [app-model](https://github.com/pyapp-kit/app-model)
- [useq-schema](https://github.com/pymmcore-plus/useq-schema)

Feel free to open an issue or PR if you find it useful but lacking features you need.

## What does it do?

Not much! :joy: Mostly it serves to translate names from one API to another. It backports the v2 API to v1 (so you can use v2 names in a pydantic v1 runtime), and forwards the v1 API to v2 (so you can use v1 names in a v2 runtime without deprecation warnings).

> While pydantic2 does offer deprecated access to the v1 API, if you explicitly
> wish to support pydantic1 without your users seeing deprecation warnings,
> then you need to do a lot of name adaptation depending on the runtime
> pydantic version. This package does that for you.

It does _not_ do any significantly complex translation of API logic.
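As a rough sketch of the kind of translation it _does_ do: with the mixin applied, both the v1 and v2 spellings below resolve to the same call at runtime, whichever pydantic major version is installed. (The `Point` model is purely illustrative; the Usage section below shows the full pattern.)

```py
from pydantic import BaseModel

from pydantic_compat import PydanticCompatMixin


class Point(PydanticCompatMixin, BaseModel):
    x: int = 0


pt = Point(x=1)
# v1-style and v2-style names are interchangeable under either pydantic version
assert pt.dict() == pt.model_dump() == {"x": 1}
assert Point.parse_obj({"x": 2}) == Point.model_validate({"x": 2})
```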
For custom types, you will still likely need to add class methods to support both versions of pydantic. It also does not prevent you from needing to know what's changing under the hood in pydantic 2. You should be running tests on both versions of pydantic to ensure your library works as expected. This library just makes it much easier to support both versions in a single codebase without a lot of ugly conditionals and boilerplate.

## Usage

```py
from pydantic import BaseModel
from pydantic_compat import PydanticCompatMixin
from pydantic_compat import field_validator  # or 'validator'
from pydantic_compat import model_validator  # or 'root_validator'

class MyModel(PydanticCompatMixin, BaseModel):
    x: int
    y: int = 2

    # prefer v2 dict, but v1 class Config is supported
    model_config = {'frozen': True}

    @field_validator('x', mode='after')
    def _check_x(cls, v):
        if v != 42:
            raise ValueError("That's not the answer!")
        return v

    @model_validator(mode='after')
    def _check_model(cls, v: MyModel):
        # ...
        return v
```

You can now use the following attributes and methods regardless of the pydantic version installed (without deprecation warnings):

| v1 name                     | v2 name                     |
| --------------------------- | --------------------------- |
| `obj.dict()`                | `obj.model_dump()`          |
| `obj.json()`                | `obj.model_dump_json()`     |
| `obj.copy()`                | `obj.model_copy()`          |
| `Model.construct`           | `Model.model_construct`     |
| `Model.schema`              | `Model.model_json_schema`   |
| `Model.validate`            | `Model.model_validate`      |
| `Model.parse_obj`           | `Model.model_validate`      |
| `Model.parse_raw`           | `Model.model_validate_json` |
| `Model.update_forward_refs` | `Model.model_rebuild`       |
| `Model.__fields__`          | `Model.model_fields`        |
| `Model.__fields_set__`      | `Model.model_fields_set`    |

## `Field` notes

- `pydantic_compat.Field` will remove outdated keyword arguments (`const`) and translate keyword arguments that were renamed:

  | v1 name          | v2 name             |
  | ---------------- | ------------------- |
  | `min_items`      | `min_length`        |
  | `max_items`      | `max_length`        |
  | `regex`          | `pattern`           |
  | `allow_mutation` | `not frozen`        |
  | `metadata`       | `json_schema_extra` |

- Don't use `var = Field(..., const='val')`, use `var: Literal['val'] = 'val'`; it works in both v1 and v2.
- No attempt is made to convert between v1's `unique_items` and v2's `Set[]` semantics. See <https://github.com/pydantic/pydantic-core/issues/296> for discussion.

## API rules

- Both V1 and V2 names may be used (regardless of pydantic version), but usage of V2 names is strongly recommended.
- But the signature must match the pydantic version the name comes from. For example, if you are using `pydantic_compat.field_validator` then the signature must match the pydantic (v2) `field_validator` signature, regardless of the pydantic version installed. Similarly, if you choose to use `pydantic_compat.validator` then the signature must match the pydantic (v1) `validator` signature.

## Notable differences

- `BaseModel.__fields__` in v1 is a dict of `{'field_name' -> ModelField}` whereas in v2 `BaseModel.model_fields` is a dict of `{'field_name' -> FieldInfo}`. `FieldInfo` is a much simpler object than `ModelField`, so it is difficult to directly support complicated v1 usage of `__fields__`. `pydantic-compat` simply provides a name adaptor that lets you access many of the attributes you may have accessed on `ModelField` in v1 while operating in a v2 world, but `ModelField` methods will not be made available. You'll need to update your usage accordingly; for example:
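  A minimal sketch of the kind of access that does carry over, reusing `MyModel` from the Usage section above (which attributes exist beyond these common ones depends on the installed pydantic version):

  ```py
  info = MyModel.model_fields["x"]  # v2: a real FieldInfo; v1: a thin wrapper around ModelField
  print(info.annotation)            # the declared type of the field under either version
  print(info.frozen)                # on v1 this is derived from `not allow_mutation`
  ```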
- in V2, `pydantic.model_validator(..., mode='after')` passes a model _instance_ to the validator function, whereas `pydantic.v1.root_validator(..., pre=False)` passes a dict of `{'field_name' -> validated_value}` to the validator function. In pydantic-compat, both decorators follow the semantics of their corresponding pydantic versions, _but_ `root_validator` gains parameter `construct_object: bool=False` that matches the `model_validator` behavior (only when `mode=='after'`). If you want that behavior though, prefer using `model_validator` directly. ## TODO: - Serialization decorators pydantic-compat-0.1.2/pyproject.toml000066400000000000000000000072301451605040700175260ustar00rootroot00000000000000# https://peps.python.org/pep-0517/ [build-system] requires = ["hatchling", "hatch-vcs"] build-backend = "hatchling.build" # https://peps.python.org/pep-0621/ [project] name = "pydantic-compat" description = "Compatibility layer for pydantic v1/v2" readme = "README.md" requires-python = ">=3.7" license = { text = "BSD 3-Clause License" } authors = [{ email = "talley.lambert@gmail.com", name = "Talley Lambert" }] classifiers = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Framework :: Pydantic", "Typing :: Typed", ] dynamic = ["version"] dependencies = ["pydantic", "importlib_metadata; python_version<'3.8'"] [tool.hatch.envs.default] dependencies = [ "pytest", "pytest-cov", "pdbpp", "rich", "importlib_metadata; python_version<'3.8'", ] [tool.hatch.envs.test] [tool.hatch.envs.test.scripts] test = "pytest -v" test-cov = "pytest -v --cov --cov-report=term-missing" test-cov-xml = "pytest -v --color=yes --cov --cov-report=xml --cov-append" [[tool.hatch.envs.test.matrix]] # python = ["3.8", "3.11"] # good for local, too verbose for CI pydantic = ["v1.8", "v1.9", "v1", "v2"] [tool.hatch.envs.test.overrides] matrix.pydantic.extra-dependencies = [ { value = "pydantic<1.9", if = ["v1.8"] }, { value = "pydantic<1.10", if = ["v1.9"] }, { value = "pydantic<2.0", if = ["v1"] }, { value = "pydantic>=2.0", if = ["v2"] }, ] # https://peps.python.org/pep-0621/#dependencies-optional-dependencies [project.optional-dependencies] test = ["pytest>=6.0", "pytest-cov"] dev = [ "black", "ipython", "mypy", "pdbpp", "pre-commit", "pytest-cov", "pytest", "rich", "ruff", ] [project.urls] homepage = "https://github.com/pyapp-kit/pydantic-compat" repository = "https://github.com/pyapp-kit/pydantic-compat" # https://hatch.pypa.io/latest/config/metadata/ [tool.hatch.version] source = "vcs" # https://hatch.pypa.io/latest/config/build/#file-selection [tool.hatch.build.targets.sdist] include = ["/src", "/tests"] [tool.hatch.build.targets.wheel] only-include = ["src"] sources = ["src"] # https://github.com/charliermarsh/ruff [tool.ruff] line-length = 88 target-version = "py37" src = ["src"] # https://beta.ruff.rs/docs/rules/ select = [ "E", # style errors "W", # style warnings "F", # flakes "I", # isort "UP", # pyupgrade "C4", # flake8-comprehensions "B", # flake8-bugbear "A001", # flake8-builtins "RUF", # ruff-specific rules "TCH", "TID", ] # https://docs.pytest.org/en/6.2.x/customize.html [tool.pytest.ini_options] minversion = "6.0" testpaths = ["tests"] filterwarnings = ["error"] # 
https://mypy.readthedocs.io/en/stable/config_file.html [tool.mypy] files = "src/**/" strict = true disallow_any_generics = false disallow_subclassing_any = false show_error_codes = true pretty = true # https://coverage.readthedocs.io/en/6.4/config.html [tool.coverage.report] exclude_lines = [ "pragma: no cover", "if TYPE_CHECKING:", "@overload", "except ImportError", "\\.\\.\\.", "raise NotImplementedError()", ] [tool.coverage.run] source = ["pydantic_compat"] # https://github.com/mgedmin/check-manifest#configuration [tool.check-manifest] ignore = [ ".github_changelog_generator", ".pre-commit-config.yaml", ".ruff_cache/**/*", "tests/**/*", ] pydantic-compat-0.1.2/src/000077500000000000000000000000001451605040700153775ustar00rootroot00000000000000pydantic-compat-0.1.2/src/pydantic_compat/000077500000000000000000000000001451605040700205555ustar00rootroot00000000000000pydantic-compat-0.1.2/src/pydantic_compat/__init__.py000066400000000000000000000032001451605040700226610ustar00rootroot00000000000000"""CompatibilityMixin for pydantic v1/1/v2.""" try: from importlib.metadata import PackageNotFoundError, version except ImportError: from importlib_metadata import PackageNotFoundError, version # type: ignore from typing import TYPE_CHECKING try: __version__ = version("pydantic-compat") except PackageNotFoundError: # pragma: no cover __version__ = "uninstalled" __author__ = "Talley Lambert" __email__ = "talley.lambert@gmail.com" __all__ = [ "__version__", "field_validator", "model_validator", "PYDANTIC2", "PydanticCompatMixin", "root_validator", "validator", "Field", "BaseModel", ] from ._shared import PYDANTIC2 if TYPE_CHECKING: from pydantic import ( Field, field_validator, model_validator, root_validator, validator, ) # using this to avoid breaking pydantic mypy plugin # not that we could use a protocol. 
but it will be hard to provide proper names # AND proper signatures for both versions of pydantic without a ton of potentially # outdated signatures PydanticCompatMixin = type else: from ._shared import Field if PYDANTIC2: from pydantic import field_validator, model_validator from ._v2 import PydanticCompatMixin, root_validator, validator else: from pydantic import validator from ._v1 import ( PydanticCompatMixin, field_validator, model_validator, root_validator, ) import pydantic class BaseModel(PydanticCompatMixin, pydantic.BaseModel): """BaseModel with pydantic_compat mixins.""" del pydantic pydantic-compat-0.1.2/src/pydantic_compat/_shared.py000066400000000000000000000055741451605040700225470ustar00rootroot00000000000000import contextlib import warnings from typing import Any import pydantic import pydantic.version PYDANTIC2 = pydantic.version.VERSION.startswith("2") V2_REMOVED_CONFIG_KEYS = { "allow_mutation", "error_msg_templates", "fields", "getter_dict", "smart_union", "underscore_attrs_are_private", "json_loads", "json_dumps", "copy_on_model_validation", "post_init_call", } V2_RENAMED_CONFIG_KEYS = { "allow_population_by_field_name": "populate_by_name", "anystr_lower": "str_to_lower", "anystr_strip_whitespace": "str_strip_whitespace", "anystr_upper": "str_to_upper", "keep_untouched": "ignored_types", "max_anystr_length": "str_max_length", "min_anystr_length": "str_min_length", "orm_mode": "from_attributes", "schema_extra": "json_schema_extra", "validate_all": "validate_default", } V1_FIELDS_TO_V2_FIELDS = { "min_items": "min_length", "max_items": "max_length", "regex": "pattern", "allow_mutation": "-frozen", "metadata": "json_schema_extra", } V2_FIELDS_TO_V1_FIELDS = {} for k, v in V1_FIELDS_TO_V2_FIELDS.items(): if v.startswith("-"): v = v[1:] k = f"-{k}" V2_FIELDS_TO_V1_FIELDS[v] = k FIELD_NAME_MAP = V1_FIELDS_TO_V2_FIELDS if PYDANTIC2 else V2_FIELDS_TO_V1_FIELDS def check_mixin_order(cls: type, mixin_class: type, base_model: type) -> None: """Warn if mixin_class appears after base_model in cls.__bases__.""" bases = cls.__bases__ with contextlib.suppress(ValueError): mixin_index = bases.index(mixin_class) base_model_index = bases.index(base_model) if mixin_index > base_model_index: warnings.warn( f"{mixin_class.__name__} should appear before pydantic.BaseModel", stacklevel=3, ) def move_field_kwargs(kwargs: dict) -> dict: """Move Field(...) kwargs from v1 to v2 and vice versa.""" for old_name, new_name in FIELD_NAME_MAP.items(): negate = False if new_name.startswith("-"): new_name = new_name[1:] negate = True if old_name in kwargs: if new_name in kwargs: raise ValueError(f"Cannot specify both {old_name} and {new_name}") val = not kwargs.pop(old_name) if negate else kwargs.pop(old_name) kwargs[new_name] = val return kwargs def clean_field_kwargs(kwargs: dict) -> dict: """Remove outdated Field(...) kwargs.""" const = kwargs.pop("const", None) if const is not None: raise TypeError( f"`const` is removed in v2, use `Literal[{const!r}]` instead, " "it works in v1 and v2." 
) return kwargs def Field(*args: Any, **kwargs: Any) -> Any: """Create a field for objects that can be configured.""" kwargs = clean_field_kwargs(kwargs) kwargs = move_field_kwargs(kwargs) return pydantic.Field(*args, **kwargs) pydantic-compat-0.1.2/src/pydantic_compat/_v1/000077500000000000000000000000001451605040700212425ustar00rootroot00000000000000pydantic-compat-0.1.2/src/pydantic_compat/_v1/__init__.py000066400000000000000000000006261451605040700233570ustar00rootroot00000000000000import pydantic.version if not pydantic.version.VERSION.startswith("1"): # pragma: no cover raise ImportError("pydantic_compat._v1 only supports pydantic v1.x") from .decorators import field_validator as field_validator from .decorators import model_validator as model_validator from .decorators import root_validator as root_validator from .mixin import PydanticCompatMixin as PydanticCompatMixin pydantic-compat-0.1.2/src/pydantic_compat/_v1/decorators.py000066400000000000000000000052571451605040700237720ustar00rootroot00000000000000from __future__ import annotations from functools import wraps from typing import TYPE_CHECKING, Any, Callable import pydantic if TYPE_CHECKING: from typing import Literal # V2 signature def field_validator( _field: str, *fields: str, mode: Literal["before", "after", "wrap", "plain"] = "after", check_fields: bool | None = None, ) -> Callable: """Adaptor from v2.field_validator -> v1.validator.""" # V1 signature # def validator( # *fields: str, # pre: bool = False, # each_item: bool = False, # always: bool = False, # check_fields: bool = True, # whole: Optional[bool] = None, # allow_reuse: bool = False, # ) -> Callable[[AnyCallable], 'AnyClassMethod']: # ... return pydantic.validator( _field, *fields, pre=(mode in ("before")), always=True, # should it be? check_fields=bool(check_fields), allow_reuse=True, ) # V2 signature def model_validator(*, mode: Literal["wrap", "before", "after"]) -> Any: """Adaptor from v2.model_validator -> v1.root_validator.""" # V1 signature # def root_validator( # _func: Optional[AnyCallable] = None, # *, # pre: bool = False, # allow_reuse: bool = False, # skip_on_failure: bool = False, # ) -> Union["AnyClassMethod", Callable[[AnyCallable], "AnyClassMethod"]]: # ... 
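    # Translate the v2-style arguments into the v1 shim below: mode='before' maps to
    # pre=True, and mode='after' sets construct_object=True so the wrapped function
    # receives a model instance (mirroring v2 semantics) rather than the raw dict of
    # field values.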
return root_validator( pre=mode == "before", allow_reuse=True, construct_object=mode == "after" ) def root_validator( _func: Callable | None = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False, construct_object: bool = False, ) -> Any: def _inner(_func: Callable) -> Any: func = _func if construct_object and not pre: if isinstance(_func, classmethod): _func = _func.__func__ @wraps(_func) def func(cls: type[pydantic.BaseModel], *args: Any, **kwargs: Any) -> Any: arg0, *rest = args # cast dict to model to match the v2 model_validator signature # using construct because it should already be valid new_args = (cls.construct(**arg0), *rest) result: pydantic.BaseModel = _func(cls, *new_args, **kwargs) # cast back to dict of field -> value return {k: getattr(result, k) for k in result.__fields__} deco = pydantic.root_validator( # type: ignore [call-overload] pre=pre, allow_reuse=allow_reuse, skip_on_failure=skip_on_failure ) return deco(func) return _inner(_func) if _func else _inner pydantic-compat-0.1.2/src/pydantic_compat/_v1/mixin.py000066400000000000000000000135741451605040700227520ustar00rootroot00000000000000from __future__ import annotations import sys from typing import TYPE_CHECKING, Any, ClassVar, Iterator, Mapping from pydantic import main from pydantic_compat._shared import V2_RENAMED_CONFIG_KEYS, check_mixin_order if TYPE_CHECKING: from typing import Dict from pydantic.fields import ModelField # type: ignore from typing_extensions import Protocol # fmt:off class Model(Protocol): def dict(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: ... # noqa: UP006 def json(self, *args: Any, **kwargs: Any) -> str: ... def copy(self, *args: Any, **kwargs: Any) -> Model: ... @classmethod def schema(cls, *args: Any, **kwargs: Any) -> Dict[str, Any]: ... # noqa: UP006 @classmethod def validate(cls, *args: Any, **kwargs: Any) -> Model: ... @classmethod def construct(cls, *args: Any, **kwargs: Any) -> Model: ... @classmethod def parse_raw(cls, *args: Any, **kwargs: Any) -> type[Model]: ... @classmethod def update_forward_refs(cls, *args: Any, **kwargs: Any) -> None: ... 
__fields__: ClassVar[Dict] # noqa: UP006 __fields_set__: set[str] __config__: ClassVar[type] # fmt:on if sys.version_info < (3, 9): def _get_fields(obj) -> dict[str, Any]: return obj.__fields__ main.ModelMetaclass.model_fields = property(_get_fields) REVERSE_CONFIG_NAME_MAP = {v: k for k, v in V2_RENAMED_CONFIG_KEYS.items()} def _convert_config(config_dict: dict) -> type: deprecated_renamed_keys = REVERSE_CONFIG_NAME_MAP.keys() & config_dict.keys() for k in sorted(deprecated_renamed_keys): config_dict[REVERSE_CONFIG_NAME_MAP[k]] = config_dict.pop(k) return type("Config", (), config_dict) class _MixinMeta(main.ModelMetaclass): def __new__(cls, name, bases, namespace: dict, **kwargs): # type: ignore if "model_config" in namespace and isinstance(namespace["model_config"], dict): namespace["Config"] = _convert_config(namespace.pop("model_config")) return super().__new__(cls, name, bases, namespace, **kwargs) class PydanticCompatMixin(metaclass=_MixinMeta): @classmethod def __try_update_forward_refs__(cls, **localns: Any) -> None: sup = super() if hasattr(sup, "__try_update_forward_refs__"): sup.__try_update_forward_refs__(**localns) def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None: check_mixin_order(cls, PydanticCompatMixin, main.BaseModel) def model_dump(self: Model, *args: Any, **kwargs: Any) -> Any: return self.dict(*args, **kwargs) def model_dump_json(self: Model, *args: Any, **kwargs: Any) -> Any: return self.json(*args, **kwargs) def model_copy(self: Model, *args: Any, **kwargs: Any) -> Any: return self.copy(*args, **kwargs) @classmethod def model_json_schema(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.schema(*args, **kwargs) @classmethod def model_validate(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.validate(*args, **kwargs) @classmethod def model_construct(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.construct(*args, **kwargs) @classmethod def model_validate_json(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.parse_raw(*args, **kwargs) @classmethod def model_rebuild(cls: type[Model], force: bool = True, **kwargs: Any) -> None: return cls.update_forward_refs(**kwargs) if sys.version_info < (3, 9): # differences in the behavior of patching class properties in python<3.9 @property def model_fields(cls: type[Model]) -> Mapping[str, Any]: return FieldInfoMap(cls.__fields__) else: @classmethod # type: ignore [misc] @property def model_fields(cls: type[Model]) -> Mapping[str, Any]: return FieldInfoMap(cls.__fields__) @property def model_fields_set(self: Model) -> set[str]: return self.__fields_set__ @classmethod # type: ignore [misc] @property def model_config(cls: type[Model]) -> Mapping[str, Any]: return DictLike(cls.__config__) class FieldInfoLike: """Wrapper to convera pydantic v1 ModelField to v2 FieldInfo.""" def __init__(self, model_field: ModelField) -> None: self._model_field = model_field @property def annotation(self) -> Any: return self._model_field.outer_type_ @property def frozen(self) -> bool: return not self._model_field.field_info.allow_mutation def __getattr__(self, key: str) -> Any: return getattr(self._model_field, key) class FieldInfoMap(Mapping[str, FieldInfoLike]): """Adaptor between v1 __fields__ and v2 model_field.""" def __init__(self, fields: dict[str, ModelField]) -> None: self._fields = fields def get(self, key: str, default: Any = None) -> Any: return self[key] if key in self._fields else default def __getitem__(self, key: str) -> FieldInfoLike: return 
FieldInfoLike(self._fields[key]) def __setitem__(self, key: str, value: Any) -> None: self._fields[key] = value def __iter__(self) -> Iterator[str]: yield from self._fields def __len__(self) -> int: return len(self._fields) class DictLike(Mapping[str, Any]): """Provide dict-like interface to an object.""" def __init__(self, obj: Any) -> None: self._obj = obj def get(self, key: str, default: Any = None) -> Any: return getattr(self._obj, key, default) def __getitem__(self, key: str) -> Any: return getattr(self._obj, key) def __setitem__(self, key: str, value: Any) -> None: setattr(self._obj, key, value) def __iter__(self) -> Iterator[str]: yield from self._obj.__dict__ def __len__(self) -> int: return len(self._obj.__dict__) pydantic-compat-0.1.2/src/pydantic_compat/_v2/000077500000000000000000000000001451605040700212435ustar00rootroot00000000000000pydantic-compat-0.1.2/src/pydantic_compat/_v2/__init__.py000066400000000000000000000005071451605040700233560ustar00rootroot00000000000000import pydantic.version if int(pydantic.version.VERSION[0]) <= 1: # pragma: no cover raise ImportError("pydantic_compat._v2 only supports pydantic v2.x") from .decorators import root_validator as root_validator from .decorators import validator as validator from .mixin import PydanticCompatMixin as PydanticCompatMixin pydantic-compat-0.1.2/src/pydantic_compat/_v2/decorators.py000066400000000000000000000044171451605040700237700ustar00rootroot00000000000000from __future__ import annotations import warnings from typing import Any, Callable from pydantic.deprecated import class_validators # V1 signature # def validator( # *fields: str, # pre: bool = False, # each_item: bool = False, # always: bool = False, # check_fields: bool = True, # whole: Optional[bool] = None, # allow_reuse: bool = False, # ) -> Callable[[AnyCallable], 'AnyClassMethod']: # ... def validator( _field: str, *fields: str, **kwargs: Any ) -> Callable[[Callable], Callable]: """Adaptor from v1.validator -> v2.field_validator.""" with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) return class_validators.validator(_field, *fields, **kwargs) # V1 signature # def root_validator( # _func: Optional[AnyCallable] = None, # *, # pre: bool = False, # allow_reuse: bool = False, # skip_on_failure: bool = False, # ) -> Union["AnyClassMethod", Callable[[AnyCallable], "AnyClassMethod"]]: # ... def root_validator( *_args: str, pre: bool = False, skip_on_failure: bool | None = None, allow_reuse: bool = False, construct_object: bool = False, # here to match our v1 patch behavior ) -> Any: """Adaptor from v1.root_validator -> v2.model_validator.""" # If you use `@root_validator` with pre=False (the default) # you MUST specify `skip_on_failure=True` # we let explicit `skip_on_failure=False` pass through to fail, # but we default to `skip_on_failure=True` to match v1 behavior if not pre and skip_on_failure is None: skip_on_failure = True if construct_object: raise ValueError( "construct_object=True is not supported by pydantic-compat when running on " "pydantic v2. Please use pydantic_compat.model_validator(mode='after') " "instead. (It works for both versions)." 
) with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) # def model_validator( *, mode: Literal['wrap', 'before', 'after']) -> Any: return class_validators.root_validator( # type: ignore [call-overload] *_args, pre=pre, skip_on_failure=bool(skip_on_failure), allow_reuse=allow_reuse, ) pydantic-compat-0.1.2/src/pydantic_compat/_v2/mixin.py000066400000000000000000000105151451605040700227430ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, Any, ClassVar, Dict, cast from pydantic import BaseModel from pydantic._internal import _model_construction from pydantic_compat._shared import V2_RENAMED_CONFIG_KEYS, check_mixin_order if TYPE_CHECKING: from pydantic import ConfigDict from typing_extensions import Protocol # fmt:off class Model(Protocol): def model_dump(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... def model_dump_json(self, *args: Any, **kwargs: Any) -> str: ... def model_copy(self, *args: Any, **kwargs: Any) -> Model: ... @classmethod def model_json_schema(cls, *args: Any, **kwargs: Any) -> dict[str, Any]: ... @classmethod def model_validate(cls, *args: Any, **kwargs: Any) -> Model: ... @classmethod def model_construct(cls, *args: Any, **kwargs: Any) -> Model: ... @classmethod def model_validate_json(cls, *args: Any, **kwargs: Any) -> type[Model]: ... @classmethod def model_rebuild(cls, *args: Any, **kwargs: Any) -> bool | None: ... model_fields: ClassVar[dict] model_fields_set: ClassVar[set[str]] model_config: ClassVar[ConfigDict] # fmt:on def _convert_config(config: type) -> ConfigDict: config_dict = {k: getattr(config, k) for k in dir(config) if not k.startswith("__")} deprecated_renamed_keys = V2_RENAMED_CONFIG_KEYS.keys() & config_dict.keys() for k in sorted(deprecated_renamed_keys): config_dict[V2_RENAMED_CONFIG_KEYS[k]] = config_dict.pop(k) # leave these here for now to warn about lost functionality # deprecated_removed_keys = V2_REMOVED_CONFIG_KEYS & config_dict.keys() # for k in sorted(deprecated_removed_keys): # config_dict.pop(k) return cast("ConfigDict", config_dict) class _MixinMeta(_model_construction.ModelMetaclass): def __new__(cls, name, bases, namespace, **kwargs): # type: ignore if "Config" in namespace and isinstance(namespace["Config"], type): namespace["model_config"] = _convert_config(namespace.pop("Config")) return super().__new__(cls, name, bases, namespace, **kwargs) class PydanticCompatMixin(metaclass=_MixinMeta): def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None: check_mixin_order(cls, PydanticCompatMixin, BaseModel) # the deprecation warning is on the metaclass type(cls).__fields__ = property(lambda cls: cls.model_fields) # type: ignore def dict(self: Model, *args: Any, **kwargs: Any) -> Any: return self.model_dump(*args, **kwargs) def json(self: Model, *args: Any, **kwargs: Any) -> Any: return self.model_dump_json(*args, **kwargs) def copy(self: Model, *args: Any, **kwargs: Any) -> Any: return self.model_copy(*args, **kwargs) @classmethod def schema(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.model_json_schema(*args, **kwargs) @classmethod def validate(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.model_validate(*args, **kwargs) @classmethod def construct(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.model_construct(*args, **kwargs) @classmethod def parse_obj(cls: type[Model], *args: Any, **kwargs: Any) -> Any: return cls.model_validate(*args, **kwargs) @classmethod def parse_raw(cls: type[Model], *args: 
Any, **kwargs: Any) -> Any: return cls.model_validate_json(*args, **kwargs) # this is needed in addition to the metaclass patch in __init_subclass__ @property def __fields__(self: Model) -> Dict[str, Any]: # noqa: UP006 return self.model_fields @property def __fields_set__(self: Model) -> set[str]: return self.model_fields_set @classmethod def update_forward_refs( cls: type[Model], force: bool = False, raise_errors: bool = True, **localns: Any, ) -> None: cls.model_rebuild( forc=force, raise_errors=raise_errors, _types_namespace=localns ) @classmethod def model_rebuild( cls: type[Model], force: bool = False, raise_errors: bool = True, **kwargs: Any ) -> bool | None: return super().model_rebuild( force=force, raise_errors=raise_errors, _types_namespace=kwargs ) pydantic-compat-0.1.2/src/pydantic_compat/py.typed000066400000000000000000000000001451605040700222420ustar00rootroot00000000000000pydantic-compat-0.1.2/tests/000077500000000000000000000000001451605040700157525ustar00rootroot00000000000000pydantic-compat-0.1.2/tests/test_base_model.py000066400000000000000000000061071451605040700214610ustar00rootroot00000000000000from typing import ClassVar import pydantic import pytest from pydantic_compat import PydanticCompatMixin class Model(PydanticCompatMixin, pydantic.BaseModel): x: int = 1 def test_v1_api(): m = Model() assert m.x == 1 assert m.dict() == {"x": 1} assert m.json().replace(" ", "") == '{"x":1}' assert m.copy() == m assert Model.parse_raw('{"x": 2}') == Model(x=2) assert Model.parse_obj({"x": 2}) == Model(x=2) assert Model.construct(x=2) == Model(x=2) assert Model.validate({"x": 2}) == Model(x=2) assert Model.schema() == { "title": "Model", "type": "object", "properties": { "x": {"title": "X", "type": "integer", "default": 1}, }, } Model.update_forward_refs(name="name") def test_v2_api(): m = Model() assert m.x == 1 assert m.model_dump() == {"x": 1} assert m.model_dump_json().replace(" ", "") == '{"x":1}' assert m.model_copy() == m assert Model.model_validate_json('{"x": 2}') == Model(x=2) assert Model.model_validate({"x": 2}) == Model(x=2) assert Model.model_construct(x=2) == Model(x=2) assert Model.model_validate({"x": 2}) == Model(x=2) assert Model.model_json_schema() == { "title": "Model", "type": "object", "properties": { "x": {"title": "X", "type": "integer", "default": 1}, }, } Model.model_rebuild(force=True) def test_v1_attributes(): m = Model() assert "x" in m.__fields__ assert "x" in Model.__fields__ assert "x" not in m.__fields_set__ m.x = 2 assert "x" in m.__fields_set__ def test_v2_attributes(): m = Model() assert "x" in m.model_fields assert "x" in Model.model_fields assert "x" not in m.model_fields_set m.x = 2 assert "x" in m.model_fields_set def test_mixin_order(): with pytest.warns( match="PydanticCompatMixin should appear before pydantic.BaseModel" ): class Model1(pydantic.BaseModel, PydanticCompatMixin): x: int = 1 class Model2(PydanticCompatMixin, pydantic.BaseModel): x: int = 1 V2Config = {"populate_by_name": True, "extra": "forbid", "frozen": True} class V1Config: allow_population_by_field_name = True extra = "forbid" frozen = True json_encoders: ClassVar[dict] = {} @pytest.mark.parametrize("config", [V1Config, V2Config]) def test_config(config): class Model1(PydanticCompatMixin, pydantic.BaseModel): name: str = pydantic.Field(alias="full_name") # to make sure that populate_by_name is working with pytest.raises((ValueError, TypeError)): # (v1, v2) m = Model1(name="John") class Model(PydanticCompatMixin, pydantic.BaseModel): name: str = 
pydantic.Field(alias="full_name") if isinstance(config, dict): model_config = config else: Config = config m = Model(name="John") # test frozen with pytest.raises((ValueError, TypeError)): # (v1, v2) m.name = "Sue" # test extra with pytest.raises((ValueError, TypeError)): # (v1, v2) Model(extra=1) pydantic-compat-0.1.2/tests/test_decorators.py000066400000000000000000000065651451605040700215440ustar00rootroot00000000000000from unittest.mock import Mock import pydantic import pytest from pydantic_compat import ( PYDANTIC2, PydanticCompatMixin, field_validator, model_validator, root_validator, validator, ) def test_v1_validator(): mock_before = Mock() mock_after = Mock() class Model(PydanticCompatMixin, pydantic.BaseModel): x: int = 1 @validator("x", pre=True) def _validate_x_before(cls, v): mock_before(v) return v @validator("x") def _validate_x_after(cls, v): mock_after(v) return v m = Model(x="2") mock_before.assert_called_once_with("2") mock_after.assert_called_once_with(2) assert m.x == 2 def test_v2_field_validator(): mock_before = Mock() mock_after = Mock() class Model(PydanticCompatMixin, pydantic.BaseModel): x: int = 1 @field_validator("x", mode="before") def _validate_x_before(cls, v): mock_before(v) return v @field_validator("x", mode="after") def _validate_x_after(cls, v): mock_after(v) return v m = Model(x="2") mock_before.assert_called_once_with("2") mock_after.assert_called_once_with(2) assert m.x == 2 def test_v1_root_validator(): mock_before = Mock() mock_after = Mock() class Model(PydanticCompatMixin, pydantic.BaseModel): x: int = 1 @root_validator(pre=True) def _validate_x_before(cls, v): mock_before(v) return v @root_validator(pre=False) def _validate_x_after(cls, v): mock_after(v) return v m = Model(x="2") mock_before.assert_called_once_with({"x": "2"}) mock_after.assert_called_once_with({"x": 2}) assert m.x == 2 @pytest.mark.xfail(PYDANTIC2, reason="not supported in pydantic v2", strict=True) def test_v1_root_validator_with_construct(): """Test the construct_object parameter of root_validator. This converts the input dict to the model object before calling the validator. To match the v2 behavior. It's not supported when running on v2. For that, just use model_validator(mode='after'). 
""" mock_after2 = Mock() class Model(PydanticCompatMixin, pydantic.BaseModel): x: int = 1 @root_validator(pre=False, construct_object=True) def _validate_x_after2(cls, values): assert isinstance(values, Model) mock_after2(values.x) return values m = Model(x="2") mock_after2.assert_called_once_with(2) assert m.x == 2 def test_v2_model_validator(): mock_before = Mock() mock_after = Mock() mock_after_cm = Mock() class Model(PydanticCompatMixin, pydantic.BaseModel): x: int = 1 @model_validator(mode="before") def _validate_x_before(cls, v): mock_before(v) return v @model_validator(mode="after") def _validate_x_after(cls, v): mock_after(v) return v # this also needs to work @model_validator(mode="after") @classmethod def _validate_x_after_cm(cls, v): mock_after_cm(v) return v m = Model(x="2") mock_before.assert_called_once_with({"x": "2"}) mock_after.assert_called_once_with(m) mock_after_cm.assert_called_once_with(m) assert m.x == 2 pydantic-compat-0.1.2/tests/test_fields.py000066400000000000000000000047671451605040700206470ustar00rootroot00000000000000from typing import ClassVar, List, Tuple import pytest from typing_extensions import Literal from pydantic_compat import BaseModel, Field def test_field_const() -> None: with pytest.raises(TypeError, match="use `Literal\\['bar'\\]` instead"): Field(..., const="bar") # type: ignore class Foo(BaseModel): bar: Literal["bar"] = "bar" with pytest.raises(ValueError, match="validation error"): Foo(bar="baz") # type: ignore @pytest.mark.parametrize("post", ["items", "length"]) @pytest.mark.parametrize("pre", ["min", "max"]) def test_field_min_max_items(pre: str, post: str) -> None: class Foo(BaseModel): bar: List[int] = Field(..., **{f"{pre}_{post}": 2}) # type: ignore bad_val = [1, 2, 3] if pre == "max" else [1] with pytest.raises((TypeError, ValueError)): # (v1, v2) Foo(bar=bad_val) def test_field_allow_mutation() -> None: # used in v1 class Foo(BaseModel): bar: int = Field(default=1, allow_mutation=False) class Config: validate_assignment = True foo = Foo() with pytest.raises((TypeError, ValueError)): # (v1, v2) foo.bar = 2 def test_field_frozen() -> None: # used in v2 class Foo(BaseModel): bar: int = Field(default=1, frozen=True) model_config: ClassVar[dict] = {"validate_assignment": True} # type: ignore foo = Foo() with pytest.raises((TypeError, ValueError)): # (v1, v2) foo.bar = 2 @pytest.mark.parametrize("key", ["regex", "pattern"]) def test_regex_pattern(key: str) -> None: class Foo(BaseModel): bar: str = Field(..., **{key: "^bar$"}) # type: ignore Foo(bar="bar") with pytest.raises(ValueError): Foo(bar="baz") @pytest.mark.parametrize( "keys", [ ("min_items", "min_length"), ("max_items", "max_length"), ("allow_mutation", "frozen"), ("regex", "pattern"), ], ) def test_double_usage_raises(keys: Tuple[str, str]) -> None: with pytest.raises(ValueError, match="Cannot specify both"): Field(..., **dict.fromkeys(keys)) # type: ignore # not attempting unique_items yet... # see https://github.com/pydantic/pydantic-core/issues/296 # @pytest.mark.skipif( # pydantic.version.VERSION.startswith("1.8"), # reason="pydantic 1.8 does not support unique_items", # ) # def test_unique_items() -> None: # class Foo(BaseModel): # bar: List[int] = Field(..., unique_items=True) # with pytest.raises(ValueError): # Foo(bar=[1, 2, 3, 1])