mashumaro-3.13.1/.editorconfig

# EditorConfig is awesome: http://EditorConfig.org

# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
charset = utf-8
max_line_length = 79

[Makefile]
indent_style = tab

[*.{yml,yaml,feature,json,toml}]
indent_size = 2

[*.{tsv,csv}]
trim_trailing_whitespace = false

[*.rst]
max_line_length = 80

mashumaro-3.13.1/.github/CODE_OF_CONDUCT.md

# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socioeconomic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at random.gauss@gmail.com. All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations.

mashumaro-3.13.1/.github/CONTRIBUTING.md

# Contributing to mashumaro

First off, thanks for taking the time to contribute! ❤️

All types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉

> And if you like the project, but just don't have time to contribute, that's fine.
> There are other easy ways to support the project and show your appreciation, which we would also be very happy about:
> - Star the project
> - Tweet about it
> - Refer this project in your project's readme
> - Mention the project at local meetups and tell your friends/colleagues

## Table of Contents

- [I Have a Question](#i-have-a-question)
- [I Want To Contribute](#i-want-to-contribute)
  - [Reporting Bugs](#reporting-bugs)
  - [Suggesting Enhancements](#suggesting-enhancements)
  - [Your First Code Contribution](#your-first-code-contribution)
  - [Improving The Documentation](#improving-the-documentation)

## I Have a Question

> If you want to ask a question, we assume that you have read the available [Documentation](https://github.com/Fatal1ty/mashumaro/blob/master/README.md).

Before you ask a question, it is best to search for existing [Issues](https://github.com/Fatal1ty/mashumaro/issues) that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue. It is also advisable to search the internet for answers first.

If you then still feel the need to ask a question and need clarification, we recommend the following:

- Open an [Issue](https://github.com/Fatal1ty/mashumaro/issues/new).
- Provide as much context as you can about what you're running into.
- Provide project and platform versions (Python, mashumaro, OS, etc.), depending on what seems relevant.
- Tag your issue with the `question` tag.

We will then take care of the issue as soon as possible.

## I Want To Contribute

> ### Legal Notice
> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project licence.

### Reporting Bugs

#### Before Submitting a Bug Report

A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.

- Make sure that you are using the latest version.
- Determine if your bug is really a bug and not an error on your side, e.g. using incompatible environment components/versions (make sure that you have read the [documentation](https://github.com/Fatal1ty/mashumaro/blob/master/README.md); if you are looking for support, you might want to check [this section](#i-have-a-question)).
- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/Fatal1ty/mashumaro/issues?q=label%3Abug).
- Also make sure to search the internet (including Stack Overflow) to see if users outside the GitHub community have discussed the issue.
- Collect information about the bug:
  - Stack trace (Traceback)
  - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
  - Version of the interpreter, compiler, SDK, runtime environment, package manager, depending on what seems relevant.
  - Possibly your input and the output
  - Can you reliably reproduce the issue? And can you also reproduce it with older versions?

#### How Do I Submit a Good Bug Report?

> You must never report security related issues, vulnerabilities or bugs including sensitive information to the issue tracker, or elsewhere in public.
> Instead, sensitive bugs must be reported in accordance with our [security policy](https://github.com/Fatal1ty/mashumaro/security/policy).

We use GitHub issues to track bugs and errors. If you run into an issue with the project:

- Open an [Issue](https://github.com/Fatal1ty/mashumaro/issues/new?template=bug_report.md). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
- Explain the behavior you would expect and the actual behavior.
- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
- Provide the information you collected in the previous section.

Once it's filed:

- The project team will label the issue accordingly.
- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs information`. Bugs with the `needs information` tag will not be addressed until they are reproduced.
- If the team is able to reproduce the issue, it will be labeled with the `bug` tag, as well as possibly other tags, and the issue will be left to be [implemented by someone](#your-first-code-contribution).

### Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for mashumaro, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.

#### Before Submitting an Enhancement

- Make sure that you are using the latest version.
- Read the [documentation](https://github.com/Fatal1ty/mashumaro/blob/master/README.md) carefully and find out if the functionality is already covered, maybe by an individual configuration.
- Perform a [search](https://github.com/Fatal1ty/mashumaro/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.

#### How Do I Submit a Good Enhancement Suggestion?

Enhancement suggestions are tracked as [GitHub issues](https://github.com/Fatal1ty/mashumaro/issues?q=is%3Aissue+is%3Aopen+label%3Aenhancement) with the `enhancement` tag.

- Open a [Feature Request](https://github.com/Fatal1ty/mashumaro/issues/new?template=feature_request.md).
- Use a **clear and descriptive title** for the issue to identify the suggestion.
- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.
- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.
- **Explain why this enhancement would be useful** to most mashumaro users. You may also want to point out the other projects that solved it better and which could serve as inspiration.
### Your First Code Contribution

> Please refrain from creating a new pull request without an existing related issue that your pull request solves. Minor corrections, such as spelling errors, may be an exception.

#### How to Get Started

1. Finding an Issue: Browse through our repository and look for issues you may help with. If you're new to open source and looking for a good starting point, we have a special label called [`good first issue`](https://github.com/Fatal1ty/mashumaro/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) that highlights issues that are ideal for beginners to tackle.
2. Understanding the Issue: Once you've found a good issue, take the time to read through the description and understand what needs to be done. Feel free to ask questions if anything is unclear.
3. Setting up your Development Environment: Follow [the instructions](#setting-up-your-development-environment) to set up your development environment. Make sure you have all the necessary tools and dependencies installed.
4. Making your Contribution: Fork the repository, create a new branch for your changes, and start working on the issue. Make sure to follow our coding guidelines and best practices.
5. Submitting your Pull Request: Once you're done with your changes, submit a pull request back to the main repository. Be sure to reference the issue you're addressing in your pull request description.

### What Can You Help With?

Here are some common areas where you can make contributions:

- Fixing bugs or issues reported by users
- Implementing new features or enhancements
- Improving the documentation
- Refactoring code for better readability and performance

Remember, everyone starts somewhere, and we're here to support you on your journey to becoming a contributor. If you have any questions or need help along the way, feel free to reach out to our team or community members for assistance.

#### Setting up your Development Environment

Before getting started, you will need to have already installed:

* [python](https://www.python.org) (3.8+ only)
* [git](https://git-scm.com)
* [just](https://github.com/casey/just)

Once you have those installed, you're ready to:

* Clone the repository
* Create a virtual environment
* Install all development dependencies
* Install a development version of mashumaro

> Please note that you can use any virtual environment management tool you like. Here we show the basic instructions using the standard `venv` library.

```shell
# Clone the repository
git clone https://github.com/Fatal1ty/mashumaro

# cd into the repo root directory
cd mashumaro/

# Create and activate a virtual environment (recommended)
python -m venv env && source env/bin/activate

# Install mashumaro and all development dependencies
just build
```

#### Linters

To run linters, use the following command:

```shell
just lint
```

#### Format the code

To format the code according to style guides, use the following command:

```shell
just format
```

#### Testing

Tests are located in the `tests/` directory. Any code changes should include additional tests or modification of existing tests to ensure correctness.

To run tests locally, use the following command:

```shell
just test
```

Please make sure that test coverage has not decreased after your changes. Also note that test coverage depends on the Python version that your virtual environment was created with, since not all functionality works on every supported version of Python.
To run tests with coverage report, you can use the following command:

```shell
just test-with-coverage
```

#### Continuous integration

We use GitHub Actions to provide "continuous integration" testing for all pull requests. When submitting a pull request, please check that all tests are passing and fix any issues that may arise.

### Improving The Documentation

We welcome contributions to improve the documentation of this project. Whether it's fixing typos, clarifying instructions, adding examples, or creating new sections, your help is greatly appreciated.

Here are some ways you can contribute to improving the documentation:

1. Fixing Typos and Grammar: If you notice any typos or grammatical errors in the documentation, feel free to submit a pull request with the corrections.
2. Clarifying Instructions: If you find any instructions that are unclear or ambiguous, you can suggest improvements to make them more understandable for other users.
3. Adding Examples: Providing code examples or step-by-step guides can be very helpful for users trying to understand how to use the project. Feel free to add examples where needed.
4. Creating New Sections: If you think there are important topics missing from the documentation, you can create new sections to cover those topics.
5. Updating Outdated Information: If any information in the documentation is outdated or no longer accurate, please update it with the correct information.

mashumaro-3.13.1/.github/FUNDING.yml

custom: ['https://coindrop.to/tikhonov_a']

mashumaro-3.13.1/.github/ISSUE_TEMPLATE/bug_report.md

---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

* mashumaro version:
* Python version:
* Operating System:

### Description

Describe what you were trying to get done. Tell us what happened, what went wrong, and what you expected to happen.

### What I Did

```
Paste the code, command(s) you ran and the output.
If there was a crash, please include the traceback here.
```

mashumaro-3.13.1/.github/ISSUE_TEMPLATE/feature_request.md

---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context about the feature request here.

mashumaro-3.13.1/.github/SECURITY.md

At our GitHub project, we take security seriously and strive to maintain the highest level of security for our users. We encourage all members of our community to report any security-related bugs they discover to us as soon as possible. In order to ensure that these reports are handled in a secure and efficient manner, we have established the following policy:
1. Reporting Security-Related Bugs: If you believe that you have discovered a security-related bug in our project, please report it to us immediately by sending an email to random.gauss@gmail.com. Please do not open a GitHub issue for security-related bugs, as this may put our users at risk.
2. Providing Details: When reporting a security-related bug, please provide as much detail as possible, including a detailed description of the issue, steps to reproduce the problem, and any relevant code or screenshots. This will help us to quickly identify and address the issue.
3. Confidentiality: We take the confidentiality of security-related bug reports very seriously. We will keep all information related to the bug confidential and will not share it with anyone outside of our team without your permission, except as required by law.
4. Resolution: We will work diligently to resolve the issue as quickly as possible and will keep you informed of our progress throughout the process.
5. Public Disclosure: Once the issue has been resolved, we will make a release and announce the security fix through our normal communication channels. When it makes sense we may also obtain a CVE ID.

mashumaro-3.13.1/.github/workflows/main.yml

name: tests

on:
  push:
    branches:
      - '*'
  pull_request:
    branches:
      - master

jobs:
  test-code-style:
    name: Code style tests
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install .
          pip install -r requirements-dev.txt
      - name: Run ruff
        run: ruff check mashumaro
      - name: Run mypy
        run: mypy mashumaro
      - name: Run black
        run: black --check .
      - name: Run codespell
        run: codespell mashumaro tests README.md .github/*.md

  test-posix:
    name: Tests on Posix
    needs:
      - test-code-style
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install .
          pip install -r requirements-dev.txt
      - name: Run tests with coverage
        run: pytest --cov=mashumaro --cov=tests
      - name: Upload Coverage
        run: coveralls --service=github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_FLAG_NAME: posix-${{ matrix.python-version }}
          COVERALLS_PARALLEL: true

  test-windows:
    name: Tests on Windows
    needs:
      - test-code-style
    runs-on: windows-latest
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install .
          pip install -r requirements-dev.txt
          pip install tzdata
      - name: Run tests with coverage
        run: pytest --cov=mashumaro --cov=tests
      - name: Upload Coverage
        run: coveralls --service=github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_FLAG_NAME: windows-${{ matrix.python-version }}
          COVERALLS_PARALLEL: true

  coveralls:
    name: Finish Coveralls
    needs:
      - test-posix
      - test-windows
    runs-on: ubuntu-latest
    steps:
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.11
      - name: Install dependencies
        run: pip install coveralls
      - name: Finish coveralls
        run: coveralls --service=github --finish
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

mashumaro-3.13.1/.gitignore

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# PyCharm
.idea

# pyenv
.python-version

# OSX
.DS_Store

mashumaro-3.13.1/LICENSE

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship.
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

4. Redistribution.
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

8. Limitation of Liability.
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

Copyright 2017 Alexander Tikhonov

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

mashumaro-3.13.1/README.md
logo

###### Fast and well tested serialization library

[![Build Status](https://github.com/Fatal1ty/mashumaro/workflows/tests/badge.svg)](https://github.com/Fatal1ty/mashumaro/actions)
[![Coverage Status](https://coveralls.io/repos/github/Fatal1ty/mashumaro/badge.svg?branch=master)](https://coveralls.io/github/Fatal1ty/mashumaro?branch=master)
[![Latest Version](https://img.shields.io/pypi/v/mashumaro.svg)](https://pypi.python.org/pypi/mashumaro)
[![Python Version](https://img.shields.io/pypi/pyversions/mashumaro.svg)](https://pypi.python.org/pypi/mashumaro)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
In Python, you often need to dump and load objects based on the schema you have. It can be a dataclass model, a list of third-party generic classes or whatever. Mashumaro not only lets you save and load things in different ways, but it also does it _super quick_.

**Key features**

* 🚀 One of the fastest libraries
* ☝️ Mature and time-tested
* 👶 Easy to use out of the box
* ⚙️ Highly customizable
* 🎉 Built-in support for JSON, YAML, TOML, MessagePack
* 📦 Built-in support for almost all Python types including typing-extensions
* 📝 JSON Schema generation

Table of contents
-------------------------------------------------------------------------------
* [Table of contents](#table-of-contents)
* [Introduction](#introduction)
* [Installation](#installation)
* [Changelog](#changelog)
* [Supported data types](#supported-data-types)
* [Usage example](#usage-example)
* [How does it work?](#how-does-it-work)
* [Benchmark](#benchmark)
* [Supported serialization formats](#supported-serialization-formats)
  * [Basic form](#basic-form)
  * [JSON](#json)
    * [json library](#json-library)
    * [orjson library](#orjson-library)
  * [YAML](#yaml)
  * [TOML](#toml)
  * [MessagePack](#messagepack)
* [Customization](#customization)
  * [SerializableType interface](#serializabletype-interface)
    * [User-defined types](#user-defined-types)
    * [User-defined generic types](#user-defined-generic-types)
  * [SerializationStrategy](#serializationstrategy)
    * [Third-party types](#third-party-types)
    * [Third-party generic types](#third-party-generic-types)
  * [Field options](#field-options)
    * [`serialize` option](#serialize-option)
    * [`deserialize` option](#deserialize-option)
    * [`serialization_strategy` option](#serialization_strategy-option)
    * [`alias` option](#alias-option)
  * [Config options](#config-options)
    * [`debug` config option](#debug-config-option)
    * [`code_generation_options` config option](#code_generation_options-config-option)
    * [`serialization_strategy` config option](#serialization_strategy-config-option)
    * [`aliases` config option](#aliases-config-option)
    * [`serialize_by_alias` config option](#serialize_by_alias-config-option)
    * [`allow_deserialization_not_by_alias` config option](#allow_deserialization_not_by_alias-config-option)
    * [`omit_none` config option](#omit_none-config-option)
    * [`omit_default` config option](#omit_default-config-option)
    * [`namedtuple_as_dict` config option](#namedtuple_as_dict-config-option)
    * [`allow_postponed_evaluation` config option](#allow_postponed_evaluation-config-option)
    * [`dialect` config option](#dialect-config-option)
    * [`orjson_options` config option](#orjson_options-config-option)
    * [`discriminator` config option](#discriminator-config-option)
    * [`lazy_compilation` config option](#lazy_compilation-config-option)
    * [`sort_keys` config option](#sort_keys-config-option)
    * [`forbid_extra_keys` config option](#forbid_extra_keys-config-option)
  * [Passing field values as is](#passing-field-values-as-is)
  * [Extending existing types](#extending-existing-types)
  * [Field aliases](#field-aliases)
  * [Dialects](#dialects)
    * [`serialization_strategy` dialect option](#serialization_strategy-dialect-option)
    * [`serialize_by_alias` dialect option](#serialize_by_alias-dialect-option)
    * [`omit_none` dialect option](#omit_none-dialect-option)
    * [`omit_default` dialect option](#omit_default-dialect-option)
    * [`namedtuple_as_dict` dialect option](#namedtuple_as_dict-dialect-option)
    * [`no_copy_collections` dialect option](#no_copy_collections-dialect-option)
    * [Changing the default dialect](#changing-the-default-dialect)
  * [Discriminator](#discriminator)
    * [Subclasses distinguishable by a field](#subclasses-distinguishable-by-a-field)
    * [Subclasses without a common field](#subclasses-without-a-common-field)
    * [Class level discriminator](#class-level-discriminator)
    * [Working with union of classes](#working-with-union-of-classes)
    * [Using a custom variant tagger function](#using-a-custom-variant-tagger-function)
  * [Code generation options](#code-generation-options)
    * [Add `omit_none` keyword argument](#add-omit_none-keyword-argument)
    * [Add `by_alias` keyword argument](#add-by_alias-keyword-argument)
    * [Add `dialect` keyword argument](#add-dialect-keyword-argument)
    * [Add `context` keyword argument](#add-context-keyword-argument)
* [Generic dataclasses](#generic-dataclasses)
  * [Generic dataclass inheritance](#generic-dataclass-inheritance)
  * [Generic dataclass in a field type](#generic-dataclass-in-a-field-type)
* [GenericSerializableType interface](#genericserializabletype-interface)
* [Serialization hooks](#serialization-hooks)
  * [Before deserialization](#before-deserialization)
  * [After deserialization](#after-deserialization)
  * [Before serialization](#before-serialization)
  * [After serialization](#after-serialization)
* [JSON Schema](#json-schema)
  * [Building JSON Schema](#building-json-schema)
  * [JSON Schema constraints](#json-schema-constraints)
  * [Extending JSON Schema](#extending-json-schema)
  * [JSON Schema and custom serialization methods](#json-schema-and-custom-serialization-methods)

Introduction
-------------------------------------------------------------------------------

This library provides two fundamentally different approaches to converting your data to and from various formats. Each of them is useful in different situations:

* Codecs
* Mixins

Codecs are represented by a set of decoder / encoder classes and decode / encode functions for each supported format. You can use them to convert data of any python built-in and third-party type to JSON, YAML, TOML, MessagePack or a basic form accepted by other serialization formats. For example, you can convert a list of datetime objects to a JSON array containing string-represented datetimes and vice versa.

Mixins are primarily for dataclass models. They are represented by mixin classes that add methods for converting to and from JSON, YAML, TOML, MessagePack or a basic form accepted by other serialization formats. If you have a root dataclass model, then it will be the easiest way to make it serializable. All you have to do is inherit a particular mixin class.

In addition to serialization functionality, this library also provides a JSON Schema builder that can be used in places where interoperability matters.

Installation
-------------------------------------------------------------------------------

Use pip to install:

```shell
$ pip install mashumaro
```

The current version of `mashumaro` supports Python versions 3.8 — 3.13.

It's not recommended to use any version of Python that has reached its [end of life](https://devguide.python.org/versions/) and is no longer receiving security updates or bug fixes from the Python development team. For convenience, there is a table below that outlines the last version of `mashumaro` that can be installed on unmaintained versions of Python.
| Python Version | Last Version of mashumaro                                          | Python EOL |
|----------------|--------------------------------------------------------------------|------------|
| 3.7            | [3.9.1](https://github.com/Fatal1ty/mashumaro/releases/tag/v3.9.1) | 2023-06-27 |
| 3.6            | [3.1.1](https://github.com/Fatal1ty/mashumaro/releases/tag/v3.1.1) | 2021-12-23 |

Changelog
-------------------------------------------------------------------------------

This project follows the principles of [Semantic Versioning](https://semver.org). Changelog is available on [GitHub Releases page](https://github.com/Fatal1ty/mashumaro/releases).

Supported data types
-------------------------------------------------------------------------------

There is support for generic types from the standard [`typing`](https://docs.python.org/3/library/typing.html) module:

* [`List`](https://docs.python.org/3/library/typing.html#typing.List)
* [`Tuple`](https://docs.python.org/3/library/typing.html#typing.Tuple)
* [`NamedTuple`](https://docs.python.org/3/library/typing.html#typing.NamedTuple)
* [`Set`](https://docs.python.org/3/library/typing.html#typing.Set)
* [`FrozenSet`](https://docs.python.org/3/library/typing.html#typing.FrozenSet)
* [`Deque`](https://docs.python.org/3/library/typing.html#typing.Deque)
* [`Dict`](https://docs.python.org/3/library/typing.html#typing.Dict)
* [`OrderedDict`](https://docs.python.org/3/library/typing.html#typing.OrderedDict)
* [`DefaultDict`](https://docs.python.org/3/library/typing.html#typing.DefaultDict)
* [`TypedDict`](https://docs.python.org/3/library/typing.html#typing.TypedDict)
* [`Mapping`](https://docs.python.org/3/library/typing.html#typing.Mapping)
* [`MutableMapping`](https://docs.python.org/3/library/typing.html#typing.MutableMapping)
* [`Counter`](https://docs.python.org/3/library/typing.html#typing.Counter)
* [`ChainMap`](https://docs.python.org/3/library/typing.html#typing.ChainMap)
* [`Sequence`](https://docs.python.org/3/library/typing.html#typing.Sequence)

for standard generic types on [PEP 585](https://www.python.org/dev/peps/pep-0585/) compatible Python (3.9+):

* [`list`](https://docs.python.org/3/library/stdtypes.html#list)
* [`tuple`](https://docs.python.org/3/library/stdtypes.html#tuple)
* [`namedtuple`](https://docs.python.org/3/library/collections.html#collections.namedtuple)
* [`set`](https://docs.python.org/3/library/stdtypes.html#set)
* [`frozenset`](https://docs.python.org/3/library/stdtypes.html#frozenset)
* [`collections.abc.Set`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Set)
* [`collections.abc.MutableSet`](https://docs.python.org/3/library/collections.abc.html#collections.abc.MutableSet)
* [`collections.deque`](https://docs.python.org/3/library/collections.html#collections.deque)
* [`dict`](https://docs.python.org/3/library/stdtypes.html#dict)
* [`collections.OrderedDict`](https://docs.python.org/3/library/collections.html#collections.OrderedDict)
* [`collections.defaultdict`](https://docs.python.org/3/library/collections.html#collections.defaultdict)
* [`collections.abc.Mapping`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Mapping)
* [`collections.abc.MutableMapping`](https://docs.python.org/3/library/collections.abc.html#collections.abc.MutableMapping)
* [`collections.Counter`](https://docs.python.org/3/library/collections.html#collections.Counter)
* [`collections.ChainMap`](https://docs.python.org/3/library/collections.html#collections.ChainMap)
* [`collections.abc.Sequence`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence)
* [`collections.abc.MutableSequence`](https://docs.python.org/3/library/collections.abc.html#collections.abc.MutableSequence)

for special primitives from the [`typing`](https://docs.python.org/3/library/typing.html) module:

* [`Any`](https://docs.python.org/3/library/typing.html#typing.Any)
* [`Optional`](https://docs.python.org/3/library/typing.html#typing.Optional)
* [`Union`](https://docs.python.org/3/library/typing.html#typing.Union)
* [`TypeVar`](https://docs.python.org/3/library/typing.html#typing.TypeVar)
* [`TypeVarTuple`](https://docs.python.org/3/library/typing.html#typing.TypeVarTuple)
* [`NewType`](https://docs.python.org/3/library/typing.html#newtype)
* [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated)
* [`Literal`](https://docs.python.org/3/library/typing.html#typing.Literal)
* [`LiteralString`](https://docs.python.org/3/library/typing.html#typing.LiteralString)
* [`Final`](https://docs.python.org/3/library/typing.html#typing.Final)
* [`Self`](https://docs.python.org/3/library/typing.html#typing.Self)
* [`Unpack`](https://docs.python.org/3/library/typing.html#typing.Unpack)

for standard interpreter types from the [`types`](https://docs.python.org/3/library/types.html#standard-interpreter-types) module:

* [`NoneType`](https://docs.python.org/3/library/types.html#types.NoneType)
* [`UnionType`](https://docs.python.org/3/library/types.html#types.UnionType)
* [`MappingProxyType`](https://docs.python.org/3/library/types.html#types.MappingProxyType)

for enumerations based on classes from the standard [`enum`](https://docs.python.org/3/library/enum.html) module:

* [`Enum`](https://docs.python.org/3/library/enum.html#enum.Enum)
* [`IntEnum`](https://docs.python.org/3/library/enum.html#enum.IntEnum)
* [`StrEnum`](https://docs.python.org/3/library/enum.html#enum.StrEnum)
* [`Flag`](https://docs.python.org/3/library/enum.html#enum.Flag)
* [`IntFlag`](https://docs.python.org/3/library/enum.html#enum.IntFlag)

for common built-in types:

* [`int`](https://docs.python.org/3/library/functions.html#int)
* [`float`](https://docs.python.org/3/library/functions.html#float)
* [`bool`](https://docs.python.org/3/library/stdtypes.html#bltin-boolean-values)
* [`str`](https://docs.python.org/3/library/stdtypes.html#str)
* [`bytes`](https://docs.python.org/3/library/stdtypes.html#bytes)
* [`bytearray`](https://docs.python.org/3/library/stdtypes.html#bytearray)

for built-in datetime oriented types (see [more](#deserialize-option) details):

* [`datetime`](https://docs.python.org/3/library/datetime.html#datetime.datetime)
* [`date`](https://docs.python.org/3/library/datetime.html#datetime.date)
* [`time`](https://docs.python.org/3/library/datetime.html#datetime.time)
* [`timedelta`](https://docs.python.org/3/library/datetime.html#datetime.timedelta)
* [`timezone`](https://docs.python.org/3/library/datetime.html#datetime.timezone)
* [`ZoneInfo`](https://docs.python.org/3/library/zoneinfo.html#zoneinfo.ZoneInfo)

for pathlike types:

* [`PurePath`](https://docs.python.org/3/library/pathlib.html#pathlib.PurePath)
* [`Path`](https://docs.python.org/3/library/pathlib.html#pathlib.Path)
* [`PurePosixPath`](https://docs.python.org/3/library/pathlib.html#pathlib.PurePosixPath)
* [`PosixPath`](https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath)
* [`PureWindowsPath`](https://docs.python.org/3/library/pathlib.html#pathlib.PureWindowsPath)
* [`WindowsPath`](https://docs.python.org/3/library/pathlib.html#pathlib.WindowsPath)
* [`os.PathLike`](https://docs.python.org/3/library/os.html#os.PathLike)

for other less popular built-in types:

* [`uuid.UUID`](https://docs.python.org/3/library/uuid.html#uuid.UUID)
* [`decimal.Decimal`](https://docs.python.org/3/library/decimal.html#decimal.Decimal)
* [`fractions.Fraction`](https://docs.python.org/3/library/fractions.html#fractions.Fraction)
* [`ipaddress.IPv4Address`](https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv4Address)
* [`ipaddress.IPv6Address`](https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv6Address)
* [`ipaddress.IPv4Network`](https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv4Network)
* [`ipaddress.IPv6Network`](https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv6Network)
* [`ipaddress.IPv4Interface`](https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv4Interface)
* [`ipaddress.IPv6Interface`](https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv6Interface)

for backported types from [`typing-extensions`](https://github.com/python/typing_extensions):

* [`OrderedDict`](https://docs.python.org/3/library/typing.html#typing.OrderedDict)
* [`TypedDict`](https://docs.python.org/3/library/typing.html#typing.TypedDict)
* [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated)
* [`Literal`](https://docs.python.org/3/library/typing.html#typing.Literal)
* [`LiteralString`](https://docs.python.org/3/library/typing.html#typing.LiteralString)
* [`Self`](https://docs.python.org/3/library/typing.html#typing.Self)
* [`TypeVarTuple`](https://docs.python.org/3/library/typing.html#typing.TypeVarTuple)
* [`Unpack`](https://docs.python.org/3/library/typing.html#typing.Unpack)

for arbitrary types:

* [user-defined types](#user-defined-types)
* [third-party types](#third-party-types)
* [user-defined generic types](#user-defined-generic-types)
* [third-party generic types](#third-party-generic-types)
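Most of these types work out of the box, with sensible default string representations for non-JSON-native values. As a small sketch (using the basic codec described under [Supported serialization formats](#supported-serialization-formats) below; the type and values here are only illustrative):

```python
from datetime import date

import mashumaro.codecs.basic as basic_codec

# dates are represented as ISO 8601 strings in the basic form by default
basic = basic_codec.encode([date(2024, 1, 1)], list[date])
# basic == ["2024-01-01"]
assert basic_codec.decode(basic, list[date]) == [date(2024, 1, 1)]
```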
Usage example
-------------------------------------------------------------------------------

Suppose we're developing a financial application and we operate with currencies and stocks:

```python
from dataclasses import dataclass
from enum import Enum

class Currency(Enum):
    USD = "USD"
    EUR = "EUR"

@dataclass
class CurrencyPosition:
    currency: Currency
    balance: float

@dataclass
class StockPosition:
    ticker: str
    name: str
    balance: int
```

Now we want a dataclass for portfolio that will be serialized to and from JSON. We inherit `DataClassJSONMixin` that adds this functionality:

```python
from mashumaro.mixins.json import DataClassJSONMixin

...

@dataclass
class Portfolio(DataClassJSONMixin):
    currencies: list[CurrencyPosition]
    stocks: list[StockPosition]
```

Let's create a portfolio instance and check methods `from_json` and `to_json`:

```python
portfolio = Portfolio(
    currencies=[
        CurrencyPosition(Currency.USD, 238.67),
        CurrencyPosition(Currency.EUR, 361.84),
    ],
    stocks=[
        StockPosition("AAPL", "Apple", 10),
        StockPosition("AMZN", "Amazon", 10),
    ]
)

portfolio_json = portfolio.to_json()
assert Portfolio.from_json(portfolio_json) == portfolio
```

If we need to serialize something different from a root dataclass, we can use codecs. In the following example we create a JSON decoder and encoder for a list of currencies:

```python
from mashumaro.codecs.json import JSONDecoder, JSONEncoder

...

decoder = JSONDecoder(list[CurrencyPosition])
encoder = JSONEncoder(list[CurrencyPosition])

currencies = [
    CurrencyPosition(Currency.USD, 238.67),
    CurrencyPosition(Currency.EUR, 361.84),
]
currencies_json = encoder.encode(currencies)
assert decoder.decode(currencies_json) == currencies
```

How does it work?
-------------------------------------------------------------------------------

This library works by taking the schema of the data and generating a specific decoder and encoder for exactly that schema, taking into account the specifics of the serialization format. This is much faster than inspection of data types on every call of decoding or encoding at runtime.

These specific decoders and encoders are generated by [codecs and mixins](#supported-serialization-formats):
* When using codecs, these methods are compiled during the creation of the decoder or encoder.
* When using serialization mixins, these methods are compiled during import time (or at runtime in some cases) and are set as attributes to your dataclasses. To minimize the import time, you can explicitly enable [lazy compilation](#lazy_compilation-config-option).
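Because the generated methods are plain Python functions, you can see exactly what was compiled for your model. A minimal sketch, assuming the `debug` config option covered in the [Config options](#config-options) section (the model and field names here are only illustrative):

```python
from dataclasses import dataclass

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig

@dataclass
class User(DataClassDictMixin):
    id: int
    name: str

    class Config(BaseConfig):
        # prints the generated from_dict / to_dict code at class creation time
        debug = True
```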
This is also a starting point you can play with for a comprehensive
transformation of your data.

Efficient decoder and encoder can be used as follows:
```python
from mashumaro.codecs import BasicDecoder, BasicEncoder
# or from mashumaro.codecs.basic import BasicDecoder, BasicEncoder

decoder = BasicDecoder(<shape_type>, ...)
decoder.decode(...)

encoder = BasicEncoder(<shape_type>, ...)
encoder.encode(...)
```

Convenient functions are recommended to be used as follows:
```python
import mashumaro.codecs.basic as basic_codec

basic_codec.decode(..., <shape_type>)
basic_codec.encode(..., <shape_type>)
```

Mixin can be used as follows:
```python
from mashumaro import DataClassDictMixin
# or from mashumaro.mixins.dict import DataClassDictMixin

@dataclass
class MyModel(DataClassDictMixin):
    ...

MyModel.from_dict(...)
MyModel(...).to_dict()
```

> [!TIP]\
> You don't need to inherit `DataClassDictMixin` along with other serialization
> mixins because it's a base class for them.

### JSON

[JSON](https://www.json.org) is a lightweight data-interchange format. You can
choose between the standard library
[json](https://docs.python.org/3/library/json.html) for compatibility and the
third-party dependency [orjson](https://pypi.org/project/orjson/) for better
performance.

#### json library

Efficient decoder and encoder can be used as follows:
```python
from mashumaro.codecs.json import JSONDecoder, JSONEncoder

decoder = JSONDecoder(<shape_type>, ...)
decoder.decode(...)

encoder = JSONEncoder(<shape_type>, ...)
encoder.encode(...)
```

Convenient functions can be used as follows:
```python
from mashumaro.codecs.json import json_decode, json_encode

json_decode(..., <shape_type>)
json_encode(..., <shape_type>)
```

Convenient function aliases are recommended to be used as follows:
```python
import mashumaro.codecs.json as json_codec

json_codec.decode(..., <shape_type>)
json_codec.encode(..., <shape_type>)
```

Mixin can be used as follows:
```python
from mashumaro.mixins.json import DataClassJSONMixin

@dataclass
class MyModel(DataClassJSONMixin):
    ...

MyModel.from_json(...)
MyModel(...).to_json()
```

#### orjson library

In order to use the [`orjson`](https://pypi.org/project/orjson/) library, it
must be installed manually or using an extra option for `mashumaro`:

```shell
pip install mashumaro[orjson]
```

The following data types will be handled by the
[`orjson`](https://pypi.org/project/orjson/) library by default:
* [`datetime`](https://docs.python.org/3/library/datetime.html#datetime.datetime)
* [`date`](https://docs.python.org/3/library/datetime.html#datetime.date)
* [`time`](https://docs.python.org/3/library/datetime.html#datetime.time)
* [`uuid.UUID`](https://docs.python.org/3/library/uuid.html#uuid.UUID)

Efficient decoder and encoder can be used as follows:
```python
from mashumaro.codecs.orjson import ORJSONDecoder, ORJSONEncoder

decoder = ORJSONDecoder(<shape_type>, ...)
decoder.decode(...)

encoder = ORJSONEncoder(<shape_type>, ...)
encoder.encode(...)
```

Convenient functions can be used as follows:
```python
from mashumaro.codecs.orjson import json_decode, json_encode

json_decode(..., <shape_type>)
json_encode(..., <shape_type>)
```

Convenient function aliases are recommended to be used as follows:
```python
import mashumaro.codecs.orjson as json_codec

json_codec.decode(..., <shape_type>)
json_codec.encode(..., <shape_type>)
```

Mixin can be used as follows:
```python
from mashumaro.mixins.orjson import DataClassORJSONMixin

@dataclass
class MyModel(DataClassORJSONMixin):
    ...

MyModel.from_json(...)
MyModel(...).to_json()
MyModel(...).to_jsonb()
```

### YAML

[YAML](https://yaml.org) is a human-friendly data serialization language for
all programming languages.
In order to use this format, the [`pyyaml`](https://pypi.org/project/PyYAML/)
package must be installed. You can install it manually or using an extra
option for `mashumaro`:

```shell
pip install mashumaro[yaml]
```

Efficient decoder and encoder can be used as follows:
```python
from mashumaro.codecs.yaml import YAMLDecoder, YAMLEncoder

decoder = YAMLDecoder(<shape_type>, ...)
decoder.decode(...)

encoder = YAMLEncoder(<shape_type>, ...)
encoder.encode(...)
```

Convenient functions can be used as follows:
```python
from mashumaro.codecs.yaml import yaml_decode, yaml_encode

yaml_decode(..., <shape_type>)
yaml_encode(..., <shape_type>)
```

Convenient function aliases are recommended to be used as follows:
```python
import mashumaro.codecs.yaml as yaml_codec

yaml_codec.decode(..., <shape_type>)
yaml_codec.encode(..., <shape_type>)
```

Mixin can be used as follows:
```python
from mashumaro.mixins.yaml import DataClassYAMLMixin

@dataclass
class MyModel(DataClassYAMLMixin):
    ...

MyModel.from_yaml(...)
MyModel(...).to_yaml()
```

### TOML

[TOML](https://toml.io) is a config file format for humans. In order to use
this format, the [`tomli`](https://pypi.org/project/tomli/) and
[`tomli-w`](https://pypi.org/project/tomli-w/) packages must be installed.
In Python 3.11+, `tomli` is included as the
[`tomllib`](https://docs.python.org/3/library/tomllib.html) standard library
module and is used for this format. You can install the missing packages
manually or using an extra option for `mashumaro`:

```shell
pip install mashumaro[toml]
```

The following data types will be handled by the
[`tomli`](https://pypi.org/project/tomli/)/
[`tomli-w`](https://pypi.org/project/tomli-w/) library by default:
* [`datetime`](https://docs.python.org/3/library/datetime.html#datetime.datetime)
* [`date`](https://docs.python.org/3/library/datetime.html#datetime.date)
* [`time`](https://docs.python.org/3/library/datetime.html#datetime.time)

Fields with value `None` will be omitted on serialization because TOML doesn't
support null values.

Efficient decoder and encoder can be used as follows:
```python
from mashumaro.codecs.toml import TOMLDecoder, TOMLEncoder

decoder = TOMLDecoder(<shape_type>, ...)
decoder.decode(...)

encoder = TOMLEncoder(<shape_type>, ...)
encoder.encode(...)
```

Convenient functions can be used as follows:
```python
from mashumaro.codecs.toml import toml_decode, toml_encode

toml_decode(..., <shape_type>)
toml_encode(..., <shape_type>)
```

Convenient function aliases are recommended to be used as follows:
```python
import mashumaro.codecs.toml as toml_codec

toml_codec.decode(..., <shape_type>)
toml_codec.encode(..., <shape_type>)
```

Mixin can be used as follows:
```python
from mashumaro.mixins.toml import DataClassTOMLMixin

@dataclass
class MyModel(DataClassTOMLMixin):
    ...

MyModel.from_toml(...)
MyModel(...).to_toml()
```

### MessagePack

[MessagePack](https://msgpack.org) is an efficient binary serialization format.
In order to use this format, the
[`msgpack`](https://pypi.org/project/msgpack/) package must be installed.
You can install it manually or using an extra option for `mashumaro`:

```shell
pip install mashumaro[msgpack]
```

The following data types will be handled by the
[`msgpack`](https://pypi.org/project/msgpack/) library by default:
* [`bytes`](https://docs.python.org/3/library/stdtypes.html#bytes)
* [`bytearray`](https://docs.python.org/3/library/stdtypes.html#bytearray)

Efficient decoder and encoder can be used as follows:
```python
from mashumaro.codecs.msgpack import MessagePackDecoder, MessagePackEncoder

decoder = MessagePackDecoder(<shape_type>, ...)
decoder.decode(...)

encoder = MessagePackEncoder(<shape_type>, ...)
encoder.encode(...)
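# note that MessagePack is a binary format, so the encoded result is bytes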
```

Convenient functions can be used as follows:
```python
from mashumaro.codecs.msgpack import msgpack_decode, msgpack_encode

msgpack_decode(..., <shape_type>)
msgpack_encode(..., <shape_type>)
```

Convenient function aliases are recommended to be used as follows:
```python
import mashumaro.codecs.msgpack as msgpack_codec

msgpack_codec.decode(..., <shape_type>)
msgpack_codec.encode(..., <shape_type>)
```

Mixin can be used as follows:
```python
from mashumaro.mixins.msgpack import DataClassMessagePackMixin

@dataclass
class MyModel(DataClassMessagePackMixin):
    ...

MyModel.from_msgpack(...)
MyModel(...).to_msgpack()
```

Customization
-------------------------------------------------------------------------------

Customization options of `mashumaro` are extensive and will most likely cover
your needs. When it comes to non-standard data types and non-standard
serialization support, you can do the following:
* Turn an existing regular or generic class into a serializable one
by inheriting the [`SerializableType`](#serializabletype-interface) class
* Write different serialization strategies for an existing regular or generic
type that is not under your control using the
[`SerializationStrategy`](#serializationstrategy) class
* Define serialization / deserialization methods:
  * for a specific dataclass field by using [field options](#field-options)
  * for a specific data type used in the dataclass by using the
[`Config`](#config-options) class
* Alter input and output data with serialization / deserialization
[hooks](#serialization-hooks)
* Separate serialization scheme from a dataclass in a reusable manner using
[dialects](#dialects)
* Choose from predefined serialization engines for the specific data types,
e.g. `datetime` and `NamedTuple`

### SerializableType interface

If you have a custom class or hierarchy of classes whose instances you want
to serialize with `mashumaro`, the first option is to implement the
`SerializableType` interface.

#### User-defined types

Let's look at this not very practical example:

```python
from dataclasses import dataclass
from mashumaro import DataClassDictMixin
from mashumaro.types import SerializableType

class Airport(SerializableType):
    def __init__(self, code, city):
        self.code, self.city = code, city

    def _serialize(self):
        return [self.code, self.city]

    @classmethod
    def _deserialize(cls, value):
        return cls(*value)

    def __eq__(self, other):
        # compare as tuples to avoid operator precedence pitfalls
        return (self.code, self.city) == (other.code, other.city)

@dataclass
class Flight(DataClassDictMixin):
    origin: Airport
    destination: Airport

JFK = Airport("JFK", "New York City")
LAX = Airport("LAX", "Los Angeles")

input_data = {
    "origin": ["JFK", "New York City"],
    "destination": ["LAX", "Los Angeles"]
}
my_flight = Flight.from_dict(input_data)
assert my_flight == Flight(JFK, LAX)
assert my_flight.to_dict() == input_data
```

You can see how `Airport` instances are seamlessly created from lists of two
strings and serialized into them. By default, the `_deserialize` method will
get raw input data without any prior transformations.
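For illustration, this is roughly what happens for each `Airport` field during
`Flight.from_dict` (a sketch, not the actual generated code):

```python
origin = Airport._deserialize(["JFK", "New York City"])  # gets the raw list
```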
This should be enough in many cases, especially when you need to perform
non-standard transformations yourself, but let's extend our example:

```python
class Itinerary(SerializableType):
    def __init__(self, flights):
        self.flights = flights

    def _serialize(self):
        return self.flights

    @classmethod
    def _deserialize(cls, flights):
        return cls(flights)

@dataclass
class TravelPlan(DataClassDictMixin):
    budget: float
    itinerary: Itinerary

input_data = {
    "budget": 10_000,
    "itinerary": [
        {
            "origin": ["JFK", "New York City"],
            "destination": ["LAX", "Los Angeles"]
        },
        {
            "origin": ["LAX", "Los Angeles"],
            "destination": ["SFO", "San Francisco"]
        }
    ]
}
```

If we pass the flight list as is into `Itinerary._deserialize`, our itinerary
will have something that we may not expect — `list[dict]` instead of
`list[Flight]`. The solution is quite simple. Instead of calling
`Flight._deserialize` yourself, just use annotations:

```python
class Itinerary(SerializableType, use_annotations=True):
    def __init__(self, flights):
        self.flights = flights

    def _serialize(self) -> list[Flight]:
        return self.flights

    @classmethod
    def _deserialize(cls, flights: list[Flight]):
        return cls(flights)

my_plan = TravelPlan.from_dict(input_data)
assert isinstance(my_plan.itinerary.flights[0], Flight)
assert isinstance(my_plan.itinerary.flights[1], Flight)
assert my_plan.to_dict() == input_data
```

Here we add annotations to the only argument of the `_deserialize` method and
to the return value of the `_serialize` method as well. The latter is needed
for correct serialization.

> [!IMPORTANT]\
> Passing `use_annotations=True` explicitly when defining a class matters
> because otherwise implicitly using annotations might break compatibility
> with old code that wasn't aware of this feature. It will be enabled by
> default in the future major release.

#### User-defined generic types

The great thing to note about using annotations in `SerializableType` is that
they work seamlessly with
[generic](https://docs.python.org/3/library/typing.html#user-defined-generic-types)
and [variadic generic](https://peps.python.org/pep-0646/) types.
Let's see how this can be useful:

```python
from datetime import date
from typing import TypeVar
from dataclasses import dataclass
from mashumaro import DataClassDictMixin
from mashumaro.types import SerializableType

KT = TypeVar("KT")
VT = TypeVar("VT")

class DictWrapper(dict[KT, VT], SerializableType, use_annotations=True):
    def _serialize(self) -> dict[KT, VT]:
        return dict(self)

    @classmethod
    def _deserialize(cls, value: dict[KT, VT]) -> 'DictWrapper[KT, VT]':
        return cls(value)

@dataclass
class DataClass(DataClassDictMixin):
    x: DictWrapper[date, str]
    y: DictWrapper[str, date]

input_data = {
    "x": {"2022-12-07": "2022-12-07"},
    "y": {"2022-12-07": "2022-12-07"}
}
obj = DataClass.from_dict(input_data)
assert obj == DataClass(
    x=DictWrapper({date(2022, 12, 7): "2022-12-07"}),
    y=DictWrapper({"2022-12-07": date(2022, 12, 7)})
)
assert obj.to_dict() == input_data
```

You can see that the formatted date is deserialized to a `date` object before
being passed to `DictWrapper._deserialize` in a key or value according to the
generic parameters.

If you have generic dataclass types, you can use `SerializableType` for them
as well, but it's not necessary since they're
[supported](#generic-dataclasses) out of the box.
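To illustrate that last point, here is a minimal sketch (with hypothetical
class names) of a generic dataclass that needs no `SerializableType`
machinery at all:

```python
from dataclasses import dataclass
from datetime import date
from typing import Generic, TypeVar

from mashumaro import DataClassDictMixin

T = TypeVar("T")

@dataclass
class Box(DataClassDictMixin, Generic[T]):
    value: T

@dataclass
class Shipment(DataClassDictMixin):
    box: Box[date]  # the generic parameter is resolved automatically

obj = Shipment.from_dict({"box": {"value": "2022-12-07"}})
assert obj == Shipment(box=Box(date(2022, 12, 7)))
```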
### SerializationStrategy

If you want to add support for a custom third-party type that is not under
your control, you can write serialization and deserialization logic inside a
`SerializationStrategy` class, which is reusable and thus well suited for
cases when that third-party type is widely used. `SerializationStrategy` is
also good if you want to create strategies that are slightly different from
each other, because you can add the strategy differentiator in the `__init__`
method.

#### Third-party types

To demonstrate how `SerializationStrategy` works let's write a simple
strategy for datetime serialization in different formats. In this example we
will use the same strategy class for two dataclass fields, but the string
representing the date and time will be different.

```python
from dataclasses import dataclass, field
from datetime import datetime
from mashumaro import DataClassDictMixin, field_options
from mashumaro.types import SerializationStrategy

class FormattedDateTime(SerializationStrategy):
    def __init__(self, fmt):
        self.fmt = fmt

    def serialize(self, value: datetime) -> str:
        return value.strftime(self.fmt)

    def deserialize(self, value: str) -> datetime:
        return datetime.strptime(value, self.fmt)

@dataclass
class DateTimeFormats(DataClassDictMixin):
    short: datetime = field(
        metadata=field_options(
            serialization_strategy=FormattedDateTime("%d%m%Y%H%M%S")
        )
    )
    verbose: datetime = field(
        metadata=field_options(
            serialization_strategy=FormattedDateTime("%A %B %d, %Y, %H:%M:%S")
        )
    )

formats = DateTimeFormats(
    short=datetime(2019, 1, 1, 12),
    verbose=datetime(2019, 1, 1, 12),
)
dictionary = formats.to_dict()
# {'short': '01012019120000', 'verbose': 'Tuesday January 01, 2019, 12:00:00'}
assert DateTimeFormats.from_dict(dictionary) == formats
```

Similarly to `SerializableType`, `SerializationStrategy` could also take
advantage of annotations:

```python
from dataclasses import dataclass
from datetime import datetime
from mashumaro import DataClassDictMixin
from mashumaro.types import SerializationStrategy

class TsSerializationStrategy(SerializationStrategy, use_annotations=True):
    def serialize(self, value: datetime) -> float:
        return value.timestamp()

    def deserialize(self, value: float) -> datetime:
        # value will be converted to float before being passed to this method
        return datetime.fromtimestamp(value)

@dataclass
class Example(DataClassDictMixin):
    dt: datetime

    class Config:
        serialization_strategy = {
            datetime: TsSerializationStrategy(),
        }

example = Example.from_dict({"dt": "1672531200"})
print(example)
# Example(dt=datetime.datetime(2023, 1, 1, 3, 0))
print(example.to_dict())
# {'dt': 1672531200.0}
```

Here the passed string value `"1672531200"` will be converted to `float`
before being passed to the `deserialize` method thanks to the `float`
annotation.

> [!IMPORTANT]\
> As well as for `SerializableType`, the value of `use_annotations` will be
> `True` by default in the future major release.
#### Third-party generic types

To create a generic version of a serialization strategy you need to follow
these steps:
* Inherit the [`Generic[...]`](https://docs.python.org/3/library/typing.html#typing.Generic)
type with the number of parameters matching the number of parameters
of the target generic type
* Write generic annotations for the `serialize` method's return type and
for the `deserialize` method's argument type
* Use the origin type of the target generic type in the
[`serialization_strategy`](#serialization_strategy-config-option) config
section ([`typing.get_origin`](https://docs.python.org/3/library/typing.html#typing.get_origin)
might be helpful)

There is no need to add `use_annotations=True` here because it's enabled
implicitly for generic serialization strategies.

For example, there is a third-party
[multidict](https://pypi.org/project/multidict/) package that has a generic
`MultiDict` type. A generic serialization strategy for it might look like this:

```python
from dataclasses import dataclass
from datetime import date
from pprint import pprint
from typing import Generic, List, Tuple, TypeVar
from mashumaro import DataClassDictMixin
from mashumaro.types import SerializationStrategy
from multidict import MultiDict

T = TypeVar("T")

class MultiDictSerializationStrategy(SerializationStrategy, Generic[T]):
    def serialize(self, value: MultiDict[T]) -> List[Tuple[str, T]]:
        return [(k, v) for k, v in value.items()]

    def deserialize(self, value: List[Tuple[str, T]]) -> MultiDict[T]:
        return MultiDict(value)

@dataclass
class Example(DataClassDictMixin):
    floats: MultiDict[float]
    date_lists: MultiDict[List[date]]

    class Config:
        serialization_strategy = {
            MultiDict: MultiDictSerializationStrategy()
        }

example = Example(
    floats=MultiDict([("x", 1.1), ("x", 2.2)]),
    date_lists=MultiDict(
        [("x", [date(2023, 1, 1), date(2023, 1, 2)]),
         ("x", [date(2023, 2, 1), date(2023, 2, 2)])]
    ),
)
pprint(example.to_dict())
# {'date_lists': [['x', ['2023-01-01', '2023-01-02']],
#                 ['x', ['2023-02-01', '2023-02-02']]],
#  'floats': [['x', 1.1], ['x', 2.2]]}

assert Example.from_dict(example.to_dict()) == example
```

### Field options

In some cases creating a new class just for one little thing could be
excessive. Moreover, you may need to deal with third-party classes that you
are not allowed to change. You can use the
[`dataclasses.field`](https://docs.python.org/3/library/dataclasses.html#dataclasses.field)
function to configure some serialization aspects through its `metadata`
parameter. The next section describes all supported options to use in the
`metadata` mapping.

If you don't want to remember the names of the options you can use the
`field_options` helper function:

```python
from dataclasses import dataclass, field
from mashumaro import field_options

@dataclass
class A:
    x: int = field(metadata=field_options(...))
```

#### `serialize` option

This option allows you to change the serialization method. When using this
option, the serialization behaviour depends on what type of value the option
has. It could be either `Callable[[Any], Any]` or `str`.

A value of type `Callable[[Any], Any]` is a generic way to specify any callable
object like a function, a class method, a class instance method, an instance
of a callable class or even a lambda function to be called for serialization.

A value of type `str` sets a specific engine for serialization. Keep in mind
that all possible engines depend on the data type that this option is used
with.
At this moment, the following serialization engines are available to choose
from:

| Applicable data types      | Supported engines    | Description                                                                                                                                                                                                      |
|:---------------------------|:---------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `NamedTuple`, `namedtuple` | `as_list`, `as_dict` | How to pack named tuples. By default, the `as_list` engine is used, which means your named tuple class instance will be packed into a list of its values. You can pack it into a dictionary using the `as_dict` engine. |
| `Any`                      | `omit`               | Skip the field during serialization                                                                                                                                                                              |

> [!TIP]\
> You can pass a field value as is without changes on serialization using
[`pass_through`](#passing-field-values-as-is).

Example:

```python
from datetime import datetime
from dataclasses import dataclass, field
from typing import NamedTuple
from mashumaro import DataClassDictMixin

class MyNamedTuple(NamedTuple):
    x: int
    y: float

@dataclass
class A(DataClassDictMixin):
    dt: datetime = field(
        metadata={
            "serialize": lambda v: v.strftime('%Y-%m-%d %H:%M:%S')
        }
    )
    t: MyNamedTuple = field(metadata={"serialize": "as_dict"})
```

#### `deserialize` option

This option allows you to change the deserialization method. When using this
option, the deserialization behaviour depends on what type of value the option
has. It could be either `Callable[[Any], Any]` or `str`.

A value of type `Callable[[Any], Any]` is a generic way to specify any callable
object like a function, a class method, a class instance method, an instance
of a callable class or even a lambda function to be called for deserialization.

A value of type `str` sets a specific engine for deserialization. Keep in mind
that all possible engines depend on the data type that this option is used
with. At this moment, the following deserialization engines are available to
choose from:

| Applicable data types      | Supported engines                                                                                                                   | Description                                                                                                                                                                                                                                                                                              |
|:---------------------------|:------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `datetime`, `date`, `time` | [`ciso8601`](https://github.com/closeio/ciso8601#supported-subset-of-iso-8601), [`pendulum`](https://github.com/sdispater/pendulum) | How to parse datetime string. By default, the native [`fromisoformat`](https://docs.python.org/3/library/datetime.html#datetime.datetime.fromisoformat) of the corresponding class will be used for `datetime`, `date` and `time` fields. It's the fastest way in most cases, but you can choose an alternative. |
| `NamedTuple`, `namedtuple` | `as_list`, `as_dict`                                                                                                                  | How to unpack named tuples. By default, the `as_list` engine is used, which means your named tuple class instance will be created from a list of its values. You can unpack it from a dictionary using the `as_dict` engine.                                                                              |

> [!TIP]\
> You can pass a field value as is without changes on deserialization using
[`pass_through`](#passing-field-values-as-is).
Example:

```python
from datetime import datetime
from dataclasses import dataclass, field
from typing import List, NamedTuple
from mashumaro import DataClassDictMixin
import ciso8601
import dateutil.parser

class MyNamedTuple(NamedTuple):
    x: int
    y: float

@dataclass
class A(DataClassDictMixin):
    x: datetime = field(
        metadata={"deserialize": "pendulum"}
    )

@dataclass
class B(DataClassDictMixin):
    x: datetime = field(
        metadata={"deserialize": ciso8601.parse_datetime_as_naive}
    )

@dataclass
class C(DataClassDictMixin):
    dt: List[datetime] = field(
        metadata={
            "deserialize": lambda l: list(map(dateutil.parser.isoparse, l))
        }
    )

@dataclass
class D(DataClassDictMixin):
    x: MyNamedTuple = field(metadata={"deserialize": "as_dict"})
```

#### `serialization_strategy` option

This option is useful when you want to change the serialization logic
for a dataclass field depending on some defined parameters using a reusable
serialization scheme. You can find an example in the
[`SerializationStrategy`](#serializationstrategy) chapter.

> [!TIP]\
> You can pass a field value as is without changes on
> serialization / deserialization using
[`pass_through`](#passing-field-values-as-is).

#### `alias` option

This option can be used to assign [field aliases](#field-aliases):

```python
from dataclasses import dataclass, field
from mashumaro import DataClassDictMixin, field_options

@dataclass
class DataClass(DataClassDictMixin):
    a: int = field(metadata=field_options(alias="FieldA"))
    b: int = field(metadata=field_options(alias="#invalid"))

x = DataClass.from_dict({"FieldA": 1, "#invalid": 2})  # DataClass(a=1, b=2)
```

### Config options

If inheritance is not an empty word for you, you'll fall in love with the
`Config` class. You can register `serialize` and `deserialize` methods, define
code generation options and other things just in one place. Or in some
classes in different ways if you need flexibility. Inheritance always comes
first.

There is a base class `BaseConfig` that you can inherit for the sake of
convenience, but it's not mandatory.

In the following example you can see how the `debug` flag is changed from
class to class: `ModelA` will have debug mode enabled but `ModelB` will not.

```python
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig

class BaseModel(DataClassDictMixin):
    class Config(BaseConfig):
        debug = True

class ModelA(BaseModel):
    a: int

class ModelB(BaseModel):
    b: int

    class Config(BaseConfig):
        debug = False
```

The next section describes all supported options to use in the config.

#### `debug` config option

If you enable the `debug` option, the generated code for your data class
will be printed.

#### `code_generation_options` config option

Some users may need functionality that wouldn't exist without extra cost such
as valuable CPU time to execute additional instructions. Since not everyone
needs such instructions, they can be enabled by a constant in the list,
so the fastest basic behavior of the library will always remain by default.
The following table provides a brief overview of all the available constants
described below.

| Constant                                                        | Description                                                          |
|:----------------------------------------------------------------|:---------------------------------------------------------------------|
| [`TO_DICT_ADD_OMIT_NONE_FLAG`](#add-omit_none-keyword-argument) | Adds `omit_none` keyword-only argument to `to_*` methods.            |
| [`TO_DICT_ADD_BY_ALIAS_FLAG`](#add-by_alias-keyword-argument)   | Adds `by_alias` keyword-only argument to `to_*` methods.             |
| [`ADD_DIALECT_SUPPORT`](#add-dialect-keyword-argument)          | Adds `dialect` keyword-only argument to `from_*` and `to_*` methods. |
| [`ADD_SERIALIZATION_CONTEXT`](#add-context-keyword-argument)    | Adds `context` keyword-only argument to `to_*` methods.              |

#### `serialization_strategy` config option

You can register custom [`SerializationStrategy`](#serializationstrategy),
`serialize` and `deserialize` methods for specific types just in one place.
It could be configured using a dictionary with types as keys. The value
could be either a [`SerializationStrategy`](#serializationstrategy) instance
or a dictionary with `serialize` and `deserialize` values with the same
meaning as in the [field options](#field-options).

```python
from dataclasses import dataclass
from datetime import datetime, date
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.types import SerializationStrategy

class FormattedDateTime(SerializationStrategy):
    def __init__(self, fmt):
        self.fmt = fmt

    def serialize(self, value: datetime) -> str:
        return value.strftime(self.fmt)

    def deserialize(self, value: str) -> datetime:
        return datetime.strptime(value, self.fmt)

@dataclass
class DataClass(DataClassDictMixin):
    x: datetime
    y: date

    class Config(BaseConfig):
        serialization_strategy = {
            datetime: FormattedDateTime("%Y"),
            date: {
                # you can use specific str values for datetime here as well
                "deserialize": "pendulum",
                "serialize": date.isoformat,
            },
        }

instance = DataClass.from_dict({"x": "2021", "y": "2021"})
# DataClass(x=datetime.datetime(2021, 1, 1, 0, 0), y=Date(2021, 1, 1))
dictionary = instance.to_dict()
# {'x': '2021', 'y': '2021-01-01'}
```

Note that you can register different methods for multiple logical types which
are based on the same type using `NewType` and `Annotated`,
see [Extending existing types](#extending-existing-types) for details.

It's also possible to define a generic (de)serialization method for a generic
type by registering a method for its
[origin](https://docs.python.org/3/library/typing.html#typing.get_origin)
type. Although this technique is widely used when working with
[third-party generic types](#third-party-generic-types) using generic
strategies, it can also be applied in simple scenarios:

```python
from dataclasses import dataclass
from mashumaro import DataClassDictMixin

@dataclass
class C(DataClassDictMixin):
    ints: list[int]
    floats: list[float]

    class Config:
        serialization_strategy = {
            list: {  # origin type for list[int] and list[float] is list
                "serialize": lambda x: list(map(str, x)),
            }
        }

assert C([1], [2.2]).to_dict() == {'ints': ['1'], 'floats': ['2.2']}
```

#### `aliases` config option

Sometimes it's better to write the [field aliases](#field-aliases) in one
place. You can mix aliases here with
[aliases in the field options](#alias-option), but the latter will always
take precedence.

```python
from dataclasses import dataclass
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig

@dataclass
class DataClass(DataClassDictMixin):
    a: int
    b: int

    class Config(BaseConfig):
        aliases = {
            "a": "FieldA",
            "b": "FieldB",
        }

DataClass.from_dict({"FieldA": 1, "FieldB": 2})  # DataClass(a=1, b=2)
```

#### `serialize_by_alias` config option

All the fields with [aliases](#field-aliases) will be serialized by them by
default when this option is enabled. You can mix this config option with the
[`by_alias`](#add-by_alias-keyword-argument) keyword argument.
```python
from dataclasses import dataclass, field
from mashumaro import DataClassDictMixin, field_options
from mashumaro.config import BaseConfig

@dataclass
class DataClass(DataClassDictMixin):
    field_a: int = field(metadata=field_options(alias="FieldA"))

    class Config(BaseConfig):
        serialize_by_alias = True

DataClass(field_a=1).to_dict()  # {'FieldA': 1}
```

#### `allow_deserialization_not_by_alias` config option

When using aliases, the deserializer requires the input keys to match the
aliases by default. If you need the flexibility to deserialize both aliased
and unaliased keys, you can enable the
`allow_deserialization_not_by_alias` config option.

```python
from dataclasses import dataclass, field
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig

@dataclass
class AliasedDataClass(DataClassDictMixin):
    foo: int = field(metadata={"alias": "alias_foo"})
    bar: int = field(metadata={"alias": "alias_bar"})

    class Config(BaseConfig):
        allow_deserialization_not_by_alias = True

alias_dict = {"alias_foo": 1, "alias_bar": 2}
t1 = AliasedDataClass.from_dict(alias_dict)

no_alias_dict = {"foo": 1, "bar": 2}
# This would raise `mashumaro.exceptions.MissingField`
# if allow_deserialization_not_by_alias was False
t2 = AliasedDataClass.from_dict(no_alias_dict)
assert t1 == t2
```

#### `omit_none` config option

All the fields with `None` values will be skipped during serialization by
default when this option is enabled. You can mix this config option with the
[`omit_none`](#add-omit_none-keyword-argument) keyword argument.

```python
from dataclasses import dataclass
from typing import Optional
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig

@dataclass
class DataClass(DataClassDictMixin):
    x: Optional[int] = 42

    class Config(BaseConfig):
        omit_none = True

DataClass(x=None).to_dict()  # {}
```

#### `omit_default` config option

When this option is enabled, all the fields whose values equal their defaults
or `default_factory` results will be skipped during serialization.

```python
from dataclasses import dataclass, field
from typing import List, Optional, Tuple
from mashumaro import DataClassDictMixin, field_options
from mashumaro.config import BaseConfig

@dataclass
class Foo:
    foo: str

@dataclass
class DataClass(DataClassDictMixin):
    a: int = 42
    b: Tuple[int, ...] = field(default=(1, 2, 3))
    c: List[Foo] = field(default_factory=lambda: [Foo("foo")])
    d: Optional[str] = None

    class Config(BaseConfig):
        omit_default = True

DataClass(a=42, b=(1, 2, 3), c=[Foo("foo")]).to_dict()  # {}
```

#### `namedtuple_as_dict` config option

Dataclasses are a great way to declare and use data models. But it's not the
only way. Python has a typed version of
[namedtuple](https://docs.python.org/3/library/collections.html#collections.namedtuple)
called [NamedTuple](https://docs.python.org/3/library/typing.html#typing.NamedTuple)
which looks similar to dataclasses:

```python
from typing import NamedTuple

class Point(NamedTuple):
    x: int
    y: int
```

The same with a dataclass will look like this:

```python
from dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int
```

At first glance, you can use both options. But imagine that you need to create
a bunch of instances of the `Point` class. Due to how dataclasses work you
will have more memory consumption compared to named tuples. In such a case it
could be more appropriate to use named tuples.

By default, all named tuples are packed into lists.
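For illustration, a quick sketch of this default behaviour (the `Polygon`
class is hypothetical):

```python
from dataclasses import dataclass
from typing import List, NamedTuple

from mashumaro import DataClassDictMixin

class Point(NamedTuple):
    x: int
    y: int

@dataclass
class Polygon(DataClassDictMixin):
    points: List[Point]

# with no config options set, each named tuple becomes a list of its values
assert Polygon([Point(0, 0), Point(1, 1)]).to_dict() == {
    "points": [[0, 0], [1, 1]]
}
```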
But with the `namedtuple_as_dict` option you have a drop-in replacement for
dataclasses:

```python
from dataclasses import dataclass
from typing import List, NamedTuple
from mashumaro import DataClassDictMixin

class Point(NamedTuple):
    x: int
    y: int

@dataclass
class DataClass(DataClassDictMixin):
    points: List[Point]

    class Config:
        namedtuple_as_dict = True

obj = DataClass.from_dict({"points": [{"x": 0, "y": 0}, {"x": 1, "y": 1}]})
print(obj.to_dict())  # {"points": [{"x": 0, "y": 0}, {"x": 1, "y": 1}]}
```

If you want to serialize only certain named tuple fields as dictionaries, you
can use the corresponding
[serialization](#serialize-option) and [deserialization](#deserialize-option)
engines.

#### `allow_postponed_evaluation` config option

[PEP 563](https://www.python.org/dev/peps/pep-0563/) solved the problem of
forward references by postponing the evaluation of annotations, so you can
write the following code:

```python
from __future__ import annotations
from dataclasses import dataclass
from mashumaro import DataClassDictMixin

@dataclass
class A(DataClassDictMixin):
    x: B

@dataclass
class B(DataClassDictMixin):
    y: int

obj = A.from_dict({'x': {'y': 1}})
```

You don't need to write anything special here, forward references work out of
the box. If a field of a dataclass has a forward reference in the type
annotations, building of `from_*` and `to_*` methods of this dataclass
will be postponed until they are called once. However, if for some reason you
don't want the evaluation to be possibly postponed, you can disable it using
the `allow_postponed_evaluation` option:

```python
from __future__ import annotations
from dataclasses import dataclass
from mashumaro import DataClassDictMixin

@dataclass
class A(DataClassDictMixin):
    x: B

    class Config:
        allow_postponed_evaluation = False

# UnresolvedTypeReferenceError: Class A has unresolved type reference B
# in some of its fields

@dataclass
class B(DataClassDictMixin):
    y: int
```

In this case you will get `UnresolvedTypeReferenceError` regardless of whether
class B is declared below or not.

#### `dialect` config option

This option is described [below](#changing-the-default-dialect) in the
Dialects section.

#### `orjson_options` config option

This option changes the default options for the `orjson.dumps` encoder which
is used in [`DataClassORJSONMixin`](#dataclassorjsonmixin). For example, you
can tell orjson to handle non-`str` `dict` keys as the built-in `json.dumps`
encoder does. See [orjson documentation](https://github.com/ijl/orjson#option)
to read more about these options.

```python
import orjson
from dataclasses import dataclass
from typing import Dict
from mashumaro.config import BaseConfig
from mashumaro.mixins.orjson import DataClassORJSONMixin

@dataclass
class MyClass(DataClassORJSONMixin):
    x: Dict[int, int]

    class Config(BaseConfig):
        orjson_options = orjson.OPT_NON_STR_KEYS

assert MyClass({1: 2}).to_json() == """{"1":2}"""
```

#### `discriminator` config option

This option is described in the
[Class level discriminator](#class-level-discriminator) section.

#### `lazy_compilation` config option

By using this option, the compilation of the `from_*` and `to_*` methods will
be deferred until they are called first time. This will reduce the import time
and, in certain instances, may enhance the speed of deserialization
by leveraging the data that is accessible after the class has been created.

> [!CAUTION]\
> If you need to save a reference to `from_*` or `to_*` method, you should
> do it after the method is compiled.
> To be safe, you can always use a lambda function:
> ```python
> from_dict = lambda x: MyModel.from_dict(x)
> to_dict = lambda x: x.to_dict()
> ```

#### `sort_keys` config option

When set, the keys on serialized dataclasses will be sorted in alphabetical
order. Unlike the `sort_keys` option in the standard library's `json.dumps`
function, this option acts at class creation time and has no effect on the
performance of serialization.

```python
from dataclasses import dataclass
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig

@dataclass
class SortedDataClass(DataClassDictMixin):
    foo: int
    bar: int

    class Config(BaseConfig):
        sort_keys = True

t = SortedDataClass(1, 2)
assert t.to_dict() == {"bar": 2, "foo": 1}
```

#### `forbid_extra_keys` config option

When set, the deserialization of dataclasses will fail if the input dictionary
contains keys that are not present in the dataclass.

```python
from dataclasses import dataclass
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig

@dataclass
class DataClass(DataClassDictMixin):
    a: int

    class Config(BaseConfig):
        forbid_extra_keys = True

DataClass.from_dict({"a": 1, "b": 2})  # ExtraKeysError: Extra keys: {'b'}
```

It plays well with `aliases` and `allow_deserialization_not_by_alias` options.

### Passing field values as is

In some cases it's needed to pass a field value as is without any changes
during serialization / deserialization. There is a predefined
[`pass_through`](https://github.com/Fatal1ty/mashumaro/blob/master/mashumaro/helper.py#L58)
object that can be used as `serialization_strategy` or
`serialize` / `deserialize` options:

```python
from dataclasses import dataclass, field
from mashumaro import DataClassDictMixin, pass_through

class MyClass:
    def __init__(self, some_value):
        self.some_value = some_value

@dataclass
class A1(DataClassDictMixin):
    x: MyClass = field(
        metadata={
            "serialize": pass_through,
            "deserialize": pass_through,
        }
    )

@dataclass
class A2(DataClassDictMixin):
    x: MyClass = field(
        metadata={
            "serialization_strategy": pass_through,
        }
    )

@dataclass
class A3(DataClassDictMixin):
    x: MyClass

    class Config:
        serialization_strategy = {
            MyClass: pass_through,
        }

@dataclass
class A4(DataClassDictMixin):
    x: MyClass

    class Config:
        serialization_strategy = {
            MyClass: {
                "serialize": pass_through,
                "deserialize": pass_through,
            }
        }

my_class_instance = MyClass(42)

assert A1.from_dict({'x': my_class_instance}).x == my_class_instance
assert A2.from_dict({'x': my_class_instance}).x == my_class_instance
assert A3.from_dict({'x': my_class_instance}).x == my_class_instance
assert A4.from_dict({'x': my_class_instance}).x == my_class_instance

a1_dict = A1(my_class_instance).to_dict()
a2_dict = A2(my_class_instance).to_dict()
a3_dict = A3(my_class_instance).to_dict()
a4_dict = A4(my_class_instance).to_dict()

assert a1_dict == a2_dict == a3_dict == a4_dict == {"x": my_class_instance}
```

### Extending existing types

There are situations where you might want some values of the same type to be
treated as their own type.
You can create new logical types with
[`NewType`](https://docs.python.org/3/library/typing.html#newtype),
[`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated)
or [`TypeAliasType`](https://docs.python.org/3/library/typing.html#typing.TypeAliasType)
and register serialization strategies for them:

```python
from typing import Mapping, NewType, Annotated
from dataclasses import dataclass
from mashumaro import DataClassDictMixin

SessionID = NewType("SessionID", str)
AccountID = Annotated[str, "AccountID"]
type DeviceID = str

@dataclass
class Context(DataClassDictMixin):
    account_sessions: Mapping[AccountID, SessionID]
    account_devices: list[DeviceID]

    class Config:
        serialization_strategy = {
            AccountID: {
                "deserialize": lambda x: ...,
                "serialize": lambda x: ...,
            },
            SessionID: {
                "deserialize": lambda x: ...,
                "serialize": lambda x: ...,
            },
            DeviceID: {
                "deserialize": lambda x: ...,
                "serialize": lambda x: ...,
            }
        }
```

Although using `NewType` is usually the most reliable way to avoid logical
errors, you have to pay for it with notable overhead. If you are creating
dataclass instances manually, then you know that type checkers will force you
to wrap a value in your `NewType` callable, which leads to performance
degradation:

```shell
$ python -m timeit -s "from typing import NewType; MyInt = NewType('MyInt', int)" "MyInt(42)"
10000000 loops, best of 5: 31.1 nsec per loop

$ python -m timeit -s "from typing import NewType; MyInt = NewType('MyInt', int)" "42"
50000000 loops, best of 5: 4.35 nsec per loop
```

However, when you create dataclass instances using the `from_*` method
provided by one of the mixins or using one of the decoders, there will be no
performance degradation, because the value won't be enclosed in the callable
in the generated code. Therefore, if performance is more important to you than
catching logical errors by type checkers, and you are actively creating or
changing dataclasses manually, then you should take a closer look at using
`Annotated`.

### Field aliases

In some cases it's better to have different names for a field in your
dataclass and in its serialized view. For example, a third-party legacy API
you are working with might operate with camel case style, but you stick to
snake case style in your code base. Or you want to load data with keys that
are invalid identifiers in Python. Aliases can solve this problem.

There are multiple ways to assign an alias:
* Using `Alias(...)` annotation in a field type
* Using `alias` parameter in field metadata
* Using `aliases` parameter in a dataclass config

By default, aliases only affect deserialization, but it can be extended to
serialization as well.
If you want to serialize all the fields by aliases, you have the following
options to do so:
* [`serialize_by_alias` config option](#serialize_by_alias-config-option)
* [`serialize_by_alias` dialect option](#serialize_by_alias-dialect-option)
* [`by_alias` keyword argument in `to_*` methods](#add-by_alias-keyword-argument)

Here is an example with `Alias` annotation in a field type:

```python
from dataclasses import dataclass
from typing import Annotated
from mashumaro import DataClassDictMixin
from mashumaro.types import Alias

@dataclass
class DataClass(DataClassDictMixin):
    foo_bar: Annotated[int, Alias("fooBar")]

obj = DataClass.from_dict({"fooBar": 42})  # DataClass(foo_bar=42)
obj.to_dict()  # {"foo_bar": 42}  # no aliases on serialization by default
```

The same with field metadata:

```python
from dataclasses import dataclass, field
from mashumaro import field_options

@dataclass
class DataClass:
    foo_bar: str = field(metadata=field_options(alias="fooBar"))
```

And with a dataclass config:

```python
from dataclasses import dataclass
from mashumaro.config import BaseConfig

@dataclass
class DataClass:
    foo_bar: str

    class Config(BaseConfig):
        aliases = {"foo_bar": "fooBar"}
```

> [!TIP]\
> If you want to deserialize all the fields by their names along with aliases,
> there is [a config option](#allow_deserialization_not_by_alias-config-option)
> for that.

### Dialects

Sometimes it's needed to have different serialization and deserialization
methods depending on the data source where entities of the dataclass are
stored or on the API the entities are sent to or received from. There is a
special `Dialect` type that may contain all the differences from the default
serialization and deserialization methods. You can create different dialects
and use each of them for the same dataclass depending on the situation.

Suppose we have the following dataclass with a field of type `date`:

```python
@dataclass
class Entity(DataClassDictMixin):
    dt: date
```

By default, a field of `date` type serializes to a string in ISO 8601 format,
so the serialized entity will look like `{'dt': '2021-12-31'}`. But what if we
have, for example, two legacy Ethiopian and Japanese APIs that use two
different formats for dates — `dd/mm/yyyy` and `yyyy年mm月dd日`?
Instead of creating two similar dataclasses we can have one dataclass and two
dialects:

```python
from dataclasses import dataclass
from datetime import date, datetime
from mashumaro import DataClassDictMixin
from mashumaro.config import ADD_DIALECT_SUPPORT
from mashumaro.dialect import Dialect
from mashumaro.types import SerializationStrategy

class DateTimeSerializationStrategy(SerializationStrategy):
    def __init__(self, fmt: str):
        self.fmt = fmt

    def serialize(self, value: date) -> str:
        return value.strftime(self.fmt)

    def deserialize(self, value: str) -> date:
        return datetime.strptime(value, self.fmt).date()

class EthiopianDialect(Dialect):
    serialization_strategy = {
        date: DateTimeSerializationStrategy("%d/%m/%Y")
    }

class JapaneseDialect(Dialect):
    serialization_strategy = {
        date: DateTimeSerializationStrategy("%Y年%m月%d日")
    }

@dataclass
class Entity(DataClassDictMixin):
    dt: date

    class Config:
        code_generation_options = [ADD_DIALECT_SUPPORT]

entity = Entity(date(2021, 12, 31))
entity.to_dict(dialect=EthiopianDialect)  # {'dt': '31/12/2021'}
entity.to_dict(dialect=JapaneseDialect)   # {'dt': '2021年12月31日'}
Entity.from_dict({'dt': '2021年12月31日'}, dialect=JapaneseDialect)
```

#### `serialization_strategy` dialect option

This dialect option has the same meaning as the
[similar config option](#serialization_strategy-config-option) but for the
dialect scope. You can register custom
[`SerializationStrategy`](#serializationstrategy), `serialize` and
`deserialize` methods for the specific types.

#### `serialize_by_alias` dialect option

This dialect option has the same meaning as the
[similar config option](#serialize_by_alias-config-option) but for the dialect
scope.

#### `omit_none` dialect option

This dialect option has the same meaning as the
[similar config option](#omit_none-config-option) but for the dialect scope.

#### `omit_default` dialect option

This dialect option has the same meaning as the
[similar config option](#omit_default-config-option) but for the dialect
scope.

#### `namedtuple_as_dict` dialect option

This dialect option has the same meaning as the
[similar config option](#namedtuple_as_dict-config-option) but for the
dialect scope.

#### `no_copy_collections` dialect option

By default, all collection data types are serialized as a copy to prevent
mutation of the original collection. As an example, if a dataclass contains
a field of type `list[str]`, then it will be serialized as a copy of the
original list, so you can safely mutate it after. The downside is that copying
is always slower than using a reference to the original collection. In some
cases we know beforehand that mutation doesn't take place or is even
desirable, so we can benefit from avoiding unnecessary copies by setting
`no_copy_collections` to a sequence of origin collection data types.
This is applicable only for collections containing elements that do not
require conversion.
```python
from dataclasses import dataclass
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.dialect import Dialect

class NoCopyDialect(Dialect):
    no_copy_collections = (list, dict, set)

@dataclass
class DataClass(DataClassDictMixin):
    simple_list: list[str]
    simple_dict: dict[str, str]
    simple_set: set[str]

    class Config(BaseConfig):
        dialect = NoCopyDialect

obj = DataClass(["foo"], {"bar": "baz"}, {"foobar"})
data = obj.to_dict()

assert data["simple_list"] is obj.simple_list
assert data["simple_dict"] is obj.simple_dict
assert data["simple_set"] is obj.simple_set
```

This option is enabled for `list` and `dict` in the default dialects that
belong to mixins and codecs for the following formats:
* [JSON (orjson library)](#orjson-library)
* [TOML](#toml)
* [MessagePack](#messagepack)

#### Changing the default dialect

You can change the default serialization and deserialization methods not only
in the [`serialization_strategy`](#serialization_strategy-config-option)
config option but also using the `dialect` config option. If you have multiple
dataclasses without a common parent class the default dialect can help you
to reduce the number of code lines written:

```python
@dataclass
class Entity(DataClassDictMixin):
    dt: date

    class Config:
        dialect = JapaneseDialect

entity = Entity(date(2021, 12, 31))
entity.to_dict()  # {'dt': '2021年12月31日'}
assert Entity.from_dict({'dt': '2021年12月31日'}) == entity
```

The default dialect can also be set when using codecs:

```python
from mashumaro.codecs import BasicDecoder, BasicEncoder

@dataclass
class Entity:
    dt: date

decoder = BasicDecoder(Entity, default_dialect=JapaneseDialect)
encoder = BasicEncoder(Entity, default_dialect=JapaneseDialect)

entity = Entity(date(2021, 12, 31))
encoder.encode(entity)  # {'dt': '2021年12月31日'}
assert decoder.decode({'dt': '2021年12月31日'}) == entity
```

### Discriminator

There is a special `Discriminator` class that allows you to customize how
a union of dataclasses or their hierarchy will be deserialized.
It has the following parameters that affect class selection rules:

* `field` — optional name of the input dictionary key (also known as tag)
by which all the variants can be distinguished
* `include_subtypes` — allows deserializing subclasses
* `include_supertypes` — allows deserializing superclasses
* `variant_tagger_fn` — a custom function used to generate tag values
associated with a variant

By default, each variant that you want to discriminate by tags should have
a class-level attribute containing an associated tag value. This attribute
should have a name defined by the `field` parameter. The tag value could be
in the following forms:

* without annotations: `type = 42`
* annotated as ClassVar: `type: ClassVar[int] = 42`
* annotated as Final: `type: Final[int] = 42`
* annotated as Literal: `type: Literal[42] = 42`
* annotated as StrEnum: `type: ResponseType = ResponseType.OK`

> [!NOTE]\
> Keep in mind that by default only Final, Literal and StrEnum fields are
> processed during serialization.

However, it is possible to use discriminator without the class-level
attribute. You can provide a custom function that generates one or many
variant tag values. This function should take a class as the only argument
and return either a single value of the basic type like `str` or `int` or
a list of them to associate multiple tags with a variant.
The common practice is to use a class name as a single tag value:

```python
variant_tagger_fn = lambda cls: cls.__name__
```

Next, we will look at different use cases, as well as their pros and cons.

#### Subclasses distinguishable by a field

Often you have a base dataclass and multiple subclasses that are easily
distinguishable from each other by the value of a particular field. For
example, there may be different events, messages or requests with a
discriminator field "event_type", "message_type" or just "type". You could've
listed all of them within `Union` type, but it would be too verbose and
impractical. Moreover, deserialization of the union would be slow, since we
need to iterate over each variant in the list until we find the right one.

We can improve subclass deserialization using `Discriminator` as annotation
within `Annotated` type. We will use the `field` parameter and set
`include_subtypes` to `True`.

> [!IMPORTANT]\
> The discriminator field should be accessible from the `__dict__` attribute
> of a specific descendant, i.e. defined at the level of that descendant.
> A descendant class without a discriminator field will be ignored, but
> its descendants won't.

Suppose we have a hierarchy of client events distinguishable by a class
attribute "type":

```python
from dataclasses import dataclass
from ipaddress import IPv4Address
from mashumaro import DataClassDictMixin

@dataclass
class ClientEvent(DataClassDictMixin):
    pass

@dataclass
class ClientConnectedEvent(ClientEvent):
    type = "connected"

    client_ip: IPv4Address

@dataclass
class ClientDisconnectedEvent(ClientEvent):
    type = "disconnected"

    client_ip: IPv4Address
```

We use base dataclass `ClientEvent` for a field of another dataclass:

```python
from typing import Annotated, List
# or from typing_extensions import Annotated
from mashumaro.types import Discriminator

@dataclass
class AggregatedEvents(DataClassDictMixin):
    list: List[
        Annotated[
            ClientEvent, Discriminator(field="type", include_subtypes=True)
        ]
    ]
```

Now we can deserialize events based on "type" value:

```python
events = AggregatedEvents.from_dict(
    {
        "list": [
            {"type": "connected", "client_ip": "10.0.0.42"},
            {"type": "disconnected", "client_ip": "10.0.0.42"},
        ]
    }
)
assert events == AggregatedEvents(
    list=[
        ClientConnectedEvent(client_ip=IPv4Address("10.0.0.42")),
        ClientDisconnectedEvent(client_ip=IPv4Address("10.0.0.42")),
    ]
)
```

#### Subclasses without a common field

In rare cases you have to deal with subclasses that don't have a common field
name which they can be distinguished by. Since `Discriminator` can be
initialized without the `field` parameter, you can use it with only
`include_subtypes` enabled. The drawback is that we will have to go through
all the subclasses until we find the suitable one. It's almost like using
`Union` type but with subclasses support.

Suppose we're making a brunch.
We have some ingredients: ```python @dataclass class Ingredient(DataClassDictMixin): name: str @dataclass class Hummus(Ingredient): made_of: Literal["chickpeas", "beet", "artichoke"] grams: int @dataclass class Celery(Ingredient): pieces: int ``` Let's create a plate: ```python @dataclass class Plate(DataClassDictMixin): ingredients: List[ Annotated[Ingredient, Discriminator(include_subtypes=True)] ] ``` And now we can put our ingredients on the plate: ```python plate = Plate.from_dict( { "ingredients": [ { "name": "hummus from the shop", "made_of": "chickpeas", "grams": 150, }, {"name": "celery from my garden", "pieces": 5}, ] } ) assert plate == Plate( ingredients=[ Hummus(name="hummus from the shop", made_of="chickpeas", grams=150), Celery(name="celery from my garden", pieces=5), ] ) ``` In some cases it's necessary to fall back to the base class if there is no suitable subclass. We can set `include_supertypes` to `True`: ```python @dataclass class Plate(DataClassDictMixin): ingredients: List[ Annotated[ Ingredient, Discriminator(include_subtypes=True, include_supertypes=True), ] ] plate = Plate.from_dict( { "ingredients": [ { "name": "hummus from the shop", "made_of": "chickpeas", "grams": 150, }, {"name": "celery from my garden", "pieces": 5}, {"name": "cumin"} # <- new unknown ingredient ] } ) assert plate == Plate( ingredients=[ Hummus(name="hummus from the shop", made_of="chickpeas", grams=150), Celery(name="celery from my garden", pieces=5), Ingredient(name="cumin"), # <- unknown ingredient added ] ) ``` #### Class level discriminator It may often be more convenient to specify a `Discriminator` once at the class level and use that class without `Annotated` type for subclass deserialization. Depending on the `Discriminator` parameters, it can be used as a replacement for [subclasses distinguishable by a field](#subclasses-distinguishable-by-a-field) as well as for [subclasses without a common field](#subclasses-without-a-common-field). The only difference is that you can't use `include_supertypes=True` because it would lead to a recursion error. 
The reworked example will look like this:

```python
from dataclasses import dataclass
from ipaddress import IPv4Address
from typing import List
from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.types import Discriminator

@dataclass
class ClientEvent(DataClassDictMixin):
    class Config(BaseConfig):
        discriminator = Discriminator(  # <- add discriminator
            field="type",
            include_subtypes=True,
        )

@dataclass
class ClientConnectedEvent(ClientEvent):
    type = "connected"

    client_ip: IPv4Address

@dataclass
class ClientDisconnectedEvent(ClientEvent):
    type = "disconnected"

    client_ip: IPv4Address

@dataclass
class AggregatedEvents(DataClassDictMixin):
    list: List[ClientEvent]  # <- use base class here
```

And now we can deserialize events based on "type" value as we did earlier:

```python
events = AggregatedEvents.from_dict(
    {
        "list": [
            {"type": "connected", "client_ip": "10.0.0.42"},
            {"type": "disconnected", "client_ip": "10.0.0.42"},
        ]
    }
)
assert events == AggregatedEvents(
    list=[
        ClientConnectedEvent(client_ip=IPv4Address("10.0.0.42")),
        ClientDisconnectedEvent(client_ip=IPv4Address("10.0.0.42")),
    ]
)
```

What's more interesting is that you can now deserialize subclasses simply by
calling the superclass `from_*` method, which is very useful:

```python
disconnected_event = ClientEvent.from_dict(
    {"type": "disconnected", "client_ip": "10.0.0.42"}
)
assert disconnected_event == ClientDisconnectedEvent(IPv4Address("10.0.0.42"))
```

The same is applicable for subclasses without a common field:

```python
@dataclass
class Ingredient(DataClassDictMixin):
    name: str

    class Config:
        discriminator = Discriminator(include_subtypes=True)

...

celery = Ingredient.from_dict({"name": "celery from my garden", "pieces": 5})
assert celery == Celery(name="celery from my garden", pieces=5)
```

#### Working with union of classes

Deserialization of a union of types distinguishable by a particular field will
be much faster using `Discriminator` because there will be no traversal
of all classes and an attempt to deserialize each of them. Usually this
approach can be used when you have multiple classes without a common
superclass or when you only need to deserialize some of the subclasses. In the
following example we will use `include_supertypes=True` to deserialize two
subclasses out of three:

```python
from dataclasses import dataclass
from typing import Annotated, Literal, Union
# or from typing_extensions import Annotated
from mashumaro import DataClassDictMixin
from mashumaro.types import Discriminator

@dataclass
class Event(DataClassDictMixin):
    pass

@dataclass
class Event1(Event):
    code: Literal[1] = 1
    ...

@dataclass
class Event2(Event):
    code: Literal[2] = 2
    ...

@dataclass
class Event3(Event):
    code: Literal[3] = 3
    ...

@dataclass
class Message(DataClassDictMixin):
    event: Annotated[
        Union[Event1, Event2],
        Discriminator(field="code", include_supertypes=True),
    ]

event1_msg = Message.from_dict({"event": {"code": 1, ...}})
event2_msg = Message.from_dict({"event": {"code": 2, ...}})
assert isinstance(event1_msg.event, Event1)
assert isinstance(event2_msg.event, Event2)

# raises InvalidFieldValue:
Message.from_dict({"event": {"code": 3, ...}})
```

Again, it's not necessary to have a common superclass. If you have a union of
dataclasses without a field by which they can be distinguished, you can still
use `Discriminator`, but deserialization will almost be the same as for
`Union` type without `Discriminator`, except that it could be possible to
deserialize subclasses with `include_subtypes=True`.
> [!IMPORTANT]\
> When both `include_subtypes` and `include_supertypes` are enabled,
> deserialization will be attempted for all subclasses first,
> and for superclasses at the end.

In the following example you can see how priority works: first we try to
deserialize `ChickpeaHummus`, and if it fails, then we try `Hummus`:

```python
@dataclass
class Hummus(DataClassDictMixin):
    made_of: Literal["chickpeas", "artichoke"]
    grams: int

@dataclass
class ChickpeaHummus(Hummus):
    made_of: Literal["chickpeas"]

@dataclass
class Celery(DataClassDictMixin):
    pieces: int

@dataclass
class Plate(DataClassDictMixin):
    ingredients: List[
        Annotated[
            Union[Hummus, Celery],
            Discriminator(include_subtypes=True, include_supertypes=True),
        ]
    ]

plate = Plate.from_dict(
    {
        "ingredients": [
            {"made_of": "chickpeas", "grams": 100},
            {"made_of": "artichoke", "grams": 50},
            {"pieces": 4},
        ]
    }
)
assert plate == Plate(
    ingredients=[
        ChickpeaHummus(made_of='chickpeas', grams=100),  # <- subclass
        Hummus(made_of='artichoke', grams=50),  # <- superclass
        Celery(pieces=4),
    ]
)
```

#### Using a custom variant tagger function

Sometimes it is impractical to have a class-level attribute with a tag value,
especially when you have a lot of classes. We can have a custom tagger
function instead. This method is applicable to all scenarios of using the
discriminator, but for demonstration purposes, let's focus only on one of
them. Suppose we want to use the middle part of `Client*Event` as a tag value:

```python
from dataclasses import dataclass
from ipaddress import IPv4Address

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.types import Discriminator

def client_event_tagger(cls):
    # not the best way of doing it, it's just a demo
    return cls.__name__[6:-5].lower()

@dataclass
class ClientEvent(DataClassDictMixin):
    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type",
            include_subtypes=True,
            variant_tagger_fn=client_event_tagger,
        )

@dataclass
class ClientConnectedEvent(ClientEvent):
    client_ip: IPv4Address

@dataclass
class ClientDisconnectedEvent(ClientEvent):
    client_ip: IPv4Address
```

We can now deserialize subclasses as we did earlier
[without variant tagger](#class-level-discriminator):

```python
disconnected_event = ClientEvent.from_dict(
    {"type": "disconnected", "client_ip": "10.0.0.42"}
)
assert disconnected_event == ClientDisconnectedEvent(IPv4Address("10.0.0.42"))
```

If we need to associate multiple tags with a single variant, we can return
a list of tags:

```python
def client_event_tagger(cls):
    name = cls.__name__[6:-5]
    return [name.lower(), name.upper()]
```

### Code generation options

#### Add `omit_none` keyword argument

If you want to have control over whether to skip `None` values on
serialization, you can add an `omit_none` parameter to `to_*` methods using
the `code_generation_options` list. The default value of the `omit_none`
parameter depends on whether the [`omit_none`](#omit_none-config-option)
config option or [`omit_none`](#omit_none-dialect-option) dialect option is
enabled.
```python
from dataclasses import dataclass

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig, TO_DICT_ADD_OMIT_NONE_FLAG

@dataclass
class Inner(DataClassDictMixin):
    x: int = None
    # "x" won't be omitted since there is no TO_DICT_ADD_OMIT_NONE_FLAG here

@dataclass
class Model(DataClassDictMixin):
    x: Inner
    a: int = None
    b: str = None  # will be omitted

    class Config(BaseConfig):
        code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG]

Model(x=Inner(), a=1).to_dict(omit_none=True)  # {'x': {'x': None}, 'a': 1}
```

#### Add `by_alias` keyword argument

If you want to have control over whether to serialize fields by their
[aliases](#field-aliases), you can add a `by_alias` parameter to `to_*`
methods using the `code_generation_options` list. The default value of the
`by_alias` parameter depends on whether the
[`serialize_by_alias`](#serialize_by_alias-config-option) config option is
enabled.

```python
from dataclasses import dataclass, field

from mashumaro import DataClassDictMixin, field_options
from mashumaro.config import BaseConfig, TO_DICT_ADD_BY_ALIAS_FLAG

@dataclass
class DataClass(DataClassDictMixin):
    field_a: int = field(metadata=field_options(alias="FieldA"))

    class Config(BaseConfig):
        code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG]

DataClass(field_a=1).to_dict()  # {'field_a': 1}
DataClass(field_a=1).to_dict(by_alias=True)  # {'FieldA': 1}
```

#### Add `dialect` keyword argument

Support for [dialects](#dialects) is disabled by default for performance
reasons. You can enable it using the `ADD_DIALECT_SUPPORT` constant:

```python
from dataclasses import dataclass
from datetime import date

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig, ADD_DIALECT_SUPPORT

@dataclass
class Entity(DataClassDictMixin):
    dt: date

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]
```

#### Add `context` keyword argument

Sometimes it's necessary to pass a "context" object to the serialization
hooks so that they can take it into account. For example, you might want an
option to remove sensitive data from the serialization result. You can add a
`context` parameter to `to_*` methods that will be passed to
[`__pre_serialize__`](#before-serialization) and
[`__post_serialize__`](#after-serialization) hooks. The type of this context,
as well as its mutability, is up to you.
```python
from dataclasses import dataclass
from typing import Dict, Optional
from uuid import UUID

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig, ADD_SERIALIZATION_CONTEXT

class BaseModel(DataClassDictMixin):
    class Config(BaseConfig):
        code_generation_options = [ADD_SERIALIZATION_CONTEXT]

@dataclass
class Account(BaseModel):
    id: UUID
    username: str
    name: str

    def __pre_serialize__(self, context: Optional[Dict] = None):
        return self

    def __post_serialize__(self, d: Dict, context: Optional[Dict] = None):
        if context and context.get("remove_sensitive_data"):
            d["username"] = "***"
            d["name"] = "***"
        return d

@dataclass
class Session(BaseModel):
    id: UUID
    key: str
    account: Account

    def __pre_serialize__(self, context: Optional[Dict] = None):
        return self

    def __post_serialize__(self, d: Dict, context: Optional[Dict] = None):
        if context and context.get("remove_sensitive_data"):
            d["key"] = "***"
        return d


foo = Session(
    id=UUID('03321c9f-6a97-421e-9869-918ff2867a71'),
    key="VQ6Q9bX4c8s",
    account=Account(
        id=UUID('4ef2baa7-edef-4d6a-b496-71e6d72c58fb'),
        username="john_doe",
        name="John"
    )
)
assert foo.to_dict() == {
    'id': '03321c9f-6a97-421e-9869-918ff2867a71',
    'key': 'VQ6Q9bX4c8s',
    'account': {
        'id': '4ef2baa7-edef-4d6a-b496-71e6d72c58fb',
        'username': 'john_doe',
        'name': 'John'
    }
}
assert foo.to_dict(context={"remove_sensitive_data": True}) == {
    'id': '03321c9f-6a97-421e-9869-918ff2867a71',
    'key': '***',
    'account': {
        'id': '4ef2baa7-edef-4d6a-b496-71e6d72c58fb',
        'username': '***',
        'name': '***'
    }
}
```

### Generic dataclasses

Along with [user-defined generic types](#user-defined-generic-types)
implementing the `SerializableType` interface, generic and variadic generic
dataclasses can also be used. There are two applicable scenarios for them.

#### Generic dataclass inheritance

If you have a generic dataclass and want to serialize and deserialize its
instances depending on the concrete types, you can use inheritance for that:

```python
from dataclasses import dataclass
from datetime import date
from typing import Generic, Mapping, Tuple, TypeVar, TypeVarTuple

from mashumaro import DataClassDictMixin

KT = TypeVar("KT")
VT = TypeVar("VT", date, str)
Ts = TypeVarTuple("Ts")

@dataclass
class GenericDataClass(Generic[KT, VT, *Ts]):
    x: Mapping[KT, VT]
    y: Tuple[*Ts, KT]

@dataclass
class ConcreteDataClass(
    GenericDataClass[str, date, *Tuple[float, ...]],
    DataClassDictMixin,
):
    pass

ConcreteDataClass.from_dict({"x": {"a": "2021-01-01"}, "y": [1, 2, "a"]})
# ConcreteDataClass(x={'a': datetime.date(2021, 1, 1)}, y=(1.0, 2.0, 'a'))
```

You can override a `TypeVar` field with a concrete type or another `TypeVar`.
Partial specification of concrete types is also allowed. If a generic
dataclass is inherited without type overriding, the types of its fields
remain untouched.

#### Generic dataclass in a field type

Another approach is to specify concrete types in the field type hints.
This can be helpful when you need different versions of the same generic
dataclass:

```python
from dataclasses import dataclass
from datetime import date
from typing import Generic, TypeVar

from mashumaro import DataClassDictMixin

T = TypeVar('T')

@dataclass
class GenericDataClass(Generic[T], DataClassDictMixin):
    x: T

@dataclass
class DataClass(DataClassDictMixin):
    date: GenericDataClass[date]
    str: GenericDataClass[str]

instance = DataClass(
    date=GenericDataClass(x=date(2021, 1, 1)),
    str=GenericDataClass(x='2021-01-01'),
)
dictionary = {'date': {'x': '2021-01-01'}, 'str': {'x': '2021-01-01'}}
assert DataClass.from_dict(dictionary) == instance
```

### GenericSerializableType interface

There is a generic alternative to
[`SerializableType`](#serializabletype-interface) called
`GenericSerializableType`. It makes it possible to decide for yourself how to
serialize and deserialize input data depending on the types provided:

```python
from dataclasses import dataclass
from datetime import date
from typing import Dict, TypeVar

from mashumaro import DataClassDictMixin
from mashumaro.types import GenericSerializableType

KT = TypeVar("KT")
VT = TypeVar("VT")

class DictWrapper(Dict[KT, VT], GenericSerializableType):
    __packers__ = {date: lambda x: x.isoformat(), str: str}
    __unpackers__ = {date: date.fromisoformat, str: str}

    def _serialize(self, types) -> Dict[KT, VT]:
        k_type, v_type = types
        k_conv = self.__packers__[k_type]
        v_conv = self.__packers__[v_type]
        return {k_conv(k): v_conv(v) for k, v in self.items()}

    @classmethod
    def _deserialize(cls, value, types) -> "DictWrapper[KT, VT]":
        k_type, v_type = types
        k_conv = cls.__unpackers__[k_type]
        v_conv = cls.__unpackers__[v_type]
        return cls({k_conv(k): v_conv(v) for k, v in value.items()})

@dataclass
class DataClass(DataClassDictMixin):
    x: DictWrapper[date, str]
    y: DictWrapper[str, date]

input_data = {
    "x": {"2022-12-07": "2022-12-07"},
    "y": {"2022-12-07": "2022-12-07"},
}
obj = DataClass.from_dict(input_data)
assert obj == DataClass(
    x=DictWrapper({date(2022, 12, 7): "2022-12-07"}),
    y=DictWrapper({"2022-12-07": date(2022, 12, 7)}),
)
assert obj.to_dict() == input_data
```

As you can see, the code turns out to be massive compared to the
[alternative](#user-defined-generic-types), but in rare cases such
flexibility can be useful. You should think twice about whether it's really
worth using it.

### Serialization hooks

In some cases you need to prepare input / output data or perform some extra
actions at different stages of the deserialization / serialization lifecycle.
You can do this with different types of hooks.
#### Before deserialization

To do something with a dictionary that will be passed to deserialization,
you can use the `__pre_deserialize__` class method:

```python
@dataclass
class DataClass(DataClassJSONMixin):
    abc: int

    @classmethod
    def __pre_deserialize__(cls, d: Dict[Any, Any]) -> Dict[Any, Any]:
        return {k.lower(): v for k, v in d.items()}

print(DataClass.from_dict({"ABC": 123}))    # DataClass(abc=123)
print(DataClass.from_json('{"ABC": 123}'))  # DataClass(abc=123)
```

#### After deserialization

To do something with a dataclass instance that was created as a result of
deserialization, you can use the `__post_deserialize__` class method:

```python
@dataclass
class DataClass(DataClassJSONMixin):
    abc: int

    @classmethod
    def __post_deserialize__(cls, obj: 'DataClass') -> 'DataClass':
        obj.abc = 456
        return obj

print(DataClass.from_dict({"abc": 123}))    # DataClass(abc=456)
print(DataClass.from_json('{"abc": 123}'))  # DataClass(abc=456)
```

#### Before serialization

To do something before serialization, you can use the `__pre_serialize__`
method:

```python
@dataclass
class DataClass(DataClassJSONMixin):
    abc: int
    counter: ClassVar[int] = 0

    def __pre_serialize__(self) -> 'DataClass':
        self.counter += 1
        return self

obj = DataClass(abc=123)
obj.to_dict()
obj.to_json()
print(obj.counter)  # 2
```

Note that you can add an additional `context` argument using the
[corresponding](#add-context-keyword-argument) code generation option.

#### After serialization

To do something with a dictionary that was created as a result of
serialization, you can use the `__post_serialize__` method:

```python
@dataclass
class DataClass(DataClassJSONMixin):
    user: str
    password: str

    def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]:
        d.pop('password')
        return d

obj = DataClass(user="name", password="secret")
print(obj.to_dict())  # {"user": "name"}
print(obj.to_json())  # '{"user": "name"}'
```

Note that you can add an additional `context` argument using the
[corresponding](#add-context-keyword-argument) code generation option.

JSON Schema
-------------------------------------------------------------------------------

You can build JSON Schema not only for dataclasses but also for any other
[supported](#supported-data-types) data types. There is support for the
following standards:
* [Draft 2020-12](https://json-schema.org/specification.html)
* [OpenAPI Specification 3.1.0](https://spec.openapis.org/oas/v3.1.0)

### Building JSON Schema

For simple one-time cases it's recommended to start by using the configurable
`build_json_schema` function. It returns a `JSONSchema` object that can be
serialized to JSON or to a dict:

```python
from dataclasses import dataclass, field
from typing import List
from uuid import UUID

from mashumaro.jsonschema import build_json_schema

@dataclass
class User:
    id: UUID
    name: str = field(metadata={"description": "User name"})

print(build_json_schema(List[User]).to_json())
```
Click to show the result ```json { "type": "array", "items": { "type": "object", "title": "User", "properties": { "id": { "type": "string", "format": "uuid" }, "name": { "type": "string", "description": "User name" } }, "additionalProperties": false, "required": [ "id", "name" ] } } ```
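As mentioned above, the returned `JSONSchema` object can be serialized to a
dict as well. A minimal sketch reusing the `User` model from the example
above; the asserted keys simply mirror the result shown:

```python
# `to_dict` gives the same schema as a plain dictionary
schema = build_json_schema(List[User]).to_dict()

assert schema["type"] == "array"
assert schema["items"]["title"] == "User"
assert schema["items"]["required"] == ["id", "name"]
```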
Additional validation keywords ([see below](#json-schema-constraints)) can be added using annotations: ```python from typing import Annotated, List from mashumaro.jsonschema import build_json_schema from mashumaro.jsonschema.annotations import Maximum, MaxItems print( build_json_schema( Annotated[ List[Annotated[int, Maximum(42)]], MaxItems(4) ] ).to_json() ) ```
Click to show the result ```json { "type": "array", "items": { "type": "integer", "maximum": 42 }, "maxItems": 4 } ```
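String constraints from the same module work in exactly the same way. A small
sketch using `MinLength` and `Pattern`, both listed in the
[constraints section](#json-schema-constraints) below; the expected output is
an assumption based on the corresponding JSON Schema keyword names:

```python
from typing import Annotated

from mashumaro.jsonschema import build_json_schema
from mashumaro.jsonschema.annotations import MinLength, Pattern

print(
    build_json_schema(
        Annotated[str, MinLength(3), Pattern("^[a-z]+$")]
    ).to_json()
)
# expected to contain: "type": "string", "minLength": 3, "pattern": "^[a-z]+$"
```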
The [`$schema`](https://json-schema.org/draft/2020-12/json-schema-core.html#name-the-schema-keyword)
keyword can be added by setting `with_dialect_uri` to `True`:

```python
print(build_json_schema(str, with_dialect_uri=True).to_json())
```
Click to show the result ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "string" } ```
By default, the Draft 2020-12 dialect is used, but you can change it to
another one by setting the `dialect` parameter:

```python
from mashumaro.jsonschema import OPEN_API_3_1

print(
    build_json_schema(
        str, dialect=OPEN_API_3_1, with_dialect_uri=True
    ).to_json()
)
```
Click to show the result ```json { "$schema": "https://spec.openapis.org/oas/3.1/dialect/base", "type": "string" } ```
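For symmetry, the default dialect can also be passed explicitly. This sketch
assumes the Draft 2020-12 dialect object is exported as `DRAFT_2020_12` next
to `OPEN_API_3_1`:

```python
from mashumaro.jsonschema import DRAFT_2020_12

# assumed to be equivalent to the default behaviour
print(
    build_json_schema(
        str, dialect=DRAFT_2020_12, with_dialect_uri=True
    ).to_json()
)
```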
All dataclass JSON Schemas may or may not be placed in the
[definitions](https://json-schema.org/draft/2020-12/json-schema-core.html#name-schema-re-use-with-defs)
section, depending on the `all_refs` parameter, whose default value comes
from the dialect used (`False` for Draft 2020-12, `True` for OpenAPI
Specification 3.1.0):

```python
print(build_json_schema(List[User], all_refs=True).to_json())
```
Click to show the result ```json { "type": "array", "$defs": { "User": { "type": "object", "title": "User", "properties": { "id": { "type": "string", "format": "uuid" }, "name": { "type": "string" } }, "additionalProperties": false, "required": [ "id", "name" ] } }, "items": { "$ref": "#/$defs/User" } } ```
The definitions section can be omitted from the final document by setting the
`with_definitions` parameter to `False`:

```python
print(
    build_json_schema(
        List[User], dialect=OPEN_API_3_1, with_definitions=False
    ).to_json()
)
```
Click to show the result ```json { "type": "array", "items": { "$ref": "#/components/schemas/User" } } ```
The reference prefix can be changed by using the `ref_prefix` parameter:

```python
print(
    build_json_schema(
        List[User],
        all_refs=True,
        with_definitions=False,
        ref_prefix="#/components/responses",
    ).to_json()
)
```
Click to show the result ```json { "type": "array", "items": { "$ref": "#/components/responses/User" } } ```
The omitted definitions can be found later in the `Context` object that you
could have created and passed to the function, but it may be easier to use
`JSONSchemaBuilder` for that. For example, you might find it handy to build
an OpenAPI specification step by step, passing your models to the builder and
getting all the registered definitions later. This builder has reasonable
defaults but can be customized if necessary.

```python
from mashumaro.jsonschema import JSONSchemaBuilder, OPEN_API_3_1

builder = JSONSchemaBuilder(OPEN_API_3_1)

@dataclass
class User:
    id: UUID
    name: str

@dataclass
class Device:
    id: UUID
    model: str

print(builder.build(List[User]).to_json())
print(builder.build(List[Device]).to_json())
print(builder.get_definitions().to_json())
```
Click to show the result ```json { "type": "array", "items": { "$ref": "#/components/schemas/User" } } ``` ```json { "type": "array", "items": { "$ref": "#/components/schemas/Device" } } ``` ```json { "User": { "type": "object", "title": "User", "properties": { "id": { "type": "string", "format": "uuid" }, "name": { "type": "string" } }, "additionalProperties": false, "required": [ "id", "name" ] }, "Device": { "type": "object", "title": "Device", "properties": { "id": { "type": "string", "format": "uuid" }, "model": { "type": "string" } }, "additionalProperties": false, "required": [ "id", "model" ] } } ```
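The exact customization options of `JSONSchemaBuilder` are not shown here; a
reasonable assumption is that it accepts the same reference-related settings
as `build_json_schema`. A hedged sketch under that assumption:

```python
from mashumaro.jsonschema import JSONSchemaBuilder, OPEN_API_3_1

# Assumption: `all_refs` and `ref_prefix` mirror the `build_json_schema`
# parameters demonstrated above.
builder = JSONSchemaBuilder(
    OPEN_API_3_1,
    all_refs=True,
    ref_prefix="#/components/responses",
)
```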
### JSON Schema constraints

Apart from the required keywords that are added automatically for certain
data types, you're free to use additional validation keywords. They're
represented by the corresponding classes in
[`mashumaro.jsonschema.annotations`](https://github.com/Fatal1ty/mashumaro/blob/master/mashumaro/jsonschema/annotations.py):

Number constraints:
* [`Minimum`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-minimum)
* [`Maximum`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-maximum)
* [`ExclusiveMinimum`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-exclusiveminimum)
* [`ExclusiveMaximum`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-exclusivemaximum)
* [`MultipleOf`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-multipleof)

String constraints:
* [`MinLength`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-minlength)
* [`MaxLength`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-maxlength)
* [`Pattern`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-pattern)

Array constraints:
* [`MinItems`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-minitems)
* [`MaxItems`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-maxitems)
* [`UniqueItems`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-uniqueitems)
* [`Contains`](https://json-schema.org/draft/2020-12/json-schema-core.html#name-contains)
* [`MinContains`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-mincontains)
* [`MaxContains`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-maxcontains)

Object constraints:
* [`MaxProperties`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-maxproperties)
* [`MinProperties`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-minproperties)
* [`DependentRequired`](https://json-schema.org/draft/2020-12/json-schema-validation.html#name-dependentrequired)

### Extending JSON Schema

Using a `Config` class, it is possible to override some parts of the schema.
Currently, you can do the following:
* override some field schemas using the "properties" key
* change `additionalProperties` using the "additionalProperties" key

```python
from dataclasses import dataclass

from mashumaro.jsonschema import build_json_schema

@dataclass
class FooBar:
    foo: str
    bar: int

    class Config:
        json_schema = {
            "properties": {
                "foo": {
                    "type": "string",
                    "description": "bar"
                }
            },
            "additionalProperties": True,
        }

print(build_json_schema(FooBar).to_json())
```
Click to show the result ```json { "type": "object", "title": "FooBar", "properties": { "foo": { "type": "string", "description": "bar" }, "bar": { "type": "integer" } }, "additionalProperties": true, "required": [ "foo", "bar" ] } ```
You can also change the "additionalProperties" key to a specific schema by
passing it a `JSONSchema` instance instead of a bool value (see the sketch
after the next example).

### JSON Schema and custom serialization methods

Mashumaro provides different ways to override default serialization methods
for dataclass fields or specific data types. In order for these overrides to
be reflected in the schema, you need to make sure that the methods have
return type annotations.

```python
from dataclasses import dataclass, field

from mashumaro.config import BaseConfig
from mashumaro.jsonschema import build_json_schema

def str_as_list(s: str) -> list[str]:
    return list(s)

def int_as_str(i: int) -> str:
    return str(i)

@dataclass
class FooBar:
    foo: str = field(metadata={"serialize": str_as_list})
    bar: int

    class Config(BaseConfig):
        serialization_strategy = {
            int: {
                "serialize": int_as_str
            }
        }

print(build_json_schema(FooBar).to_json())
```
Click to show the result ```json { "type": "object", "title": "FooBar", "properties": { "foo": { "type": "array", "items": { "type": "string" } }, "bar": { "type": "string" } }, "additionalProperties": false, "required": [ "foo", "bar" ] } ```
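And here is the sketch promised above for replacing the
"additionalProperties" bool with a specific schema. It assumes that the
`JSONSchema` model and the `JSONSchemaInstanceType` enum are importable from
`mashumaro.jsonschema.models`; treat the import path and the expected output
as assumptions rather than guarantees:

```python
from dataclasses import dataclass

from mashumaro.jsonschema import build_json_schema
# assumed import path for the JSONSchema model and its type enum
from mashumaro.jsonschema.models import JSONSchema, JSONSchemaInstanceType

@dataclass
class FooBar:
    foo: str

    class Config:
        json_schema = {
            # any extra property must be a string
            "additionalProperties": JSONSchema(
                type=JSONSchemaInstanceType.STRING
            ),
        }

print(build_json_schema(FooBar).to_json())
# "additionalProperties" is expected to render as {"type": "string"}
```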
mashumaro-3.13.1/benchmark/000077500000000000000000000000001463331001200155165ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/__init__.py000066400000000000000000000000001463331001200176150ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/charts/000077500000000000000000000000001463331001200170025ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/charts/dump_dark.svg000066400000000000000000000253311463331001200214750ustar00rootroot000000000000001 us10 us100 us1000 usmashumarocattrspydantic v2asdictmarshmallowpydantic v1dataclasses-json9.74 us18.4 us46.3 us141 us155 us642 us7.66 usConverting GitHub Issue object to dictmashumaro-3.13.1/benchmark/charts/dump_light.svg000066400000000000000000000253311463331001200216630ustar00rootroot000000000000001 us10 us100 us1000 usmashumarocattrspydantic v2asdictmarshmallowpydantic v1dataclasses-json9.74 us18.4 us46.3 us141 us155 us642 us7.66 usConverting GitHub Issue object to dictmashumaro-3.13.1/benchmark/charts/load_dark.svg000066400000000000000000000253161463331001200214520ustar00rootroot0000000000000010 us100 us1000 us10000 usmashumarocattrspydantic v2pydantic v1marshmallowdataclasses-jsondacite16.1 us19.2 us128 us322 us1.17 ms1.30 ms12.4 usCreating GitHub Issue object from dictmashumaro-3.13.1/benchmark/charts/load_light.svg000066400000000000000000000253161463331001200216400ustar00rootroot0000000000000010 us100 us1000 us10000 usmashumarocattrspydantic v2pydantic v1marshmallowdataclasses-jsondacite16.1 us19.2 us128 us322 us1.17 ms1.30 ms12.4 usCreating GitHub Issue object from dictmashumaro-3.13.1/benchmark/common.py000066400000000000000000000031711463331001200173620ustar00rootroot00000000000000import json import pathlib from abc import ABC from typing import Any, Dict, Literal, Type import pyperf def load_data(): with open(pathlib.Path(__file__).parent / "data" / "issue.json") as f: return json.load(f) class AbstractBenchmark(ABC): LIBRARY: str def __init__(self, runner: pyperf.Runner) -> None: self.runner = runner def warmup(self, data: Dict[str, Any]) -> None: pass def run_loader(self, data: Dict[str, Any]) -> pyperf.Benchmark: pass def run_dumper(self, data: Dict[str, Any]) -> pyperf.Benchmark: pass def _bench_loader_func(self, func, *args, **kwargs) -> pyperf.Benchmark: return self.runner.bench_func( self.get_name("load"), func, *args, **kwargs ) def _bench_dumper_func(self, func, *args, **kwargs) -> pyperf.Benchmark: return self.runner.bench_func( self.get_name("dump"), func, *args, **kwargs ) def get_name(self, bench_type: Literal["dump", "load"]) -> str: return f"{self.LIBRARY}[{bench_type}]" class BenchmarkRunner: def __init__(self, benchmark_cls: Type[AbstractBenchmark]) -> None: self._runner = pyperf.Runner() self._benchmark = benchmark_cls(self._runner) self._data = load_data() def run(self, benchmark_type: str) -> None: self._benchmark.warmup(self._data) if benchmark_type == "load": self._benchmark.run_loader(self._data) elif benchmark_type == "dump": self._benchmark.run_dumper(self._data) else: raise ValueError(f"Unknown benchmark_type: {benchmark_type}") mashumaro-3.13.1/benchmark/create_chart_specs.py000066400000000000000000000022011463331001200217040ustar00rootroot00000000000000import json from pathlib import Path from pyperf import Benchmark def create_spec(benchmark_type: str) -> None: data_dir = Path(Path.cwd() / "benchmark" / "data") with open(data_dir / "spec_template.json") as f: spec = json.load(f) if benchmark_type == "load": spec["title"]["text"] = "Creating GitHub Issue object from dict" elif benchmark_type 
== "dump": spec["title"]["text"] = "Converting GitHub Issue object to dict" values = spec["data"]["values"] for file in Path(Path.cwd() / "benchmark" / "data" / "results").glob( f"{benchmark_type}_*.json" ): benchmark: Benchmark = Benchmark.load(str(file)) library_name = benchmark.get_name()[:-6] values.append( { "library": library_name, "time": benchmark.mean(), "timeFormat": benchmark.format_value(benchmark.mean()), } ) values.sort(key=lambda v: v["time"]) with open(data_dir / f"spec_{benchmark_type}.json", "w") as f: json.dump(spec, f) if __name__ == "__main__": create_spec("load") create_spec("dump") mashumaro-3.13.1/benchmark/data/000077500000000000000000000000001463331001200164275ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/data/issue.json000066400000000000000000000204521463331001200204550ustar00rootroot00000000000000{ "url": "https://api.github.com/repos/microsoft/vscode/issues/179086", "repository_url": "https://api.github.com/repos/microsoft/vscode", "labels_url": "https://api.github.com/repos/microsoft/vscode/issues/179086/labels{/name}", "comments_url": "https://api.github.com/repos/microsoft/vscode/issues/179086/comments", "events_url": "https://api.github.com/repos/microsoft/vscode/issues/179086/events", "html_url": "https://github.com/microsoft/vscode/issues/179086", "id": 1653235513, "node_id": "I_kwDOAn8RLM5iil85", "number": 179086, "title": "GTK+ module libcanberra-gtk-module.so cannot be loaded", "user": { "login": "Petros626", "id": 62354721, "node_id": "MDQ6VXNlcjYyMzU0NzIx", "avatar_url": "https://avatars.githubusercontent.com/u/62354721?v=4", "gravatar_id": "", "url": "https://api.github.com/users/Petros626", "html_url": "https://github.com/Petros626", "followers_url": "https://api.github.com/users/Petros626/followers", "following_url": "https://api.github.com/users/Petros626/following{/other_user}", "gists_url": "https://api.github.com/users/Petros626/gists{/gist_id}", "starred_url": "https://api.github.com/users/Petros626/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Petros626/subscriptions", "organizations_url": "https://api.github.com/users/Petros626/orgs", "repos_url": "https://api.github.com/users/Petros626/repos", "events_url": "https://api.github.com/users/Petros626/events{/privacy}", "received_events_url": "https://api.github.com/users/Petros626/received_events", "type": "User", "site_admin": false }, "labels": [ { "id": 256129993, "node_id": "MDU6TGFiZWwyNTYxMjk5OTM=", "url": "https://api.github.com/repos/microsoft/vscode/labels/bug", "name": "bug", "color": "8D6673", "default": true, "description": "Issue identified by VS Code Team member as probable bug" }, { "id": 300334786, "node_id": "MDU6TGFiZWwzMDAzMzQ3ODY=", "url": "https://api.github.com/repos/microsoft/vscode/labels/verification-found", "name": "verification-found", "color": "f7c6c7", "default": false, "description": "Issue verification failed" }, { "id": 321092510, "node_id": "MDU6TGFiZWwzMjEwOTI1MTA=", "url": "https://api.github.com/repos/microsoft/vscode/labels/linux", "name": "linux", "color": "006b75", "default": false, "description": "Issues with VS Code on Linux" }, { "id": 362694632, "node_id": "MDU6TGFiZWwzNjI2OTQ2MzI=", "url": "https://api.github.com/repos/microsoft/vscode/labels/terminal", "name": "terminal", "color": "c5def5", "default": false, "description": "Integrated terminal issues" }, { "id": 1034445948, "node_id": "MDU6TGFiZWwxMDM0NDQ1OTQ4", "url": "https://api.github.com/repos/microsoft/vscode/labels/snap", "name": "snap", "color": 
"c5def5", "default": false, "description": "Issues related to the snap package" }, { "id": 1119794474, "node_id": "MDU6TGFiZWwxMTE5Nzk0NDc0", "url": "https://api.github.com/repos/microsoft/vscode/labels/confirmed", "name": "confirmed", "color": "009800", "default": false, "description": "Issue has been confirmed by VS Code Team member" } ], "state": "open", "locked": false, "assignee": { "login": "deepak1556", "id": 964386, "node_id": "MDQ6VXNlcjk2NDM4Ng==", "avatar_url": "https://avatars.githubusercontent.com/u/964386?v=4", "gravatar_id": "", "url": "https://api.github.com/users/deepak1556", "html_url": "https://github.com/deepak1556", "followers_url": "https://api.github.com/users/deepak1556/followers", "following_url": "https://api.github.com/users/deepak1556/following{/other_user}", "gists_url": "https://api.github.com/users/deepak1556/gists{/gist_id}", "starred_url": "https://api.github.com/users/deepak1556/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/deepak1556/subscriptions", "organizations_url": "https://api.github.com/users/deepak1556/orgs", "repos_url": "https://api.github.com/users/deepak1556/repos", "events_url": "https://api.github.com/users/deepak1556/events{/privacy}", "received_events_url": "https://api.github.com/users/deepak1556/received_events", "type": "User", "site_admin": false }, "assignees": [ { "login": "deepak1556", "id": 964386, "node_id": "MDQ6VXNlcjk2NDM4Ng==", "avatar_url": "https://avatars.githubusercontent.com/u/964386?v=4", "gravatar_id": "", "url": "https://api.github.com/users/deepak1556", "html_url": "https://github.com/deepak1556", "followers_url": "https://api.github.com/users/deepak1556/followers", "following_url": "https://api.github.com/users/deepak1556/following{/other_user}", "gists_url": "https://api.github.com/users/deepak1556/gists{/gist_id}", "starred_url": "https://api.github.com/users/deepak1556/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/deepak1556/subscriptions", "organizations_url": "https://api.github.com/users/deepak1556/orgs", "repos_url": "https://api.github.com/users/deepak1556/repos", "events_url": "https://api.github.com/users/deepak1556/events{/privacy}", "received_events_url": "https://api.github.com/users/deepak1556/received_events", "type": "User", "site_admin": false } ], "milestone": null, "comments": 15, "created_at": "2023-04-04T05:56:47Z", "updated_at": "2023-07-25T13:38:50Z", "closed_at": null, "author_association": "NONE", "active_lock_reason": null, "body": "Hello guys,\r\n\r\nI'm using _OpenCV_ in VSCode (Ubuntu Software installation) and everything worked fine, until I updated my libraries (`sudo apt-get update/upgrade/full-upgrade`) I guess. But now VSCode or rather _OpenCV_ can't find the _GTK_ library. 
\r\n\r\nI've tested this succesfully:\r\n `GTK_PATH=/usr/lib/x86_64-linux-gnu/gtk-2.0 ./test` --> works\r\n\r\nBut when I specify the version gtk-3.0:\r\n`GTK_PATH=/usr/lib/x86_64-linux-gnu/gtk-3.0 ./test` --> fails\r\n\r\n Normally the loaded version without using GTK_PATH is gtk-3.0 and then it shows me this error:\r\n![image](https://user-images.githubusercontent.com/62354721/229435079-a85c5ff1-a8c3-4870-9340-bdc9404e595b.png)\r\n\r\n\r\nI verified that the in `/usr/lib/x86_64-linux-gnu/gtk-3.0/modules` is:\r\n\r\n![image](https://user-images.githubusercontent.com/62354721/229059188-4171f730-d2d9-4b6c-b103-447ca8ee727b.png)\r\n\r\nWould appreciate to solve this warning and to understand why the .so cannot be loaded.\r\n\r\nThanks in advance\r\n\r\n\r\n**UPDATE:** the example code can be executed without warnings in the normal gnome terminal, but VSCode seems to habe problems to find the specific libraries, so it's definitely _VSCode_.", "closed_by": { "login": "deepak1556", "id": 964386, "node_id": "MDQ6VXNlcjk2NDM4Ng==", "avatar_url": "https://avatars.githubusercontent.com/u/964386?v=4", "gravatar_id": "", "url": "https://api.github.com/users/deepak1556", "html_url": "https://github.com/deepak1556", "followers_url": "https://api.github.com/users/deepak1556/followers", "following_url": "https://api.github.com/users/deepak1556/following{/other_user}", "gists_url": "https://api.github.com/users/deepak1556/gists{/gist_id}", "starred_url": "https://api.github.com/users/deepak1556/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/deepak1556/subscriptions", "organizations_url": "https://api.github.com/users/deepak1556/orgs", "repos_url": "https://api.github.com/users/deepak1556/repos", "events_url": "https://api.github.com/users/deepak1556/events{/privacy}", "received_events_url": "https://api.github.com/users/deepak1556/received_events", "type": "User", "site_admin": false }, "reactions": { "url": "https://api.github.com/repos/microsoft/vscode/issues/179086/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 }, "timeline_url": "https://api.github.com/repos/microsoft/vscode/issues/179086/timeline", "state_reason": "reopened" } mashumaro-3.13.1/benchmark/data/spec_template.json000066400000000000000000000071411463331001200221520ustar00rootroot00000000000000{ "$schema": "https://vega.github.io/schema/vega-lite/v5.json", "title": { "text": "Creating GitHub Issue object from dict", "fontSize": 13, "fontWeight": "normal", "dy": -15, "color": "#706D6C" }, "data": { "values": [ ] }, "config": { "params": [ { "name": "defaultFont", "value": "-apple-system,BlinkMacSystemFont,\"Segoe UI\",Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"" }, { "name": "titleColor", "value": "#333333" }, { "name": "labelColor", "value": "#333333" } ], "header": { "labelFont": { "expr": "defaultFont" }, "titleFont": { "expr": "defaultFont" }, "titleFontWeight": 500 }, "text": { "font": { "expr": "defaultFont" }, "color": { "expr": "labelColor" } }, "mark": { "font": { "expr": "defaultFont" }, "color": { "expr": "labelColor" } }, "title": { "font": { "expr": "defaultFont" }, "subtitleFont": { "expr": "defaultFont" }, "fontWeight": 500 }, "axis": { "labelColor": { "expr": "labelColor" }, "labelFont": { "expr": "defaultFont" }, "titleFont": { "expr": "defaultFont" }, "titleFontWeight": 500, "titleColor": { "expr": "titleColor" }, "titleFontSize": 12 }, "legend": { "titleFontWeight": 500, "titleColor": { "expr": 
"titleColor" }, "titleFontSize": 12, "labelColor": { "expr": "labelColor" }, "labelFont": { "expr": "defaultFont" }, "titleFont": { "expr": "defaultFont" } }, "view": { "stroke": null }, "background": "transparent" }, "background": "transparent", "encoding": { "y": { "field": "library", "type": "nominal", "axis": { "grid": false, "title": null, "labelFontSize": 13, "ticks": false, "labelPadding": 10, "domain": false }, "sort": null }, "x": { "field": "time", "type": "quantitative", "axis": { "title": null, "labelExpr": "datum.value * 1000000 + ' us'", "tickCount": 3, "tickSize": 0, "labelPadding": 6, "labelAlign": "center", "labelFontSize": 12, "tickColor": "rgba(127,127,127,0.25)", "gridColor": "rgba(127,127,127,0.25)", "domain": false }, "scale": { "type": "log" } } }, "height": 140, "width": "container", "layer": [ { "mark": "bar", "encoding": { "size": { "value": 13 }, "color": { "value": "#E15759" } } }, { "transform": [ { "filter": "datum.library !== 'mashumaro'" } ], "mark": { "type": "text", "align": "left", "baseline": "middle", "dx": 6, "fontSize": 12 }, "encoding": { "text": { "field": "timeFormat" } } }, { "transform": [ { "filter": "datum.library === 'mashumaro'" } ], "mark": { "type": "text", "align": "left", "baseline": "middle", "dx": 6, "fontSize": 12, "fontWeight": "bold" }, "encoding": { "text": { "field": "timeFormat" } } } ] } mashumaro-3.13.1/benchmark/libs/000077500000000000000000000000001463331001200164475ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/__init__.py000066400000000000000000000000001463331001200205460ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/asdict/000077500000000000000000000000001463331001200177165ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/asdict/__init__.py000066400000000000000000000000001463331001200220150ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/asdict/common.py000066400000000000000000000011521463331001200215570ustar00rootroot00000000000000from dataclasses import asdict import pyperf from benchmark.common import AbstractBenchmark from benchmark.libs.mashumaro.common import BasicDecoder, DefaultDialect, Issue class Benchmark(AbstractBenchmark): LIBRARY = "asdict" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.decoder = BasicDecoder(Issue, default_dialect=DefaultDialect) def warmup(self, data) -> None: asdict(self.decoder.decode(data)) def run_dumper(self, data) -> pyperf.Benchmark: obj = self.decoder.decode(data) return self._bench_dumper_func(asdict, obj) mashumaro-3.13.1/benchmark/libs/asdict/dump.py000066400000000000000000000002101463331001200212260ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.asdict.common import Benchmark BenchmarkRunner(Benchmark).run("dump") mashumaro-3.13.1/benchmark/libs/cattrs/000077500000000000000000000000001463331001200177475ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/cattrs/__init__.py000066400000000000000000000000001463331001200220460ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/cattrs/common.py000066400000000000000000000111341463331001200216110ustar00rootroot00000000000000from dataclasses import dataclass from datetime import datetime from enum import Enum from typing import Any, Dict, Optional, Union import cattr import cattrs.gen import pyperf from benchmark.common import AbstractBenchmark class IssueState(Enum): OPEN = "open" CLOSED = "closed" class MilestoneState(Enum): OPEN = "open" CLOSED = "closed" class IssueStateReason(Enum): COMPLETED 
= "completed" REOPENED = "reopened" NOT_PLANNED = "not_planned" class AuthorAssociation(Enum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = "MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" @dataclass(slots=True) class User: id: int node_id: str avatar_url: str gravatar_id: Optional[str] url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool name: Optional[str] = None email: Optional[str] = None starred_at: Optional[datetime] = None @dataclass(slots=True) class IssueLabel: id: int node_id: str url: str name: str description: Optional[str] color: Optional[str] default: bool @dataclass(slots=True) class Milestone: url: str html_url: str labels_url: str id: int node_id: str number: int title: str description: Optional[str] creator: Optional[User] open_issues: int closed_issues: int created_at: datetime updated_at: datetime closed_at: Optional[datetime] due_on: Optional[datetime] state: MilestoneState = MilestoneState.OPEN @dataclass(slots=True) class Reactions: url: str total_count: int plus_one: int minus_one: int laugh: int confused: int heart: int hooray: int eyes: int rocket: int @dataclass(slots=True) class Issue: id: int node_id: str url: str repository_url: str labels_url: str comments_url: str events_url: str html_url: str number: int state: IssueState state_reason: Optional[IssueStateReason] title: str body: Optional[str] user: Optional[User] labels: list[Union[IssueLabel, str]] assignee: Optional[User] assignees: Optional[list[User]] milestone: Optional[Milestone] locked: bool active_lock_reason: Optional[str] comments: int closed_at: Optional[datetime] created_at: datetime updated_at: datetime closed_by: Optional[User] author_association: AuthorAssociation draft: bool = False body_html: Optional[str] = None body_text: Optional[str] = None timeline_url: Optional[str] = None reactions: Optional[Reactions] = None class Benchmark(AbstractBenchmark): LIBRARY = "cattrs" def __init__(self, runner: pyperf.Runner): super().__init__(runner) self._converter = cattr.Converter(detailed_validation=False) self._converter.register_structure_hook( Reactions, cattr.gen.make_dict_structure_fn( Reactions, self._converter, plus_one=cattrs.gen.override(rename="+1"), minus_one=cattrs.gen.override(rename="-1"), ), ) self._converter.register_unstructure_hook( Reactions, cattr.gen.make_dict_unstructure_fn( Reactions, self._converter, plus_one=cattrs.gen.override(rename="+1"), minus_one=cattrs.gen.override(rename="-1"), ), ) self._converter.register_structure_hook( datetime, lambda o, _: datetime.fromisoformat(o) ) self._converter.register_unstructure_hook( datetime, lambda o: o.isoformat() ) self._converter.register_structure_hook( Union[IssueLabel, str], lambda o, _: self._converter.structure( o, IssueLabel if isinstance(o, dict) else str ), ) def warmup(self, data: Dict[str, Any]): self._converter.unstructure(self._converter.structure(data, Issue)) def run_loader(self, data: Dict[str, Any]) -> pyperf.Benchmark: return self._bench_loader_func(self._converter.structure, data, Issue) def run_dumper(self, data: Dict[str, Any]) -> pyperf.Benchmark: obj = self._converter.structure(data, Issue) return self._bench_dumper_func(self._converter.unstructure, obj) 
mashumaro-3.13.1/benchmark/libs/cattrs/dump.py000066400000000000000000000002101463331001200212570ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.cattrs.common import Benchmark BenchmarkRunner(Benchmark).run("dump") mashumaro-3.13.1/benchmark/libs/cattrs/load.py000066400000000000000000000002101463331001200212310ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.cattrs.common import Benchmark BenchmarkRunner(Benchmark).run("load") mashumaro-3.13.1/benchmark/libs/dacite/000077500000000000000000000000001463331001200177005ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/dacite/__init__.py000066400000000000000000000000001463331001200217770ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/dacite/common.py000066400000000000000000000067071463331001200215540ustar00rootroot00000000000000from dataclasses import dataclass from datetime import datetime from enum import Enum from functools import partial from typing import Optional, Union import pyperf from dacite import Config, from_dict from benchmark.common import AbstractBenchmark class IssueState(Enum): OPEN = "open" CLOSED = "closed" class MilestoneState(Enum): OPEN = "open" CLOSED = "closed" class IssueStateReason(Enum): COMPLETED = "completed" REOPENED = "reopened" NOT_PLANNED = "not_planned" class AuthorAssociation(Enum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = "MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" @dataclass class User: login: str id: int node_id: str avatar_url: str gravatar_id: Optional[str] url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool name: Optional[str] = None email: Optional[str] = None starred_at: Optional[datetime] = None @dataclass class IssueLabel: id: int node_id: str url: str name: str description: Optional[str] color: Optional[str] default: bool @dataclass class Milestone: url: str html_url: str labels_url: str id: int node_id: str number: int title: str description: Optional[str] creator: Optional[User] open_issues: int closed_issues: int created_at: datetime updated_at: datetime closed_at: Optional[datetime] due_on: Optional[datetime] state: MilestoneState = MilestoneState.OPEN @dataclass class Reactions: url: str total_count: int laugh: int confused: int heart: int hooray: int eyes: int rocket: int # dacite doesn't have aliases, so we're using default plus_one: int = 0 minus_one: int = 0 @dataclass class Issue: id: int node_id: str url: str repository_url: str labels_url: str comments_url: str events_url: str html_url: str number: int state: IssueState state_reason: Optional[IssueStateReason] title: str body: Optional[str] user: Optional[User] labels: list[Union[IssueLabel, str]] assignee: Optional[User] assignees: Optional[list[User]] milestone: Optional[Milestone] locked: bool active_lock_reason: Optional[str] comments: int closed_at: Optional[datetime] created_at: datetime updated_at: datetime closed_by: Optional[User] author_association: AuthorAssociation draft: bool = False body_html: Optional[str] = None body_text: Optional[str] = None timeline_url: Optional[str] = None reactions: Optional[Reactions] = None class Benchmark(AbstractBenchmark): LIBRARY = "dacite" def __init__(self, runner: pyperf.Runner): 
super().__init__(runner) self._config = Config( type_hooks={datetime: datetime.fromisoformat}, cast=[Enum], check_types=False, ) def warmup(self, data) -> None: from_dict(Issue, data, config=self._config) def run_loader(self, data) -> pyperf.Benchmark: return self._bench_loader_func( partial(from_dict, Issue, config=self._config), data ) mashumaro-3.13.1/benchmark/libs/dacite/load.py000066400000000000000000000002101463331001200211620ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.dacite.common import Benchmark BenchmarkRunner(Benchmark).run("load") mashumaro-3.13.1/benchmark/libs/dataclasses_json/000077500000000000000000000000001463331001200217675ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/dataclasses_json/__init__.py000066400000000000000000000000001463331001200240660ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/dataclasses_json/common.py000066400000000000000000000104541463331001200236350ustar00rootroot00000000000000from dataclasses import dataclass, field from datetime import datetime from enum import Enum from typing import Optional import pyperf from dataclasses_json import DataClassJsonMixin, config from benchmark.common import AbstractBenchmark datetime_config = config( decoder=datetime.fromisoformat, encoder=datetime.isoformat ) optional_datetime_config = config( decoder=lambda o: datetime.fromisoformat(o) if o is not None else None, encoder=lambda o: o.isoformat() if isinstance(o, datetime) else None, ) enum_config = config(encoder=lambda o: o.value) optional_enum_config = config( encoder=lambda o: o.value if isinstance(o, Enum) else None ) class IssueState(Enum): OPEN = "open" CLOSED = "closed" class MilestoneState(Enum): OPEN = "open" CLOSED = "closed" class IssueStateReason(Enum): COMPLETED = "completed" REOPENED = "reopened" NOT_PLANNED = "not_planned" class AuthorAssociation(Enum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = "MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" @dataclass(slots=True) class User(DataClassJsonMixin): login: str id: int node_id: str avatar_url: str gravatar_id: Optional[str] url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool name: Optional[str] = None email: Optional[str] = None starred_at: Optional[datetime] = field( default=None, metadata=optional_datetime_config ) @dataclass(slots=True) class IssueLabel(DataClassJsonMixin): id: int node_id: str url: str name: str description: Optional[str] color: Optional[str] default: bool @dataclass(slots=True) class Milestone(DataClassJsonMixin): url: str html_url: str labels_url: str id: int node_id: str number: int title: str description: Optional[str] creator: Optional[User] open_issues: int closed_issues: int created_at: datetime = field(metadata=datetime_config) updated_at: datetime = field(metadata=datetime_config) closed_at: Optional[datetime] = field(metadata=optional_datetime_config) due_on: Optional[datetime] = field(metadata=optional_datetime_config) state: MilestoneState = MilestoneState.OPEN @dataclass(slots=True) class Reactions(DataClassJsonMixin): url: str total_count: int plus_one: int = field(metadata=config(field_name="+1")) minus_one: int = field(metadata=config(field_name="-1")) laugh: int confused: int heart: int hooray: int eyes: int 
rocket: int @dataclass(slots=True) class Issue(DataClassJsonMixin): id: int node_id: str url: str repository_url: str labels_url: str comments_url: str events_url: str html_url: str number: int state: IssueState = field(metadata=enum_config) state_reason: Optional[IssueStateReason] = field( metadata=optional_enum_config ) title: str body: Optional[str] user: Optional[User] labels: list[IssueLabel] assignee: Optional[User] assignees: Optional[list[User]] milestone: Optional[Milestone] locked: bool active_lock_reason: Optional[str] comments: int closed_at: Optional[datetime] = field(metadata=optional_datetime_config) created_at: datetime = field(metadata=datetime_config) updated_at: datetime = field(metadata=datetime_config) closed_by: Optional[User] author_association: AuthorAssociation = field(metadata=enum_config) draft: bool = False body_html: Optional[str] = None body_text: Optional[str] = None timeline_url: Optional[str] = None reactions: Optional[Reactions] = None class Benchmark(AbstractBenchmark): LIBRARY = "dataclasses-json" def warmup(self, data) -> None: Issue.from_dict(data).to_dict() def run_loader(self, data) -> pyperf.Benchmark: return self._bench_loader_func(Issue.from_dict, data) def run_dumper(self, data) -> pyperf.Benchmark: obj = Issue.from_dict(data) return self._bench_dumper_func(obj.to_dict) mashumaro-3.13.1/benchmark/libs/dataclasses_json/dump.py000066400000000000000000000002221463331001200233020ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.dataclasses_json.common import Benchmark BenchmarkRunner(Benchmark).run("dump") mashumaro-3.13.1/benchmark/libs/dataclasses_json/load.py000066400000000000000000000002221463331001200232540ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.dataclasses_json.common import Benchmark BenchmarkRunner(Benchmark).run("load") mashumaro-3.13.1/benchmark/libs/marshmallow/000077500000000000000000000000001463331001200207755ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/marshmallow/__init__.py000066400000000000000000000000001463331001200230740ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/marshmallow/common.py000066400000000000000000000173531463331001200226500ustar00rootroot00000000000000from dataclasses import dataclass from datetime import datetime from enum import Enum from typing import Optional, Union import pyperf from marshmallow import Schema, fields, post_load from benchmark.common import AbstractBenchmark class IssueState(Enum): OPEN = "open" CLOSED = "closed" class MilestoneState(Enum): OPEN = "open" CLOSED = "closed" class IssueStateReason(Enum): COMPLETED = "completed" REOPENED = "reopened" NOT_PLANNED = "not_planned" class AuthorAssociation(Enum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = "MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" @dataclass class User: login: str id: int node_id: str avatar_url: str gravatar_id: Optional[str] url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool name: Optional[str] = None email: Optional[str] = None starred_at: Optional[datetime] = None @dataclass class IssueLabel: id: int node_id: str url: str name: str description: Optional[str] color: Optional[str] default: bool @dataclass class Milestone: 
url: str html_url: str labels_url: str id: int node_id: str number: int title: str description: Optional[str] creator: Optional[User] open_issues: int closed_issues: int created_at: datetime updated_at: datetime closed_at: Optional[datetime] due_on: Optional[datetime] state: MilestoneState = MilestoneState.OPEN @dataclass class Reactions: url: str total_count: int plus_one: int minus_one: int laugh: int confused: int heart: int hooray: int eyes: int rocket: int @dataclass class Issue: id: int node_id: str url: str repository_url: str labels_url: str comments_url: str events_url: str html_url: str number: int state: IssueState state_reason: Optional[IssueStateReason] title: str body: Optional[str] user: Optional[User] labels: list[Union[IssueLabel, str]] assignee: Optional[User] assignees: Optional[list[User]] milestone: Optional[Milestone] locked: bool active_lock_reason: Optional[str] comments: int closed_at: Optional[datetime] created_at: datetime updated_at: datetime closed_by: Optional[User] author_association: AuthorAssociation draft: bool = False body_html: Optional[str] = None body_text: Optional[str] = None timeline_url: Optional[str] = None reactions: Optional[Reactions] = None class UserSchema(Schema): login = fields.Str() id = fields.Int() node_id = fields.Str() avatar_url = fields.Str() gravatar_id = fields.Str(allow_none=True) url = fields.Str() html_url = fields.Str() followers_url = fields.Str() following_url = fields.Str() gists_url = fields.Str() starred_url = fields.Str() subscriptions_url = fields.Str() organizations_url = fields.Str() repos_url = fields.Str() events_url = fields.Str() received_events_url = fields.Str() type = fields.Str() site_admin = fields.Bool() name = fields.Str(allow_none=True, load_default=True) email = fields.Str(allow_none=True, load_default=True) starred_at = fields.DateTime(allow_none=True, load_default=None) @post_load def _make_model(self, data, **kwargs): return User(**data) class IssueLabelSchema(Schema): id = fields.Int() node_id = fields.Str() url = fields.Str() name = fields.Str() description = fields.Str(allow_none=True) color = fields.Str(allow_none=True) default = fields.Bool() @post_load def _make_model(self, data, **kwargs): return IssueLabel(**data) class MilestoneSchema(Schema): url = fields.Str() html_url = fields.Str() labels_url = fields.Str() id = fields.Int() node_id = fields.Str() number = fields.Int() title = fields.Str() description = fields.Str(allow_none=True) creator = fields.Nested(UserSchema(), allow_none=True) open_issues = fields.Int() closed_issues = fields.Int() created_at = fields.DateTime() updated_at = fields.DateTime() closed_at = fields.DateTime(allow_none=True) due_on = fields.DateTime(allow_none=True) state = fields.Enum( MilestoneState, load_default=MilestoneState.OPEN, by_value=True ) @post_load def _make_model(self, data, **kwargs): return Milestone(**data) class ReactionsSchema(Schema): url = fields.Str() total_count = fields.Int() plus_one = fields.Int(data_key="+1") minus_one = fields.Int(data_key="-1") laugh = fields.Int() confused = fields.Int() heart = fields.Int() hooray = fields.Int() eyes = fields.Int() rocket = fields.Int() @post_load def _make_model(self, data, **kwargs): return Reactions(**data) class IssueLabelSchemaOrStr(fields.Method): def __init__(self): self._issue_label_schema = IssueLabelSchema() super().__init__() def _deserialize(self, value, attr, data, **kwargs): try: return self._issue_label_schema.load(value) except ValueError: return str(value) def _serialize(self, value, attr, 
obj, **kwargs): return ( self._issue_label_schema.dump(value) if isinstance(value, IssueLabel) else value ) class IssueSchema(Schema): id = fields.Int() node_id = fields.Str() url = fields.Str() repository_url = fields.Str() labels_url = fields.Str() comments_url = fields.Str() events_url = fields.Str() html_url = fields.Str() number = fields.Int() state = fields.Enum(IssueState, by_value=True) state_reason = fields.Enum( IssueStateReason, allow_none=True, by_value=True ) title = fields.Str() body = fields.Str(allow_none=True) user = fields.Nested(UserSchema(), allow_none=True) labels = fields.List(IssueLabelSchemaOrStr()) assignee = fields.Nested(UserSchema(), allow_none=True) assignees = fields.List(fields.Nested(UserSchema())) milestone = fields.Nested(MilestoneSchema(), allow_none=True) locked = fields.Bool() active_lock_reason = fields.Str(allow_none=True) comments = fields.Int() closed_at = fields.DateTime(allow_none=True) created_at = fields.DateTime() updated_at = fields.DateTime() closed_by = fields.Nested(UserSchema(), allow_none=True) author_association = fields.Enum(AuthorAssociation, by_value=True) draft = fields.Bool(load_default=False) body_html = fields.Str(allow_none=True, load_default=None) body_text = fields.Str(allow_none=True, load_default=None) timeline_url = fields.Str(allow_none=True, load_default=None) reactions = fields.Nested( ReactionsSchema(), allow_none=True, load_default=None ) @post_load def _make_model(self, data, **kwargs): return Issue(**data) class Benchmark(AbstractBenchmark): LIBRARY = "marshmallow" def __init__(self, runner: pyperf.Runner): super().__init__(runner) self._issue_schema = IssueSchema() def warmup(self, data) -> None: self._issue_schema.dump(self._issue_schema.load(data)) def run_loader(self, data) -> pyperf.Benchmark: return self._bench_loader_func(self._issue_schema.load, data) def run_dumper(self, data) -> pyperf.Benchmark: obj = self._issue_schema.load(data) return self._bench_dumper_func(self._issue_schema.dump, obj) mashumaro-3.13.1/benchmark/libs/marshmallow/dump.py000066400000000000000000000002151463331001200223120ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.marshmallow.common import Benchmark BenchmarkRunner(Benchmark).run("dump") mashumaro-3.13.1/benchmark/libs/marshmallow/load.py000066400000000000000000000002151463331001200222640ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.marshmallow.common import Benchmark BenchmarkRunner(Benchmark).run("load") mashumaro-3.13.1/benchmark/libs/mashumaro/000077500000000000000000000000001463331001200204435ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/mashumaro/__init__.py000066400000000000000000000000001463331001200225420ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/mashumaro/common.py000066400000000000000000000076511463331001200223160ustar00rootroot00000000000000from dataclasses import dataclass, field from datetime import datetime from enum import Enum from typing import Optional, Union import pyperf from benchmark.common import AbstractBenchmark from mashumaro import field_options, pass_through from mashumaro.codecs import BasicDecoder, BasicEncoder from mashumaro.dialect import Dialect class DefaultDialect(Dialect): serialize_by_alias = True serialization_strategy = { str: {"deserialize": str, "serialize": pass_through}, int: {"serialize": pass_through}, } class IssueState(Enum): OPEN = "open" CLOSED = "closed" class MilestoneState(Enum): OPEN = "open" CLOSED = 
"closed" class IssueStateReason(Enum): COMPLETED = "completed" REOPENED = "reopened" NOT_PLANNED = "not_planned" class AuthorAssociation(Enum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = "MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" @dataclass(slots=True) class User: login: str id: int node_id: str avatar_url: str gravatar_id: Optional[str] url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool name: Optional[str] = None email: Optional[str] = None starred_at: Optional[datetime] = None @dataclass(slots=True) class IssueLabel: id: int node_id: str url: str name: str description: Optional[str] color: Optional[str] default: bool @dataclass(slots=True) class Milestone: url: str html_url: str labels_url: str id: int node_id: str number: int title: str description: Optional[str] creator: Optional[User] open_issues: int closed_issues: int created_at: datetime updated_at: datetime closed_at: Optional[datetime] due_on: Optional[datetime] state: MilestoneState = MilestoneState.OPEN @dataclass(slots=True) class Reactions: url: str total_count: int plus_one: int = field(metadata=field_options(alias="+1")) minus_one: int = field(metadata=field_options(alias="-1")) laugh: int confused: int heart: int hooray: int eyes: int rocket: int @dataclass(slots=True) class Issue: id: int node_id: str url: str repository_url: str labels_url: str comments_url: str events_url: str html_url: str number: int state: IssueState state_reason: Optional[IssueStateReason] title: str body: Optional[str] user: Optional[User] labels: list[Union[IssueLabel, str]] assignee: Optional[User] assignees: Optional[list[User]] milestone: Optional[Milestone] locked: bool active_lock_reason: Optional[str] comments: int closed_at: Optional[datetime] created_at: datetime updated_at: datetime closed_by: Optional[User] author_association: AuthorAssociation draft: bool = False body_html: Optional[str] = None body_text: Optional[str] = None timeline_url: Optional[str] = None reactions: Optional[Reactions] = None class Benchmark(AbstractBenchmark): LIBRARY = "mashumaro" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.decoder = BasicDecoder(Issue, default_dialect=DefaultDialect) self.encoder = BasicEncoder(Issue, default_dialect=DefaultDialect) def warmup(self, data) -> None: self.encoder.encode(self.decoder.decode(data)) def run_loader(self, data) -> pyperf.Benchmark: return self._bench_loader_func(self.decoder.decode, data) def run_dumper(self, data) -> pyperf.Benchmark: obj = self.decoder.decode(data) return self._bench_dumper_func(self.encoder.encode, obj) mashumaro-3.13.1/benchmark/libs/mashumaro/dump.py000066400000000000000000000002131463331001200217560ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.mashumaro.common import Benchmark BenchmarkRunner(Benchmark).run("dump") mashumaro-3.13.1/benchmark/libs/mashumaro/load.py000066400000000000000000000002131463331001200217300ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.mashumaro.common import Benchmark BenchmarkRunner(Benchmark).run("load") 
mashumaro-3.13.1/benchmark/libs/pydantic_v1/000077500000000000000000000000001463331001200206705ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/pydantic_v1/__init__.py000066400000000000000000000000001463331001200227670ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/pydantic_v1/common.py000066400000000000000000000064631463331001200225430ustar00rootroot00000000000000from datetime import datetime from enum import Enum from typing import Any, Dict, Optional, Union import pyperf from pydantic.v1 import BaseModel, Field from benchmark.common import AbstractBenchmark class IssueState(Enum): OPEN = "open" CLOSED = "closed" class MilestoneState(Enum): OPEN = "open" CLOSED = "closed" class IssueStateReason(Enum): COMPLETED = "completed" REOPENED = "reopened" NOT_PLANNED = "not_planned" class AuthorAssociation(Enum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = "MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" class User(BaseModel): login: str id: int node_id: str avatar_url: str gravatar_id: Optional[str] url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool name: Optional[str] = None email: Optional[str] = None starred_at: Optional[datetime] = None class IssueLabel(BaseModel): id: int node_id: str url: str name: str description: Optional[str] color: Optional[str] default: bool class Milestone(BaseModel): url: str html_url: str labels_url: str id: int node_id: str number: int title: str description: Optional[str] creator: Optional[User] open_issues: int closed_issues: int created_at: datetime updated_at: datetime closed_at: Optional[datetime] due_on: Optional[datetime] state: MilestoneState = MilestoneState.OPEN class Reactions(BaseModel): url: str total_count: int plus_one: int = Field(..., alias="+1") minus_one: int = Field(..., alias="-1") laugh: int confused: int heart: int hooray: int eyes: int rocket: int class Issue(BaseModel): id: int node_id: str url: str repository_url: str labels_url: str comments_url: str events_url: str html_url: str number: int state: IssueState state_reason: Optional[IssueStateReason] title: str body: Optional[str] user: Optional[User] labels: list[Union[IssueLabel, str]] assignee: Optional[User] assignees: Optional[list[User]] milestone: Optional[Milestone] locked: bool active_lock_reason: Optional[str] comments: int closed_at: Optional[datetime] created_at: datetime updated_at: datetime closed_by: Optional[User] author_association: AuthorAssociation draft: bool = False body_html: Optional[str] = None body_text: Optional[str] = None timeline_url: Optional[str] = None reactions: Optional[Reactions] = None class Benchmark(AbstractBenchmark): LIBRARY = "pydantic v1" def warmup(self, data: Dict[str, Any]) -> None: Issue(**data).dict(by_alias=True) def run_loader(self, data: Dict[str, Any]) -> pyperf.Benchmark: return self._bench_loader_func(lambda data: Issue(**data), data) def run_dumper(self, data: Dict[str, Any]) -> pyperf.Benchmark: obj = Issue(**data) return self._bench_dumper_func(lambda: obj.dict(by_alias=True)) mashumaro-3.13.1/benchmark/libs/pydantic_v1/dump.py000066400000000000000000000002151463331001200222050ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.pydantic_v1.common import Benchmark 
BenchmarkRunner(Benchmark).run("dump") mashumaro-3.13.1/benchmark/libs/pydantic_v1/load.py000066400000000000000000000002151463331001200221570ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.pydantic_v1.common import Benchmark BenchmarkRunner(Benchmark).run("load") mashumaro-3.13.1/benchmark/libs/pydantic_v2/000077500000000000000000000000001463331001200206715ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/pydantic_v2/__init__.py000066400000000000000000000000001463331001200227700ustar00rootroot00000000000000mashumaro-3.13.1/benchmark/libs/pydantic_v2/common.py000066400000000000000000000065371463331001200225460ustar00rootroot00000000000000from datetime import datetime from enum import Enum from typing import Any, Dict, Optional, Union import pyperf from pydantic import BaseModel, Field from benchmark.common import AbstractBenchmark class IssueState(Enum): OPEN = "open" CLOSED = "closed" class MilestoneState(Enum): OPEN = "open" CLOSED = "closed" class IssueStateReason(Enum): COMPLETED = "completed" REOPENED = "reopened" NOT_PLANNED = "not_planned" class AuthorAssociation(Enum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = "MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" class User(BaseModel): login: str id: int node_id: str avatar_url: str gravatar_id: Optional[str] url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool name: Optional[str] = None email: Optional[str] = None starred_at: Optional[datetime] = None class IssueLabel(BaseModel): id: int node_id: str url: str name: str description: Optional[str] color: Optional[str] default: bool class Milestone(BaseModel): url: str html_url: str labels_url: str id: int node_id: str number: int title: str description: Optional[str] creator: Optional[User] open_issues: int closed_issues: int created_at: datetime updated_at: datetime closed_at: Optional[datetime] due_on: Optional[datetime] state: MilestoneState = MilestoneState.OPEN class Reactions(BaseModel): url: str total_count: int plus_one: int = Field(..., alias="+1") minus_one: int = Field(..., alias="-1") laugh: int confused: int heart: int hooray: int eyes: int rocket: int class Issue(BaseModel): id: int node_id: str url: str repository_url: str labels_url: str comments_url: str events_url: str html_url: str number: int state: IssueState state_reason: Optional[IssueStateReason] title: str body: Optional[str] user: Optional[User] labels: list[Union[IssueLabel, str]] assignee: Optional[User] assignees: Optional[list[User]] milestone: Optional[Milestone] locked: bool active_lock_reason: Optional[str] comments: int closed_at: Optional[datetime] created_at: datetime updated_at: datetime closed_by: Optional[User] author_association: AuthorAssociation draft: bool = False body_html: Optional[str] = None body_text: Optional[str] = None timeline_url: Optional[str] = None reactions: Optional[Reactions] = None class Benchmark(AbstractBenchmark): LIBRARY = "pydantic v2" def warmup(self, data: Dict[str, Any]) -> None: Issue(**data).model_dump(by_alias=True) def run_loader(self, data: Dict[str, Any]) -> pyperf.Benchmark: return self._bench_loader_func(lambda data: Issue(**data), data) def run_dumper(self, data: Dict[str, Any]) -> pyperf.Benchmark: obj = Issue(**data) return 
self._bench_dumper_func( lambda: obj.model_dump(by_alias=True, mode="json") ) mashumaro-3.13.1/benchmark/libs/pydantic_v2/dump.py000066400000000000000000000002151463331001200222060ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.pydantic_v2.common import Benchmark BenchmarkRunner(Benchmark).run("dump") mashumaro-3.13.1/benchmark/libs/pydantic_v2/load.py000066400000000000000000000002151463331001200221600ustar00rootroot00000000000000from benchmark.common import BenchmarkRunner from benchmark.libs.pydantic_v2.common import Benchmark BenchmarkRunner(Benchmark).run("load") mashumaro-3.13.1/benchmark/prepare_svg_for_darkness.py000066400000000000000000000011171463331001200231450ustar00rootroot00000000000000import argparse if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("input", help="input file name") parser.add_argument("output", help="output file name") args = parser.parse_args() if not args.input or not args.output: print("Please provide both input and output file names.") exit(1) # Replace all occurrences of "#333333" with "#C9D1D9" for dark theme with open(args.input) as infile, open(args.output, "w") as outfile: for line in infile: outfile.write(line.replace("#333333", "#C9D1D9")) mashumaro-3.13.1/benchmark/run.sh000077500000000000000000000013521463331001200166620ustar00rootroot00000000000000#!/bin/bash set -e export PYTHONPATH=$PYTHONPATH:. rm -rf benchmark/data/results mkdir -p benchmark/data/results for library_name in mashumaro cattrs pydantic_v2 pydantic_v1 marshmallow dataclasses_json dacite asdict; do if [ -f "benchmark/libs/$library_name/load.py" ]; then python benchmark/libs/$library_name/load.py -o benchmark/data/results/load_$library_name.json fi if [ -f "benchmark/libs/$library_name/dump.py" ]; then python benchmark/libs/$library_name/dump.py -o benchmark/data/results/dump_$library_name.json fi done python benchmark/create_chart_specs.py echo "You can now render chart specs with https://vega.github.io/editor/ * benchmark/data/spec_load.json * benchmark/data/spec_dump.json" mashumaro-3.13.1/docs/000077500000000000000000000000001463331001200145145ustar00rootroot00000000000000mashumaro-3.13.1/docs/2to3.md000066400000000000000000000140451463331001200156310ustar00rootroot00000000000000Migration from version 2 to version 3 -------------------------------------------------------------------------------- * [Moving serialization format mixins](#moving-serialization-format-mixins) * [Removing `use_bytes` parameter](#removing-use_bytes-parameter) * [Removing `use_enum` parameter](#removing-use_enum-parameter) * [Removing `use_datetime` parameter](#removing-use_datetime-parameter) * [Changing `from_json`, `from_msgpack`, `from_yaml` signature](#changing-from_json-from_msgpack-from_yaml-signature) * [Changing `to_json`, `to_msgpack`, `to_yaml` signature](#changing-to_json-to_msgpack-to_yaml-signature) ### Moving serialization format mixins You might need to alter your imports if you've used the following mixins: * `DataClassJSONMixin` * `DataClassMessagePackMixin` * `DataClassYAMLMixin` The new imports will look like this: ```python from mashumaro.mixins.json import DataClassJSONMixin from mashumaro.mixins.msgpack import DataClassMessagePackMixin from mashumaro.mixins.yaml import DataClassYAMLMixin ``` ### Removing `use_bytes` parameter Parameter `use_bytes` was removed from `from_dict` / `to_dict` methods.
If you've used it to pass bytes or bytearray values as is, you can do the same with [dialect](https://github.com/Fatal1ty/mashumaro#dialects) and [pass_through](https://github.com/Fatal1ty/mashumaro/tree/master#passing-field-values-as-is) features: ```python from dataclasses import dataclass from mashumaro import DataClassDictMixin, pass_through from mashumaro.config import BaseConfig, ADD_DIALECT_SUPPORT from mashumaro.dialect import Dialect class BytesDialect(Dialect): serialization_strategy = { bytes: pass_through, bytearray: pass_through, } @dataclass class A(DataClassDictMixin): bytes: bytes bytearray: bytearray class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] obj = A(b"\x00", bytearray(b"\x00")) dct = {"bytes": b"\x00", "bytearray": bytearray(b"\x00")} assert A.from_dict(dct, dialect=BytesDialect) == obj assert obj.to_dict(dialect=BytesDialect) == dct ``` ### Removing `use_enum` parameter Parameter `use_enum` was removed from `from_dict` / `to_dict` methods. If you've used it to pass enum values as is, you can do the same with [dialect](https://github.com/Fatal1ty/mashumaro#dialects) and [pass_through](https://github.com/Fatal1ty/mashumaro/tree/master#passing-field-values-as-is) features: ```python from dataclasses import dataclass from enum import Enum from mashumaro import DataClassDictMixin, pass_through from mashumaro.config import BaseConfig, ADD_DIALECT_SUPPORT from mashumaro.dialect import Dialect class MyEnum(Enum): a = 1 b = 2 class EnumDialect(Dialect): serialization_strategy = { MyEnum: pass_through, } @dataclass class A(DataClassDictMixin): my_enum: MyEnum class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] obj = A(MyEnum.a) dct = {"my_enum": MyEnum.a} assert A.from_dict(dct, dialect=EnumDialect) == obj assert obj.to_dict(dialect=EnumDialect) == dct ``` ### Removing `use_datetime` parameter Parameter `use_datetime` was removed from `from_dict` / `to_dict` methods. 
If you've used it to pass datetime, date and time values as is, you can do the same with [dialect](https://github.com/Fatal1ty/mashumaro#dialects) and [pass_through](https://github.com/Fatal1ty/mashumaro/tree/master#passing-field-values-as-is) features: ```python from dataclasses import dataclass from datetime import date, datetime, time from mashumaro import DataClassDictMixin, pass_through from mashumaro.config import BaseConfig, ADD_DIALECT_SUPPORT from mashumaro.dialect import Dialect class DatetimeDialect(Dialect): serialization_strategy = { date: pass_through, datetime: pass_through, time: pass_through, } @dataclass class A(DataClassDictMixin): datetime: datetime date: date time: time class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] obj = A( datetime=datetime(2022, 2, 9, 12, 0), date=date(2022, 2, 9), time=time(12, 0), ) dct = { "datetime": datetime(2022, 2, 9, 12, 0), "date": date(2022, 2, 9), "time": time(12, 0), } assert A.from_dict(dct, dialect=DatetimeDialect) == obj assert obj.to_dict(dialect=DatetimeDialect) == dct ``` ### Changing `from_json`, `from_msgpack`, `from_yaml` signature In version 2 methods `from_json`, `from_msgpack`, `from_yaml` had the following signature: ```python @classmethod def from_*( # where * is json, msgpack, yaml cls, data: EncodedData, decoder: Decoder = ..., dict_params: Mapping = ..., **decoder_kwargs, ) ``` In version 3 these methods have a slightly different signature: ```python @classmethod def from_*( # where * is json, msgpack, yaml cls, data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs, ) ``` As you can see, the `dict_params` positional argument was removed in order to pass keyword arguments to the underlying `from_dict` method. Decoder parameters were removed because they can be easily passed to the decoder using a lambda function, a partial object or something else: ```python A.from_json( data, decoder=lambda data: json.loads(data, parse_float=decimal.Decimal), ) ``` ### Changing `to_json`, `to_msgpack`, `to_yaml` signature In version 2 methods `to_json`, `to_msgpack`, `to_yaml` had the following signature: ```python def to_*( # where * is json, msgpack, yaml self, encoder: Encoder = ..., dict_params: Mapping = ..., **encoder_kwargs, ) ``` In version 3 these methods have a slightly different signature: ```python def to_*( # where * is json, msgpack, yaml self, encoder: Encoder = ..., **to_dict_kwargs, ) ``` As you can see, the `dict_params` positional argument was removed in order to pass keyword arguments to the underlying `to_dict` method. Encoder parameters were removed because they can be easily passed to the encoder using a lambda function, a partial object or something else: ```python dataclass_obj.to_json( encoder=lambda data: json.dumps(data, ensure_ascii=False), ) ``` mashumaro-3.13.1/img/000077500000000000000000000000001463331001200143405ustar00rootroot00000000000000mashumaro-3.13.1/img/logo.svg000066400000000000000000000214071463331001200160250ustar00rootroot00000000000000 mashumaro-3.13.1/justfile000066400000000000000000000010031463331001200153300ustar00rootroot00000000000000default: build lint build: pip install -r requirements-dev.txt pip install -e . lint: ruff check mashumaro black --check mashumaro tests mypy mashumaro codespell mashumaro tests README.md .github/*.md format: black mashumaro tests isort mashumaro tests test: pytest tests test-with-coverage: pytest --cov .
tests benchmark: ./benchmark/run.sh clean: rm -rf benchmark/data/results rm -f benchmark/data/spec_dump.json rm -f benchmark/data/spec_load.json mashumaro-3.13.1/mashumaro/000077500000000000000000000000001463331001200155605ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/__init__.py000066400000000000000000000004021463331001200176650ustar00rootroot00000000000000from mashumaro.exceptions import MissingField from mashumaro.helper import field_options, pass_through from mashumaro.mixins.dict import DataClassDictMixin __all__ = [ "MissingField", "DataClassDictMixin", "field_options", "pass_through", ] mashumaro-3.13.1/mashumaro/codecs/000077500000000000000000000000001463331001200170205ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/codecs/__init__.py000066400000000000000000000001451463331001200211310ustar00rootroot00000000000000from .basic import BasicDecoder, BasicEncoder __all__ = [ "BasicDecoder", "BasicEncoder", ] mashumaro-3.13.1/mashumaro/codecs/_builder.py000066400000000000000000000077061463331001200211710ustar00rootroot00000000000000import re from typing import Any, Callable, Optional, Type from mashumaro.core.meta.code.builder import CodeBuilder from mashumaro.core.meta.helpers import is_optional, is_type_var_any from mashumaro.core.meta.types.common import ( AttrsHolder, FieldContext, ValueSpec, ) from mashumaro.core.meta.types.pack import PackerRegistry from mashumaro.core.meta.types.unpack import UnpackerRegistry CALL_EXPR = re.compile(r"^([^ ]+)\(value\)$") class CodecCodeBuilder(CodeBuilder): @classmethod def new(cls, **kwargs: Any) -> "CodecCodeBuilder": if "attrs" not in kwargs: kwargs["attrs"] = AttrsHolder() return cls(AttrsHolder("__root__"), **kwargs) # type: ignore def add_decode_method( self, shape_type: Type, decoder_obj: Any, pre_decoder_func: Optional[Callable[[Any], Any]] = None, ) -> None: self.reset() with self.indent("def decode(value):"): if pre_decoder_func: self.ensure_object_imported(pre_decoder_func, "decoder") self.add_line("value = decoder(value)") could_be_none = ( shape_type in (Any, type(None), None) or is_type_var_any(self.get_real_type("", shape_type)) or is_optional( shape_type, self.get_field_resolved_type_params("") ) ) unpacked_value = UnpackerRegistry.get( ValueSpec( type=shape_type, expression="value", builder=self, field_ctx=FieldContext(name="", metadata={}), could_be_none=could_be_none, ) ) self.add_line(f"return {unpacked_value}") self.add_line("setattr(decoder_obj, 'decode', decode)") if pre_decoder_func is None: m = CALL_EXPR.match(unpacked_value) if m: method_name = m.group(1) self.lines.reset() self.add_line(f"setattr(decoder_obj, 'decode', {method_name})") self.ensure_object_imported(decoder_obj, "decoder_obj") self.ensure_object_imported(self.cls, "cls") self.compile() def add_encode_method( self, shape_type: Type, encoder_obj: Any, post_encoder_func: Optional[Callable[[Any], Any]] = None, ) -> None: self.reset() with self.indent("def encode(value):"): could_be_none = ( shape_type in (Any, type(None), None) or is_type_var_any(self.get_real_type("", shape_type)) or is_optional( shape_type, self.get_field_resolved_type_params("") ) ) packed_value = PackerRegistry.get( ValueSpec( type=shape_type, expression="value", builder=self, field_ctx=FieldContext(name="", metadata={}), could_be_none=could_be_none, no_copy_collections=self.get_dialect_or_config_option( "no_copy_collections", () ), ) ) if post_encoder_func: self.ensure_object_imported(post_encoder_func, "encoder") self.add_line(f"return encoder({packed_value})") else: 
self.add_line(f"return {packed_value}") self.add_line("setattr(encoder_obj, 'encode', encode)") if post_encoder_func is None: m = CALL_EXPR.match(packed_value) if m: method_name = m.group(1) self.lines.reset() self.add_line(f"setattr(encoder_obj, 'encode', {method_name})") self.ensure_object_imported(encoder_obj, "encoder_obj") self.ensure_object_imported(self.cls, "cls") self.ensure_object_imported(self.cls, "self") self.compile() mashumaro-3.13.1/mashumaro/codecs/basic.py000066400000000000000000000046701463331001200204620ustar00rootroot00000000000000from typing import ( Any, Callable, Generic, Optional, Type, TypeVar, Union, final, overload, ) from mashumaro.codecs._builder import CodecCodeBuilder from mashumaro.core.meta.helpers import get_args from mashumaro.dialect import Dialect T = TypeVar("T") class BasicDecoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[Callable[[Any], Any]] = None, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[Callable[[Any], Any]] = None, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[Callable[[Any], Any]] = None, ): code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_decode_method(shape_type, self, pre_decoder_func) @final def decode(self, data: Any) -> T: ... class BasicEncoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[Callable[[Any], Any]] = None, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[Callable[[Any], Any]] = None, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[Callable[[Any], Any]] = None, ): code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_encode_method(shape_type, self, post_encoder_func) @final def encode(self, obj: T) -> Any: ... def decode(data: Any, shape_type: Union[Type[T], Any]) -> T: return BasicDecoder(shape_type).decode(data) def encode(obj: T, shape_type: Union[Type[T], Any]) -> Any: return BasicEncoder(shape_type).encode(obj) __all__ = [ "BasicDecoder", "BasicEncoder", "decode", "encode", ] mashumaro-3.13.1/mashumaro/codecs/json.py000066400000000000000000000055121463331001200203460ustar00rootroot00000000000000import json from typing import ( Any, Callable, Generic, Optional, Type, TypeVar, Union, final, overload, ) from mashumaro.codecs._builder import CodecCodeBuilder from mashumaro.core.meta.helpers import get_args from mashumaro.dialect import Dialect T = TypeVar("T") EncodedData = Union[str, bytes, bytearray] class JSONDecoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Callable[[EncodedData], Any] = json.loads, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Callable[[EncodedData], Any] = json.loads, ): ... 
def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Callable[[EncodedData], Any] = json.loads, ): code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_decode_method(shape_type, self, pre_decoder_func) @final def decode(self, data: EncodedData) -> T: ... class JSONEncoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Callable[[Any], str] = json.dumps, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Callable[[Any], str] = json.dumps, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Callable[[Any], str] = json.dumps, ): code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_encode_method(shape_type, self, post_encoder_func) @final def encode(self, obj: T) -> str: ... def json_decode( data: EncodedData, shape_type: Union[Type[T], Any], pre_decoder_func: Callable[[EncodedData], Any] = json.loads, ) -> T: return JSONDecoder(shape_type, pre_decoder_func=pre_decoder_func).decode( data ) def json_encode( obj: T, shape_type: Union[Type[T], Any], post_encoder_func: Callable[[Any], str] = json.dumps, ) -> str: return JSONEncoder(shape_type, post_encoder_func=post_encoder_func).encode( obj ) decode = json_decode encode = json_encode __all__ = [ "JSONDecoder", "JSONEncoder", "json_decode", "json_encode", "decode", "encode", ] mashumaro-3.13.1/mashumaro/codecs/msgpack.py000066400000000000000000000065531463331001200210300ustar00rootroot00000000000000from typing import ( Any, Callable, Generic, Optional, Type, TypeVar, Union, final, overload, ) import msgpack from mashumaro.codecs._builder import CodecCodeBuilder from mashumaro.core.meta.helpers import get_args from mashumaro.dialect import Dialect from mashumaro.mixins.msgpack import MessagePackDialect T = TypeVar("T") EncodedData = bytes PostEncoderFunc = Callable[[Any], EncodedData] PreDecoderFunc = Callable[[EncodedData], Any] def _default_decoder(data: EncodedData) -> Any: return msgpack.unpackb(data, raw=False) def _default_encoder(data: Any) -> EncodedData: return msgpack.packb(data, use_bin_type=True) class MessagePackDecoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[PreDecoderFunc] = _default_decoder, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[PreDecoderFunc] = _default_decoder, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[PreDecoderFunc] = _default_decoder, ): if default_dialect is not None: default_dialect = MessagePackDialect.merge(default_dialect) else: default_dialect = MessagePackDialect code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_decode_method(shape_type, self, pre_decoder_func) @final def decode(self, data: EncodedData) -> T: ... class MessagePackEncoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[PostEncoderFunc] = _default_encoder, ): ... 
@overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[PostEncoderFunc] = _default_encoder, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[PostEncoderFunc] = _default_encoder, ): if default_dialect is not None: default_dialect = MessagePackDialect.merge(default_dialect) else: default_dialect = MessagePackDialect code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_encode_method(shape_type, self, post_encoder_func) @final def encode(self, obj: T) -> EncodedData: ... def msgpack_decode(data: EncodedData, shape_type: Union[Type[T], Any]) -> T: return MessagePackDecoder(shape_type).decode(data) def msgpack_encode(obj: T, shape_type: Union[Type[T], Any]) -> EncodedData: return MessagePackEncoder(shape_type).encode(obj) decode = msgpack_decode encode = msgpack_encode __all__ = [ "MessagePackDecoder", "MessagePackEncoder", "msgpack_decode", "msgpack_encode", "decode", "encode", ] mashumaro-3.13.1/mashumaro/codecs/orjson.py000066400000000000000000000050701463331001200207060ustar00rootroot00000000000000from typing import ( Any, Generic, Optional, Type, TypeVar, Union, final, overload, ) import orjson from mashumaro.codecs._builder import CodecCodeBuilder from mashumaro.core.meta.helpers import get_args from mashumaro.dialect import Dialect from mashumaro.mixins.orjson import OrjsonDialect T = TypeVar("T") EncodedData = Union[str, bytes, bytearray] class ORJSONDecoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, ): if default_dialect is not None: default_dialect = OrjsonDialect.merge(default_dialect) else: default_dialect = OrjsonDialect code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_decode_method(shape_type, self, orjson.loads) @final def decode(self, data: EncodedData) -> T: ... class ORJSONEncoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, ): if default_dialect is not None: default_dialect = OrjsonDialect.merge(default_dialect) else: default_dialect = OrjsonDialect code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_encode_method(shape_type, self, orjson.dumps) @final def encode(self, obj: T) -> bytes: ... 
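# NOTE (editorial comment, not in the upstream source): the module-level
# helpers below create a fresh codec on every call, and codec construction
# compiles the (de)serialization code via CodecCodeBuilder. For hot paths it
# is cheaper to instantiate ORJSONDecoder/ORJSONEncoder once and reuse it.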
def json_decode(data: EncodedData, shape_type: Type[T]) -> T: return ORJSONDecoder(shape_type).decode(data) def json_encode(obj: T, shape_type: Union[Type[T], Any]) -> bytes: return ORJSONEncoder(shape_type).encode(obj) decode = json_decode encode = json_encode __all__ = [ "ORJSONDecoder", "ORJSONEncoder", "json_decode", "json_encode", "decode", "encode", ] mashumaro-3.13.1/mashumaro/codecs/toml.py000066400000000000000000000051531463331001200203510ustar00rootroot00000000000000from typing import ( Any, Generic, Optional, Type, TypeVar, Union, final, overload, ) import tomli_w from mashumaro.codecs._builder import CodecCodeBuilder from mashumaro.core.meta.helpers import get_args from mashumaro.dialect import Dialect from mashumaro.mixins.toml import TOMLDialect try: import tomllib except ModuleNotFoundError: import tomli as tomllib # type: ignore T = TypeVar("T") EncodedData = str class TOMLDecoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, ): if default_dialect is not None: default_dialect = TOMLDialect.merge(default_dialect) else: default_dialect = TOMLDialect code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_decode_method(shape_type, self, tomllib.loads) @final def decode(self, data: EncodedData) -> T: ... class TOMLEncoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, ): if default_dialect is not None: default_dialect = TOMLDialect.merge(default_dialect) else: default_dialect = TOMLDialect code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_encode_method(shape_type, self, tomli_w.dumps) @final def encode(self, obj: T) -> bytes: ... 
def toml_decode(data: EncodedData, shape_type: Type[T]) -> T: return TOMLDecoder(shape_type).decode(data) def toml_encode(obj: T, shape_type: Union[Type[T], Any]) -> bytes: return TOMLEncoder(shape_type).encode(obj) decode = toml_decode encode = toml_encode __all__ = [ "TOMLDecoder", "TOMLEncoder", "toml_decode", "toml_encode", "decode", "encode", ] mashumaro-3.13.1/mashumaro/codecs/yaml.py000066400000000000000000000060261463331001200203400ustar00rootroot00000000000000from typing import ( Any, Callable, Generic, Optional, Type, TypeVar, Union, final, overload, ) import yaml from mashumaro.codecs._builder import CodecCodeBuilder from mashumaro.core.meta.helpers import get_args from mashumaro.dialect import Dialect T = TypeVar("T") EncodedData = Union[str, bytes] PostEncoderFunc = Callable[[Any], EncodedData] PreDecoderFunc = Callable[[EncodedData], Any] DefaultLoader = getattr(yaml, "CSafeLoader", yaml.SafeLoader) DefaultDumper = getattr(yaml, "CDumper", yaml.Dumper) def _default_encoder(data: Any) -> EncodedData: return yaml.dump(data, Dumper=DefaultDumper) def _default_decoder(data: EncodedData) -> Any: return yaml.load(data, DefaultLoader) class YAMLDecoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[PreDecoderFunc] = _default_decoder, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[PreDecoderFunc] = _default_decoder, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, pre_decoder_func: Optional[PreDecoderFunc] = _default_decoder, ): code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_decode_method(shape_type, self, pre_decoder_func) @final def decode(self, data: EncodedData) -> T: ... class YAMLEncoder(Generic[T]): @overload def __init__( self, shape_type: Type[T], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[PostEncoderFunc] = _default_encoder, ): ... @overload def __init__( self, shape_type: Any, *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[PostEncoderFunc] = _default_encoder, ): ... def __init__( self, shape_type: Union[Type[T], Any], *, default_dialect: Optional[Type[Dialect]] = None, post_encoder_func: Optional[PostEncoderFunc] = _default_encoder, ): code_builder = CodecCodeBuilder.new( type_args=get_args(shape_type), default_dialect=default_dialect ) code_builder.add_encode_method(shape_type, self, post_encoder_func) @final def encode(self, obj: T) -> EncodedData: ... 
def yaml_decode(data: EncodedData, shape_type: Union[Type[T], Any]) -> T: return YAMLDecoder(shape_type).decode(data) def yaml_encode(obj: T, shape_type: Union[Type[T], Any]) -> EncodedData: return YAMLEncoder(shape_type).encode(obj) decode = yaml_decode encode = yaml_encode __all__ = [ "YAMLDecoder", "YAMLEncoder", "yaml_decode", "yaml_encode", "decode", "encode", ] mashumaro-3.13.1/mashumaro/config.py000066400000000000000000000037251463331001200174060ustar00rootroot00000000000000from typing import ( Any, Callable, Dict, List, Literal, Optional, Type, TypedDict, Union, ) from mashumaro.core.const import Sentinel from mashumaro.dialect import Dialect from mashumaro.types import Discriminator, SerializationStrategy __all__ = [ "BaseConfig", "TO_DICT_ADD_BY_ALIAS_FLAG", "TO_DICT_ADD_OMIT_NONE_FLAG", "ADD_DIALECT_SUPPORT", "ADD_SERIALIZATION_CONTEXT", "SerializationStrategyValueType", ] TO_DICT_ADD_BY_ALIAS_FLAG = "TO_DICT_ADD_BY_ALIAS_FLAG" TO_DICT_ADD_OMIT_NONE_FLAG = "TO_DICT_ADD_OMIT_NONE_FLAG" ADD_DIALECT_SUPPORT = "ADD_DIALECT_SUPPORT" ADD_SERIALIZATION_CONTEXT = "ADD_SERIALIZATION_CONTEXT" CodeGenerationOption = Literal[ "TO_DICT_ADD_BY_ALIAS_FLAG", "TO_DICT_ADD_OMIT_NONE_FLAG", "ADD_DIALECT_SUPPORT", "ADD_SERIALIZATION_CONTEXT", ] class SerializationStrategyDict(TypedDict, total=False): serialize: Union[str, Callable] deserialize: Union[str, Callable] SerializationStrategyValueType = Union[ SerializationStrategy, SerializationStrategyDict ] class BaseConfig: debug: bool = False code_generation_options: List[CodeGenerationOption] = [] serialization_strategy: Dict[Any, SerializationStrategyValueType] = {} aliases: Dict[str, str] = {} serialize_by_alias: Union[bool, Literal[Sentinel.MISSING]] = ( Sentinel.MISSING ) namedtuple_as_dict: Union[bool, Literal[Sentinel.MISSING]] = ( Sentinel.MISSING ) allow_postponed_evaluation: bool = True dialect: Optional[Type[Dialect]] = None omit_none: Union[bool, Literal[Sentinel.MISSING]] = Sentinel.MISSING omit_default: Union[bool, Literal[Sentinel.MISSING]] = Sentinel.MISSING orjson_options: Optional[int] = 0 json_schema: Dict[str, Any] = {} discriminator: Optional[Discriminator] = None lazy_compilation: bool = False sort_keys: bool = False allow_deserialization_not_by_alias: bool = False forbid_extra_keys: bool = False mashumaro-3.13.1/mashumaro/core/000077500000000000000000000000001463331001200165105ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/__init__.py000066400000000000000000000000001463331001200206070ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/const.py000066400000000000000000000016061463331001200202130ustar00rootroot00000000000000import enum import sys __all__ = [ "PY_38", "PY_39", "PY_310", "PY_39_MIN", "PY_310_MIN", "PY_311_MIN", "PY_312_MIN", "PY_313_MIN", "PEP_585_COMPATIBLE", "Sentinel", ] PY_38 = sys.version_info.major == 3 and sys.version_info.minor == 8 PY_39 = sys.version_info.major == 3 and sys.version_info.minor == 9 PY_310 = sys.version_info.major == 3 and sys.version_info.minor == 10 PY_311 = sys.version_info.major == 3 and sys.version_info.minor == 11 PY_312 = sys.version_info.major == 3 and sys.version_info.minor == 12 PY_313_MIN = sys.version_info.major == 3 and sys.version_info.minor >= 13 PY_312_MIN = PY_312 or PY_313_MIN PY_311_MIN = PY_311 or PY_312_MIN PY_310_MIN = PY_310 or PY_311_MIN PY_39_MIN = PY_39 or PY_310_MIN PEP_585_COMPATIBLE = PY_39_MIN # Type Hinting Generics In Standard Collections class Sentinel(enum.Enum): MISSING = enum.auto() 
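# Sentinel.MISSING serves as a "not set" marker for tri-state config options
# (see mashumaro/config.py), so an explicitly configured False or None can be
# distinguished from an option that was never set at all.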
mashumaro-3.13.1/mashumaro/core/helpers.py000066400000000000000000000014651463331001200205320ustar00rootroot00000000000000import datetime import re __all__ = [ "parse_timezone", "ConfigValue", "UTC_OFFSET_PATTERN", ] UTC_OFFSET_PATTERN = r"^UTC(([+-][0-2][0-9]):([0-5][0-9]))?$" UTC_OFFSET_RE = re.compile(UTC_OFFSET_PATTERN) def parse_timezone(s: str) -> datetime.timezone: match = UTC_OFFSET_RE.match(s) if not match: raise ValueError( f"Time zone {s} must be either UTC or in format UTC[+-]hh:mm" ) if match.group(1): hours = int(match.group(2)) minutes = int(match.group(3)) return datetime.timezone( datetime.timedelta( hours=hours, minutes=minutes if hours >= 0 else -minutes ) ) else: return datetime.timezone.utc class ConfigValue: def __init__(self, name: str): self.name = name mashumaro-3.13.1/mashumaro/core/meta/000077500000000000000000000000001463331001200174365ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/meta/__init__.py000066400000000000000000000000001463331001200215350ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/meta/code/000077500000000000000000000000001463331001200203505ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/meta/code/__init__.py000066400000000000000000000000001463331001200224470ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/meta/code/builder.py000066400000000000000000001524721463331001200223630ustar00rootroot00000000000000import enum import importlib import inspect import math import types import typing import uuid from contextlib import contextmanager # noinspection PyProtectedMember from dataclasses import _FIELDS # type: ignore from dataclasses import MISSING, Field, is_dataclass from functools import lru_cache try: from dataclasses import KW_ONLY # type: ignore except ImportError: KW_ONLY = object() # type: ignore import typing_extensions from mashumaro.config import ( ADD_DIALECT_SUPPORT, ADD_SERIALIZATION_CONTEXT, TO_DICT_ADD_BY_ALIAS_FLAG, TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig, SerializationStrategyValueType, ) from mashumaro.core.const import Sentinel from mashumaro.core.helpers import ConfigValue from mashumaro.core.meta.code.lines import CodeLines from mashumaro.core.meta.helpers import ( evaluate_forward_ref, get_args, get_class_that_defines_field, get_class_that_defines_method, get_forward_ref_referencing_globals, get_literal_values, get_name_error_name, get_type_annotations, hash_type_args, is_annotated, is_class_var, is_dataclass_dict_mixin, is_dataclass_dict_mixin_subclass, is_dialect_subclass, is_hashable, is_init_var, is_literal, is_local_type_name, is_named_tuple, is_optional, is_type_var_any, resolve_type_params, substitute_type_params, type_name, ) from mashumaro.core.meta.types.common import ( FieldContext, NoneType, ValueSpec, clean_id, ) from mashumaro.core.meta.types.pack import PackerRegistry from mashumaro.core.meta.types.unpack import ( SubtypeUnpackerBuilder, UnpackerRegistry, ) from mashumaro.dialect import Dialect from mashumaro.exceptions import ( # noqa BadDialect, BadHookSignature, ExtraKeysError, InvalidFieldValue, MissingDiscriminatorError, MissingField, SuitableVariantNotFoundError, ThirdPartyModuleNotFoundError, UnresolvedTypeReferenceError, UnserializableDataError, UnserializableField, UnsupportedDeserializationEngine, UnsupportedSerializationEngine, ) from mashumaro.types import Alias, Discriminator __PRE_SERIALIZE__ = "__pre_serialize__" __PRE_DESERIALIZE__ = "__pre_deserialize__" __POST_SERIALIZE__ = "__post_serialize__" __POST_DESERIALIZE__ = "__post_deserialize__" 
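# The dunder names above are the optional user-defined hooks that a dataclass
# may declare to customize (de)serialization; they are looked up on the class
# via get_declared_hook() during code generation.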
SIMPLE_TYPES = (int, float, bool, str, NoneType) class InternalMethodName(str): _PREFIX = "__mashumaro_" _SUFFIX = "__" @classmethod def from_public(cls, value: str) -> "InternalMethodName": return cls(f"{cls._PREFIX}{value}{cls._SUFFIX}") @property def public(self) -> str: return self[len(self._PREFIX) : -len(self._SUFFIX)] class CodeBuilder: def __init__( self, cls: typing.Type, type_args: typing.Tuple[typing.Type, ...] = (), dialect: typing.Optional[typing.Type[Dialect]] = None, first_method: str = "from_dict", allow_postponed_evaluation: bool = True, format_name: str = "dict", decoder: typing.Optional[typing.Any] = None, encoder: typing.Optional[typing.Any] = None, encoder_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = None, default_dialect: typing.Optional[typing.Type[Dialect]] = None, attrs: typing.Any = None, attrs_registry: typing.Optional[ typing.Dict[typing.Any, typing.Any] ] = None, ): self.cls = cls self.lines: CodeLines = CodeLines() self.globals: typing.Dict[str, typing.Any] = {} self.resolved_type_params: typing.Dict[ typing.Type, typing.Dict[typing.Type, typing.Type] ] = {} self.field_classes: typing.Dict = {} self.initial_type_args = type_args if dialect is not None and not is_dialect_subclass(dialect): raise BadDialect( 'Keyword argument "dialect" must be a subclass of Dialect ' f"in {type_name(self.cls)}.{first_method}" ) self.dialect = dialect self.default_dialect = default_dialect self.allow_postponed_evaluation = allow_postponed_evaluation self.format_name = format_name self.decoder = decoder self.encoder = encoder self.encoder_kwargs = encoder_kwargs or {} if attrs is not None: self.attrs = attrs else: self.attrs = cls if attrs_registry is not None: self.attrs_registry = attrs_registry else: self.attrs_registry = {} def reset(self) -> None: self.lines.reset() self.globals = globals().copy() self.resolved_type_params = resolve_type_params( self.cls, self.initial_type_args ) self.field_classes = {} @property def namespace(self) -> typing.Mapping[typing.Any, typing.Any]: return self.cls.__dict__ @property def annotations(self) -> typing.Dict[str, typing.Any]: return self.namespace.get("__annotations__", {}) @property def is_nailed(self) -> bool: return self.attrs is self.cls def __get_field_types( self, recursive: bool = True, include_extras: bool = False ) -> typing.Dict[str, typing.Any]: fields = {} try: field_type_hints = typing_extensions.get_type_hints( self.cls, include_extras=include_extras ) except NameError as e: name = get_name_error_name(e) raise UnresolvedTypeReferenceError(self.cls, name) from None for fname, ftype in field_type_hints.items(): if is_class_var(ftype) or is_init_var(ftype) or ftype is KW_ONLY: continue if recursive or fname in self.annotations: fields[fname] = ftype return fields def _get_field_class(self, field_name: str) -> typing.Any: try: cls = self.field_classes[field_name] except KeyError: cls = get_class_that_defines_field(field_name, self.cls) self.field_classes[field_name] = cls return cls def get_real_type( self, field_name: str, field_type: typing.Type ) -> typing.Type: cls = self._get_field_class(field_name) return substitute_type_params( field_type, self.resolved_type_params[cls] ) def get_field_resolved_type_params( self, field_name: str ) -> typing.Dict[typing.Type, typing.Type]: cls = self._get_field_class(field_name) return self.resolved_type_params[cls] def get_field_types( self, include_extras: bool = False ) -> typing.Dict[str, typing.Any]: return self.__get_field_types(include_extras=include_extras) def 
get_type_name_identifier( self, typ: typing.Optional[typing.Type], resolved_type_params: typing.Optional[ typing.Dict[typing.Type, typing.Type] ] = None, ) -> str: field_type = type_name(typ, resolved_type_params=resolved_type_params) if is_local_type_name(field_type): field_type = clean_id(field_type) self.ensure_object_imported(typ, field_type) return field_type @property @lru_cache() def dataclass_fields(self) -> typing.Dict[str, Field]: d = {} for ancestor in self.cls.__mro__[-1:0:-1]: if is_dataclass(ancestor): for field in getattr(ancestor, _FIELDS).values(): d[field.name] = field for name in self.__get_field_types(recursive=False): field = self.namespace.get(name, MISSING) if isinstance(field, Field): d[name] = field else: field = self.namespace.get(_FIELDS, {}).get(name, MISSING) if isinstance(field, Field): d[name] = field else: d.pop(name, None) return d @property def metadatas(self) -> typing.Dict[str, typing.Mapping[str, typing.Any]]: return { name: field.metadata for name, field in self.dataclass_fields.items() } @lru_cache(None) def get_field_default( self, name: str, call_factory: bool = False ) -> typing.Any: field = self.dataclass_fields.get(name) if field: if field.default is not MISSING: return field.default else: if call_factory and field.default_factory is not MISSING: return field.default_factory() else: return field.default_factory else: return self.namespace.get(name, MISSING) def add_type_modules(self, *types_: typing.Type) -> None: for t in types_: module = inspect.getmodule(t) if not module: continue self.ensure_module_imported(module) if is_literal(t): literal_args = get_literal_values(t) self.add_type_modules(*literal_args) else: args = get_args(t) if args: self.add_type_modules(*args) constraints = getattr(t, "__constraints__", ()) if constraints: self.add_type_modules(*constraints) bound = getattr(t, "__bound__", ()) if bound: self.add_type_modules(bound) def ensure_module_imported(self, module: types.ModuleType) -> None: self.globals.setdefault(module.__name__, module) package = module.__name__.split(".")[0] self.globals.setdefault(package, importlib.import_module(package)) def ensure_object_imported( self, obj: typing.Any, name: typing.Optional[str] = None, ) -> None: self.globals.setdefault(name or obj.__name__, obj) def add_line(self, line: str) -> None: self.lines.append(line) @contextmanager def indent( self, expr: typing.Optional[str] = None, ) -> typing.Generator[None, None, None]: with self.lines.indent(expr): yield def compile(self) -> None: code = self.lines.as_text() if self.get_config().debug: if self.dialect is not None: print(f"{type_name(self.cls)}[{type_name(self.dialect)}]:") else: print(f"{type_name(self.cls)}:") print(code) exec(code, self.globals, self.__dict__) def evaluate_forward_ref( self, typ: typing.ForwardRef, owner: typing.Optional[typing.Type], ) -> typing.Optional[typing.Type]: globalns = get_forward_ref_referencing_globals( typ, owner, self.globals ) return evaluate_forward_ref(typ, globalns, self.__dict__) def get_declared_hook(self, method_name: str) -> typing.Any: cls = get_class_that_defines_method(method_name, self.cls) if cls is not None and not is_dataclass_dict_mixin(cls): return cls.__dict__[method_name] def _add_unpack_method_lines_lazy(self, method_name: str) -> None: if self.default_dialect is not None: self.add_type_modules(self.default_dialect) self.add_line( f"CodeBuilder(" f"cls," f"first_method='{method_name}'," f"allow_postponed_evaluation=False," f"format_name='{self.format_name}'," 
f"decoder={type_name(self.decoder)}," f"default_dialect={type_name(self.default_dialect)}" f").add_unpack_method()" ) unpacker_args = [ "d", self.get_unpack_method_flags(pass_decoder=True), ] unpacker_args_s = ", ".join(filter(None, unpacker_args)) self.add_line(f"return cls.{method_name}({unpacker_args_s})") def _add_unpack_method_lines(self, method_name: str) -> None: config = self.get_config() if ( config.lazy_compilation and self.allow_postponed_evaluation and self.is_nailed ): self._add_unpack_method_lines_lazy(method_name) return try: field_types = self.get_field_types(include_extras=True) except UnresolvedTypeReferenceError: if ( not self.allow_postponed_evaluation or not config.allow_postponed_evaluation ): raise self._add_unpack_method_lines_lazy(method_name) else: if self.decoder is not None: self.add_line("d = decoder(d)") discr = self.get_discriminator() if discr: if not discr.include_subtypes: raise ValueError( "Config based discriminator must have " "'include_subtypes' enabled" ) discr = Discriminator( # prevent RecursionError field=discr.field, include_subtypes=discr.include_subtypes, variant_tagger_fn=discr.variant_tagger_fn, ) self.add_type_modules(self.cls) method = SubtypeUnpackerBuilder(discr).build( spec=ValueSpec( type=self.cls, expression="d", builder=self, field_ctx=FieldContext("", {}), ) ) self.add_line(f"return {method}") return pre_deserialize = self.get_declared_hook(__PRE_DESERIALIZE__) if pre_deserialize: if not isinstance(pre_deserialize, classmethod): raise BadHookSignature( f"`{__PRE_DESERIALIZE__}` must be a class method with " "Callable[[Dict[Any, Any]], Dict[Any, Any]] signature" ) else: self.add_line(f"d = cls.{__PRE_DESERIALIZE__}(d)") post_deserialize = self.get_declared_hook(__POST_DESERIALIZE__) if post_deserialize: if not isinstance(post_deserialize, classmethod): raise BadHookSignature( f"`{__POST_DESERIALIZE__}` must be a class method " f"with Callable[[{type_name(self.cls)}], " f"{type_name(self.cls)}] signature" ) filtered_fields = [] pos_args = [] kw_args = [] missing_kw_only = False add_kwargs = False kw_only_fields = set() field_blocks = [] for fname, ftype in field_types.items(): field = self.dataclass_fields.get(fname) if field and not field.init: continue if missing_kw_only: kw_only_fields.add(fname) elif field: kw_only = getattr(field, "kw_only", MISSING) if kw_only is MISSING: missing_kw_only = True kw_only_fields.add(fname) elif kw_only: kw_only_fields.add(fname) else: missing_kw_only = True kw_only_fields.add(fname) metadata = self.metadatas.get(fname, {}) alias = self.__get_field_alias(fname, ftype, metadata, config) filtered_fields.append((fname, alias, ftype)) if filtered_fields: if config.forbid_extra_keys: allowed_keys = {f[1] or f[0] for f in filtered_fields} # If a discriminator with a field is set via config, # we should allow this field to be present in the input # This will not work for annotated discriminators though... 
discr = self.get_discriminator(look_in_parents=True) if discr and discr.field: allowed_keys.add(discr.field) if config.allow_deserialization_not_by_alias: allowed_keys |= {f[0] for f in filtered_fields} allowed_keys_str = "'" + "', '".join(allowed_keys) + "'" self.add_line("d_keys = set(d.keys())") self.add_line( f"forbidden_keys = d_keys - {{{allowed_keys_str}}}" ) with self.indent("if forbidden_keys:"): self.add_line( "raise ExtraKeysError(forbidden_keys,cls) " "from None" ) with self.indent("try:"): for fname, alias, ftype in filtered_fields: self.add_type_modules(ftype) metadata = self.metadatas.get(fname, {}) field_block = FieldUnpackerCodeBlockBuilder( self, self.lines.branch_off() ).build( fname=fname, ftype=ftype, metadata=metadata, alias=alias, ) if field_block.in_kwargs: add_kwargs = True field_blocks.append(field_block) if add_kwargs: self.add_line("kwargs = {}") in_kwargs = False for field_block in field_blocks: self.lines.extend(field_block.lines) if field_block.in_kwargs: in_kwargs = True else: if ( field_block.fname in kw_only_fields or in_kwargs ): kw_args.append(field_block.fname) else: pos_args.append(field_block.fname) with self.indent("except AttributeError:"): with self.indent("if not isinstance(d, dict):"): self.add_line( "raise ValueError('Argument for " f"{type_name(self.cls)}.{method_name} method " "should be a dict instance') from None" ) with self.indent("else:"): self.add_line("raise") args = [f"__{f}" for f in pos_args] for kw_arg in kw_args: args.append(f"{kw_arg}=__{kw_arg}") if add_kwargs: args.append("**kwargs") cls_inst = f"cls({', '.join(args)})" if post_deserialize: self.add_line(f"return cls.{__POST_DESERIALIZE__}({cls_inst})") else: self.add_line(f"return {cls_inst}") def _add_unpack_method_with_dialect_lines(self, method_name: str) -> None: if self.decoder is not None: self.add_line("d = decoder(d)") unpacker_args = ", ".join( filter(None, ("cls", "d", self.get_unpack_method_flags())) ) cache_name = f"__dialect_{self.format_name}_unpacker_cache__" self.add_line(f"unpacker = cls.{cache_name}.get(dialect)") with self.indent("if unpacker is not None:"): self.add_line(f"return unpacker({unpacker_args})") if self.default_dialect: self.add_type_modules(self.default_dialect) self.add_line( "CodeBuilder(" "cls,dialect=dialect," f"first_method='{method_name}'," f"format_name='{self.format_name}'," f"default_dialect={type_name(self.default_dialect)}" ").add_unpack_method()" ) self.add_line(f"return cls.{cache_name}[dialect]({unpacker_args})") def add_unpack_method(self) -> None: self.reset() method_name = self.get_unpack_method_name( type_args=self.initial_type_args, format_name=self.format_name, decoder=self.decoder, ) if self.decoder is not None: self.add_type_modules(self.decoder) dialects_feature = self.is_code_generation_option_enabled( ADD_DIALECT_SUPPORT ) cache_name = f"__dialect_{self.format_name}_unpacker_cache__" if dialects_feature: with self.indent(f"if not '{cache_name}' in cls.__dict__:"): self.add_line(f"cls.{cache_name} = {{}}") if self.dialect is None and self.is_nailed: self.add_line("@classmethod") self._add_unpack_method_definition(method_name) with self.indent(): if dialects_feature and self.dialect is None: with self.indent("if dialect is None:"): self._add_unpack_method_lines(method_name) with self.indent("else:"): self._add_unpack_method_with_dialect_lines(method_name) else: self._add_unpack_method_lines(method_name) self._add_setattr_method(method_name, cache_name) self.compile() def _add_unpack_method_definition(self, method_name: 
str) -> None: kwargs = "" default_kwargs = self.get_unpack_method_default_flag_values( pass_decoder=True ) if default_kwargs: kwargs += f", {default_kwargs}" if self.is_nailed: self.add_line(f"def {method_name}(cls, d{kwargs}):") else: self.add_line(f"def {method_name}(d{kwargs}):") @lru_cache() @typing.no_type_check def get_config( self, cls: typing.Optional[typing.Type] = None, look_in_parents: bool = True, ) -> typing.Type[BaseConfig]: if cls is None: cls = self.cls if look_in_parents: config_cls = getattr(cls, "Config", BaseConfig) else: config_cls = cls.__dict__.get("Config", BaseConfig) if not issubclass(config_cls, BaseConfig): config_cls = type( "Config", (BaseConfig, config_cls), {**BaseConfig.__dict__, **config_cls.__dict__}, ) return config_cls def get_discriminator( self, look_in_parents: bool = False ) -> typing.Optional[Discriminator]: if look_in_parents: classes = self.cls.__mro__ else: classes = (self.cls,) for cls in classes: discriminator = self.get_config( cls, look_in_parents=False ).discriminator if discriminator: return discriminator return None def get_pack_method_flags( self, cls: typing.Optional[typing.Type] = None, pass_encoder: bool = False, ) -> str: pluggable_flags = [] if pass_encoder and self.encoder is not None: pluggable_flags.append("encoder=encoder") for value in self._get_encoder_kwargs(cls).values(): pluggable_flags.append(f"{value[0]}={value[0]}") for option, flag in ( (TO_DICT_ADD_OMIT_NONE_FLAG, "omit_none"), (TO_DICT_ADD_BY_ALIAS_FLAG, "by_alias"), (ADD_DIALECT_SUPPORT, "dialect"), (ADD_SERIALIZATION_CONTEXT, "context"), ): if self.is_code_generation_option_enabled(option, cls): if self.is_code_generation_option_enabled(option): pluggable_flags.append(f"{flag}={flag}") return ", ".join(pluggable_flags) def get_unpack_method_flags( self, cls: typing.Optional[typing.Type] = None, pass_decoder: bool = False, ) -> str: pluggable_flags = [] if pass_decoder and self.decoder is not None: pluggable_flags.append("decoder=decoder") for option, flag in ((ADD_DIALECT_SUPPORT, "dialect"),): if self.is_code_generation_option_enabled(option, cls): if self.is_code_generation_option_enabled(option): pluggable_flags.append(f"{flag}={flag}") return ", ".join(pluggable_flags) def get_pack_method_default_flag_values( self, cls: typing.Optional[typing.Type] = None, pass_encoder: bool = False, ) -> str: pos_param_names = [] pos_param_values = [] kw_param_names = [] kw_param_values = [] if pass_encoder and self.encoder is not None: pos_param_names.append("encoder") pos_param_values.append(type_name(self.encoder)) for value in self._get_encoder_kwargs(cls).values(): kw_param_names.append(value[0]) kw_param_values.append(value[1]) omit_none_feature = self.is_code_generation_option_enabled( TO_DICT_ADD_OMIT_NONE_FLAG, cls ) if omit_none_feature: omit_none = self.get_dialect_or_config_option("omit_none", False) kw_param_names.append("omit_none") kw_param_values.append("True" if omit_none else "False") by_alias_feature = self.is_code_generation_option_enabled( TO_DICT_ADD_BY_ALIAS_FLAG, cls ) if by_alias_feature: serialize_by_alias = self.get_dialect_or_config_option( "serialize_by_alias", False, cls ) kw_param_names.append("by_alias") kw_param_values.append("True" if serialize_by_alias else "False") dialects_feature = self.is_code_generation_option_enabled( ADD_DIALECT_SUPPORT, cls ) if dialects_feature: kw_param_names.append("dialect") kw_param_values.append("None") context_feature = self.is_code_generation_option_enabled( ADD_SERIALIZATION_CONTEXT, cls ) if context_feature: 
kw_param_names.append("context") kw_param_values.append("None") if pos_param_names: pluggable_flags_str = ", ".join( [f"{n}={v}" for n, v in zip(pos_param_names, pos_param_values)] ) else: pluggable_flags_str = "" if kw_param_names: if pos_param_names: pluggable_flags_str += ", " pluggable_flags_str += "*, " + ", ".join( [f"{n}={v}" for n, v in zip(kw_param_names, kw_param_values)] ) return pluggable_flags_str def get_unpack_method_default_flag_values( self, pass_decoder: bool = False ) -> str: pos_param_names = [] pos_param_values = [] kw_param_names = [] kw_param_values = [] if pass_decoder and self.decoder is not None: pos_param_names.append("decoder") pos_param_values.append(type_name(self.decoder)) kw_param_names.append("dialect") kw_param_values.append("None") if pos_param_names: pluggable_flags_str = ", ".join( [f"{n}={v}" for n, v in zip(pos_param_names, pos_param_values)] ) else: pluggable_flags_str = "" if kw_param_names: if pos_param_names: pluggable_flags_str += ", " pluggable_flags_str += "*, " + ", ".join( [f"{n}={v}" for n, v in zip(kw_param_names, kw_param_values)] ) return pluggable_flags_str def is_code_generation_option_enabled( self, option: str, cls: typing.Optional[typing.Type] = None ) -> bool: if cls is None: cls = self.cls return option in self.get_config(cls).code_generation_options @classmethod def get_unpack_method_name( cls, type_args: typing.Iterable = (), format_name: str = "dict", decoder: typing.Optional[typing.Any] = None, ) -> InternalMethodName: if format_name != "dict" and decoder is not None: return InternalMethodName.from_public(f"from_{format_name}") else: method_name = "from_dict" if format_name != "dict": method_name += f"_{format_name}" if type_args: method_name += f"_{hash_type_args(type_args)}" return InternalMethodName.from_public(method_name) @classmethod def get_pack_method_name( cls, type_args: typing.Tuple[typing.Type, ...] 
= (), format_name: str = "dict", encoder: typing.Optional[typing.Any] = None, ) -> InternalMethodName: if format_name != "dict" and encoder is not None: return InternalMethodName.from_public(f"to_{format_name}") else: method_name = "to_dict" if format_name != "dict": method_name += f"_{format_name}" if type_args: method_name += f"_{hash_type_args(type_args)}" return InternalMethodName.from_public(f"{method_name}") def _add_pack_method_lines_lazy(self, method_name: str) -> None: if self.default_dialect is not None: self.add_type_modules(self.default_dialect) self.add_line( "CodeBuilder(" "self.__class__," f"first_method='{method_name}'," "allow_postponed_evaluation=False," f"format_name='{self.format_name}'," f"encoder={type_name(self.encoder)}," f"encoder_kwargs={self._get_encoder_kwargs()}," f"default_dialect={type_name(self.default_dialect)}" ").add_pack_method()" ) packer_args = self.get_pack_method_flags(pass_encoder=True) self.add_line(f"return self.{method_name}({packer_args})") def _add_pack_method_lines(self, method_name: str) -> None: config = self.get_config() if ( config.lazy_compilation and self.allow_postponed_evaluation and self.is_nailed ): self._add_pack_method_lines_lazy(method_name) return try: field_types = self.get_field_types(include_extras=True) except UnresolvedTypeReferenceError: if ( not self.allow_postponed_evaluation or not config.allow_postponed_evaluation ): raise self._add_pack_method_lines_lazy(method_name) else: pre_serialize = self.get_declared_hook(__PRE_SERIALIZE__) if pre_serialize: if self.is_code_generation_option_enabled( ADD_SERIALIZATION_CONTEXT ): pre_serialize_args = "context=context" else: pre_serialize_args = "" self.add_line( f"self = self.{__PRE_SERIALIZE__}({pre_serialize_args})" ) by_alias_feature = self.is_code_generation_option_enabled( TO_DICT_ADD_BY_ALIAS_FLAG ) omit_none_feature = self.is_code_generation_option_enabled( TO_DICT_ADD_OMIT_NONE_FLAG ) serialize_by_alias = self.get_dialect_or_config_option( "serialize_by_alias", False ) omit_none = self.get_dialect_or_config_option("omit_none", False) omit_default = self.get_dialect_or_config_option( "omit_default", False ) force_value = omit_default packers = {} aliases = {} nullable_fields = set() nontrivial_nullable_fields = set() fnames_and_types: typing.Iterable[ typing.Tuple[str, typing.Any] ] = field_types.items() if self.get_config().sort_keys: fnames_and_types = sorted(fnames_and_types, key=lambda x: x[0]) for fname, ftype in fnames_and_types: if self.metadatas.get(fname, {}).get("serialize") == "omit": continue packer, alias, could_be_none = self._get_field_packer( fname, ftype, config, force_value ) packers[fname] = packer if alias: aliases[fname] = alias if could_be_none: nullable_fields.add(fname) if packer != "value": nontrivial_nullable_fields.add(fname) if ( nontrivial_nullable_fields or nullable_fields and (omit_none or omit_none_feature) or by_alias_feature and aliases or omit_default ): kwargs = "kwargs" self.add_line("kwargs = {}") for fname, packer in packers.items(): if force_value: self.add_line(f"value = self.{fname}") alias = aliases.get(fname) if omit_default: # do not call default_factory if we don't need to default = self.get_field_default( fname, call_factory=True ) else: default = None if fname in nullable_fields: if ( packer == "value" and not omit_none and not omit_none_feature and not (omit_default and default is None) ): self._pack_method_set_value( fname=fname, alias=alias, by_alias_feature=by_alias_feature, packed_value=( "value" if force_value else 
f"self.{fname}" ), omit_default=omit_default, ) continue if not force_value: # to add it only once self.add_line(f"value = self.{fname}") with self.indent("if value is not None:"): self._pack_method_set_value( fname=fname, alias=alias, by_alias_feature=by_alias_feature, packed_value=packer, omit_default=( omit_default and default is not None ), ) if omit_none and not omit_none_feature: continue elif omit_default and default is None: continue with self.indent("else:"): if omit_none_feature: with self.indent("if not omit_none:"): self._pack_method_set_value( fname=fname, alias=alias, by_alias_feature=by_alias_feature, packed_value="None", omit_default=False, ) else: self._pack_method_set_value( fname=fname, alias=alias, by_alias_feature=by_alias_feature, packed_value="None", omit_default=False, ) else: self._pack_method_set_value( fname=fname, alias=alias, by_alias_feature=by_alias_feature, packed_value=packer, omit_default=omit_default, ) else: kwargs_parts = [] for fname, packer in packers.items(): if serialize_by_alias: fname_or_alias = aliases.get(fname, fname) else: fname_or_alias = fname kwargs_parts.append( ( fname_or_alias, packer if packer != "value" else f"self.{fname}", ) ) kwargs = ", ".join(f"'{k}': {v}" for k, v in kwargs_parts) kwargs = f"{{{kwargs}}}" post_serialize = self.get_declared_hook(__POST_SERIALIZE__) if self.encoder is not None: if self.encoder_kwargs: encoder_options = ", ".join( f"{k}={v[0]}" for k, v in self.encoder_kwargs.items() ) return_statement = ( f"return encoder({{}}, {encoder_options})" ) else: return_statement = "return encoder({})" else: return_statement = "return {}" if post_serialize: if self.is_code_generation_option_enabled( ADD_SERIALIZATION_CONTEXT ): kwargs = f"{kwargs}, context=context" self.add_line( return_statement.format( f"self.{__POST_SERIALIZE__}({kwargs})" ) ) else: self.add_line(return_statement.format(kwargs)) def _pack_method_set_value( self, fname: str, alias: typing.Optional[str], by_alias_feature: bool, packed_value: str, omit_default: bool, ) -> None: if omit_default: default = self.get_field_default(fname, call_factory=True) if default is not MISSING: default_literal = self.get_field_default_literal( self.get_field_default(fname, call_factory=True) ) # if default is None: # comp_expr = f"value is not {default_literal}" if isinstance(default, float) and math.isnan(default): self.ensure_object_imported(math.isnan, "isnan") comp_expr = "not isnan(value)" else: comp_expr = f"value != {default_literal}" with self.indent(f"if {comp_expr}:"): return self.__pack_method_set_value( fname, alias, by_alias_feature, packed_value ) return self.__pack_method_set_value( fname, alias, by_alias_feature, packed_value ) def __pack_method_set_value( self, fname: str, alias: typing.Optional[str], by_alias_feature: bool, packed_value: str, ) -> None: if by_alias_feature and alias is not None: with self.indent("if by_alias:"): self.add_line(f"kwargs['{alias}'] = {packed_value}") with self.indent("else:"): self.add_line(f"kwargs['{fname}'] = {packed_value}") else: serialize_by_alias = self.get_dialect_or_config_option( "serialize_by_alias", False ) if serialize_by_alias and alias is not None: fname_or_alias = alias else: fname_or_alias = fname self.add_line(f"kwargs['{fname_or_alias}'] = {packed_value}") def _add_pack_method_with_dialect_lines(self, method_name: str) -> None: packer_args = ", ".join( filter(None, ("self", self.get_pack_method_flags())) ) cache_name = f"__dialect_{self.format_name}_packer_cache__" self.add_line(f"packer = 
self.__class__.{cache_name}.get(dialect)") self.add_line("if packer is not None:") if self.encoder is not None: return_statement = "return encoder({})" else: return_statement = "return {}" with self.indent(): self.add_line(return_statement.format(f"packer({packer_args})")) if self.default_dialect: self.add_type_modules(self.default_dialect) self.add_line( "CodeBuilder(" "self.__class__,dialect=dialect," f"first_method='{method_name}'," f"format_name='{self.format_name}'," f"default_dialect={type_name(self.default_dialect)}" ").add_pack_method()" ) self.add_line( return_statement.format( f"self.__class__.{cache_name}[dialect]({packer_args})" ) ) def _get_encoder_kwargs( self, cls: typing.Optional[typing.Type] = None ) -> typing.Dict[str, typing.Any]: result = {} for encoder_param, value in self.encoder_kwargs.items(): packer_param = value[0] packer_value = value[1] if isinstance(packer_value, ConfigValue): packer_value = getattr(self.get_config(cls), packer_value.name) result[encoder_param] = (packer_param, packer_value) return result def _add_pack_method_definition(self, method_name: str) -> None: kwargs = "" default_kwargs = self.get_pack_method_default_flag_values( pass_encoder=True ) if default_kwargs: kwargs += f", {default_kwargs}" self.add_line(f"def {method_name}(self{kwargs}):") def add_pack_method(self) -> None: self.reset() method_name = self.get_pack_method_name( type_args=self.initial_type_args, format_name=self.format_name, encoder=self.encoder, ) if self.encoder is not None: self.add_type_modules(self.encoder) dialects_feature = self.is_code_generation_option_enabled( ADD_DIALECT_SUPPORT ) cache_name = f"__dialect_{self.format_name}_packer_cache__" if dialects_feature: with self.indent(f"if not '{cache_name}' in cls.__dict__:"): self.add_line(f"cls.{cache_name} = {{}}") self._add_pack_method_definition(method_name) with self.indent(): if dialects_feature and self.dialect is None: with self.indent("if dialect is None:"): self._add_pack_method_lines(method_name) with self.indent("else:"): self._add_pack_method_with_dialect_lines(method_name) else: self._add_pack_method_lines(method_name) self._add_setattr_method(method_name, cache_name) self.compile() def _add_setattr_method( self, method_name: InternalMethodName, cache_name: str ) -> None: if self.dialect is None: if not self.is_nailed: self.ensure_object_imported(self.attrs, "_cls") self.ensure_object_imported(self.cls, "cls") self.add_line(f"setattr(_cls, '{method_name}', {method_name})") else: self.add_line(f"setattr(cls, '{method_name}', {method_name})") if is_dataclass_dict_mixin_subclass(self.cls): self.add_line( f"setattr(cls, '{method_name.public}', {method_name})" ) else: self.add_line(f"cls.{cache_name}[dialect] = {method_name}") def _get_field_packer( self, fname: str, ftype: typing.Type, config: typing.Type[BaseConfig], force_value: bool = False, ) -> typing.Tuple[str, typing.Optional[str], bool]: metadata = self.metadatas.get(fname, {}) alias = self.__get_field_alias(fname, ftype, metadata, config) could_be_none = ( ftype in (typing.Any, type(None), None) or is_type_var_any(self.get_real_type(fname, ftype)) or is_optional(ftype, self.get_field_resolved_type_params(fname)) or self.get_field_default(fname) is None ) value = "value" if could_be_none or force_value else f"self.{fname}" packer = PackerRegistry.get( ValueSpec( type=ftype, expression=value, builder=self, field_ctx=FieldContext( name=fname, metadata=metadata, ), could_be_none=False, no_copy_collections=self.get_dialect_or_config_option( 
"no_copy_collections", () ), ) ) return packer, alias, could_be_none @staticmethod def __get_field_alias( fname: str, ftype: typing.Type, metadata: typing.Mapping[str, typing.Any], config: typing.Type[BaseConfig], ) -> typing.Optional[str]: alias = metadata.get("alias") if alias is None and is_annotated(ftype): annotations = get_type_annotations(ftype) for ann in annotations: if isinstance(ann, Alias): alias = ann.name if alias is None: alias = config.aliases.get(fname) return alias @typing.no_type_check def iter_serialization_strategies( self, metadata: typing.Mapping, ftype: typing.Type ) -> typing.Iterator[SerializationStrategyValueType]: if is_hashable(ftype): yield metadata.get("serialization_strategy") yield from self.__iter_serialization_strategies(ftype) @typing.no_type_check def __iter_serialization_strategies( self, ftype: typing.Type ) -> typing.Iterator[SerializationStrategyValueType]: if self.dialect is not None: yield self.dialect.serialization_strategy.get(ftype) default_dialect = self.get_config().dialect if default_dialect is not None: if not is_dialect_subclass(default_dialect): raise BadDialect( 'Config option "dialect" of ' f"{type_name(self.cls)} must be a subclass of Dialect" ) yield default_dialect.serialization_strategy.get(ftype) yield self.get_config().serialization_strategy.get(ftype) if self.default_dialect is not None: yield self.default_dialect.serialization_strategy.get(ftype) def get_dialect_or_config_option( self, option: str, default: typing.Any, cls: typing.Optional[typing.Type] = None, ) -> typing.Any: for ns in ( self.dialect, self.get_config(cls).dialect, self.get_config(cls), self.default_dialect, ): value = getattr(ns, option, Sentinel.MISSING) if value is not Sentinel.MISSING: return value return default def get_field_default_literal(self, value: typing.Any) -> str: if isinstance(value, enum.IntFlag): return str(value.value) elif type(value) in (str, int, bool, NoneType): # type: ignore return repr(value) elif ( isinstance(value, float) and not math.isnan(value) and not math.isinf(value) ): return repr(value) elif isinstance(value, tuple) and not is_named_tuple(type(value)): return repr(value) else: name = f"v_{uuid.uuid4().hex}" self.ensure_object_imported(value, name) return name class FieldUnpackerCodeBlock: def __init__(self, lines: CodeLines, fname: str, in_kwargs: bool): self.lines = lines self.fname = fname self.in_kwargs = in_kwargs class FieldUnpackerCodeBlockBuilder: def __init__(self, parent: CodeBuilder, lines: CodeLines): self.parent = parent self.lines = lines def _try_set_value( self, field_name: str, field_type_name: str, unpacked_value: str, in_kwargs: bool, ) -> None: with self.lines.indent("try:"): self._set_value(field_name, unpacked_value, in_kwargs) with self.lines.indent("except:"): self.lines.append( "raise InvalidFieldValue(" f"'{field_name}',{field_type_name},value,cls)" ) def _set_value( self, fname: str, unpacked_value: str, in_kwargs: bool = False ) -> None: if in_kwargs: self.lines.append(f"kwargs['{fname}'] = {unpacked_value}") else: self.lines.append(f"__{fname} = {unpacked_value}") def build( self, fname: str, ftype: typing.Type, metadata: typing.Mapping, *, alias: typing.Optional[str] = None, ) -> FieldUnpackerCodeBlock: default = self.parent.get_field_default(fname) has_default = default is not MISSING field_type = self.parent.get_type_name_identifier( ftype, resolved_type_params=self.parent.get_field_resolved_type_params( fname ), ) could_be_none = ( ftype in (typing.Any, type(None), None) or 
is_type_var_any(self.parent.get_real_type(fname, ftype)) or is_optional( ftype, self.parent.get_field_resolved_type_params(fname) ) or default is None ) unpacked_value = UnpackerRegistry.get( ValueSpec( type=ftype, expression="value", builder=self.parent, field_ctx=FieldContext( name=fname, metadata=metadata, ), could_be_none=False if could_be_none else True, ) ) if self.parent.get_config().allow_deserialization_not_by_alias: if unpacked_value != "value": self.add_line(f"value = d.get('{alias}', MISSING)") with self.indent("if value is MISSING:"): self.add_line(f"value = d.get('{fname}', MISSING)") packed_value = "value" elif has_default: self.add_line(f"value = d.get('{alias}', MISSING)") with self.indent("if value is MISSING:"): self.add_line(f"value = d.get('{fname}', MISSING)") packed_value = "value" else: self.add_line(f"__{fname} = d.get('{alias}', MISSING)") with self.indent(f"if __{fname} is MISSING:"): self.add_line(f"__{fname} = d.get('{fname}', MISSING)") packed_value = f"__{fname}" unpacked_value = packed_value else: if unpacked_value != "value": self.add_line(f"value = d.get('{alias or fname}', MISSING)") packed_value = "value" elif has_default: self.add_line(f"value = d.get('{alias or fname}', MISSING)") packed_value = "value" else: self.add_line( f"__{fname} = d.get('{alias or fname}', MISSING)" ) packed_value = f"__{fname}" unpacked_value = packed_value if not has_default: with self.indent(f"if {packed_value} is MISSING:"): self.add_line( f"raise MissingField('{fname}',{field_type},cls) from None" ) if packed_value != unpacked_value: if could_be_none: with self.indent(f"if {packed_value} is not None:"): self._try_set_value( fname, field_type, unpacked_value, has_default ) with self.indent("else:"): self._set_value(fname, "None", has_default) else: self._try_set_value( fname, field_type, unpacked_value, has_default ) else: with self.indent(f"if {packed_value} is not MISSING:"): if could_be_none: if unpacked_value != "value": with self.indent(f"if {packed_value} is not None:"): self._try_set_value( fname, field_type, unpacked_value, has_default ) if default is not None: with self.indent("else:"): self._set_value(fname, "None", has_default) else: self._set_value(fname, unpacked_value, has_default) else: if unpacked_value != "value": self._try_set_value( fname, field_type, unpacked_value, has_default ) else: self._set_value(fname, unpacked_value, has_default) return FieldUnpackerCodeBlock(self.lines, fname, has_default) def add_line(self, line: str) -> None: self.lines.append(line) @contextmanager def indent( self, expr: typing.Optional[str] = None, ) -> typing.Generator[None, None, None]: with self.lines.indent(expr): yield mashumaro-3.13.1/mashumaro/core/meta/code/lines.py000066400000000000000000000020361463331001200220350ustar00rootroot00000000000000from contextlib import contextmanager from typing import Generator, List, Optional __all__ = ["CodeLines"] class CodeLines: def __init__(self) -> None: self._lines: List[str] = [] self._current_indent: str = "" def append(self, line: str) -> None: self._lines.append(f"{self._current_indent}{line}") def extend(self, lines: "CodeLines") -> None: self._lines.extend(lines._lines) @contextmanager def indent( self, expr: Optional[str] = None, ) -> Generator[None, None, None]: if expr: self.append(expr) self._current_indent += " " * 4 try: yield finally: self._current_indent = self._current_indent[:-4] def as_text(self) -> str: return "\n".join(self._lines) def reset(self) -> None: self._lines = [] self._current_indent = "" def 
branch_off(self) -> "CodeLines": branch = CodeLines() branch._current_indent = self._current_indent return branch mashumaro-3.13.1/mashumaro/core/meta/helpers.py000066400000000000000000000603631463331001200214620ustar00rootroot00000000000000import dataclasses import enum import inspect import re import sys import types import typing from contextlib import suppress # noinspection PyProtectedMember from dataclasses import _FIELDS # type: ignore from hashlib import md5 from typing import ( Any, ClassVar, Dict, ForwardRef, List, Optional, Sequence, Tuple, Type, Union, ) try: from typing import Unpack # type: ignore[attr-defined] except ImportError: from typing_extensions import Unpack import typing_extensions from mashumaro.core.const import ( PEP_585_COMPATIBLE, PY_38, PY_39, PY_39_MIN, PY_310_MIN, PY_311_MIN, PY_312_MIN, ) from mashumaro.dialect import Dialect __all__ = [ "get_type_origin", "get_args", "type_name", "is_special_typing_primitive", "is_generic", "is_typed_dict", "is_named_tuple", "is_optional", "is_union", "not_none_type_arg", "is_type_var", "is_type_var_any", "is_class_var", "is_final", "is_init_var", "get_class_that_defines_method", "get_class_that_defines_field", "is_dataclass_dict_mixin", "is_dataclass_dict_mixin_subclass", "collect_type_params", "resolve_type_params", "substitute_type_params", "get_generic_name", "get_name_error_name", "is_dialect_subclass", "is_new_type", "is_annotated", "get_type_annotations", "is_literal", "is_local_type_name", "get_literal_values", "is_self", "is_required", "is_not_required", "get_function_arg_annotation", "get_function_return_annotation", "is_unpack", "is_type_var_tuple", "hash_type_args", "iter_all_subclasses", "is_hashable", "is_hashable_type", "evaluate_forward_ref", "get_forward_ref_referencing_globals", "is_type_alias_type", ] NoneType = type(None) DataClassDictMixinPath = ( f"{__name__.rsplit('.', 3)[:-3][0]}.mixins.dict.DataClassDictMixin" ) def get_type_origin(typ: Type) -> Type: try: return typ.__origin__ except AttributeError: return typ def is_builtin_type(typ: Type) -> bool: try: return typ.__module__ == "builtins" except AttributeError: return False def get_generic_name(typ: Type, short: bool = False) -> str: name = getattr(typ, "_name", None) if name is None: origin = get_type_origin(typ) if origin is typ: return type_name(origin, short, is_type_origin=True) else: return get_generic_name(origin, short) if short: return name else: return f"{typ.__module__}.{name}" def get_args(typ: Optional[Type]) -> Tuple[Type, ...]: return getattr(typ, "__args__", ()) def _get_args_str( typ: Type, short: bool, resolved_type_params: Optional[Dict[Type, Type]] = None, limit: Optional[int] = None, none_type_as_none: bool = False, sep: str = ", ", ) -> str: if typ == Tuple[()]: return "()" elif PEP_585_COMPATIBLE and typ == tuple[()]: # type: ignore return "()" args = _flatten_type_args(get_args(typ)[:limit]) to_join = [] for arg in args: to_join.append( type_name( typ=arg, short=short, resolved_type_params=resolved_type_params, none_type_as_none=none_type_as_none, ) ) if len(to_join) > 1: return sep.join(s for s in to_join if s != "()") else: return sep.join(to_join) def get_literal_values(typ: Type) -> Tuple[Any, ...]: values = typ.__args__ result: List[Any] = [] for value in values: if is_literal(value): result.extend(get_literal_values(value)) else: result.append(value) return tuple(result) def _get_literal_values_str(typ: Type, short: bool) -> str: values_str = [] for value in get_literal_values(typ): if isinstance(value, enum.Enum): 
values_str.append(f"{type_name(type(value), short)}.{value.name}") elif isinstance( value, (int, str, bytes, bool, NoneType), # type: ignore ): values_str.append(repr(value)) return ", ".join(values_str) def _typing_name( typ_name: str, short: bool = False, module_name: str = "typing", ) -> str: return typ_name if short else f"{module_name}.{typ_name}" def type_name( typ: Optional[Type], short: bool = False, resolved_type_params: Optional[Dict[Type, Type]] = None, is_type_origin: bool = False, none_type_as_none: bool = False, ) -> str: if resolved_type_params is None: resolved_type_params = {} if typ is None: return "None" elif typ is NoneType and none_type_as_none: return "None" elif typ is Ellipsis: return "..." elif typ is Any: return _typing_name("Any", short) elif is_optional(typ, resolved_type_params): args_str = type_name( typ=not_none_type_arg(get_args(typ), resolved_type_params), short=short, resolved_type_params=resolved_type_params, ) return f"{_typing_name('Optional', short)}[{args_str}]" elif is_union(typ): args_str = _get_args_str( typ, short, resolved_type_params, none_type_as_none=True ) return f"{_typing_name('Union', short)}[{args_str}]" elif is_annotated(typ): return type_name(get_args(typ)[0], short, resolved_type_params) elif not is_type_origin and is_literal(typ): args_str = _get_literal_values_str(typ, short) return f"{_typing_name('Literal', short, typ.__module__)}[{args_str}]" elif not is_type_origin and is_unpack(typ): if ( typ in resolved_type_params and resolved_type_params[typ] is not typ ): return type_name( resolved_type_params[typ], short, resolved_type_params ) else: unpacked_type_arg = get_args(typ)[0] if not is_variable_length_tuple( unpacked_type_arg ) and not is_type_var_tuple(unpacked_type_arg): return _get_args_str( unpacked_type_arg, short, resolved_type_params ) unpacked_type_name = type_name( unpacked_type_arg, short, resolved_type_params ) if PY_311_MIN: return f"*{unpacked_type_name}" else: _unpack = _typing_name("Unpack", short, typ.__module__) return f"{_unpack}[{unpacked_type_name}]" elif not is_type_origin and is_generic(typ): args_str = _get_args_str(typ, short, resolved_type_params) if not args_str: return get_generic_name(typ, short) else: return f"{get_generic_name(typ, short)}[{args_str}]" elif is_builtin_type(typ): return typ.__qualname__ elif is_type_var(typ): if ( typ in resolved_type_params and resolved_type_params[typ] is not typ ): return type_name( resolved_type_params[typ], short, resolved_type_params ) elif is_type_var_any(typ): return _typing_name("Any", short) constraints = getattr(typ, "__constraints__") if constraints: args_str = ", ".join( type_name(c, short, resolved_type_params) for c in constraints ) return f"{_typing_name('Union', short)}[{args_str}]" else: if type_var_has_default(typ): bound = get_type_var_default(typ) else: bound = getattr(typ, "__bound__") return type_name(bound, short, resolved_type_params) elif is_new_type(typ) and not PY_310_MIN: # because __qualname__ and __module__ are messed up typ = typ.__supertype__ try: if short: return typ.__qualname__ # type: ignore else: return f"{typ.__module__}.{typ.__qualname__}" # type: ignore except AttributeError: return str(typ) def is_special_typing_primitive(typ: Any) -> bool: try: issubclass(typ, object) return False except TypeError: return True def is_generic(typ: Type) -> bool: with suppress(Exception): if hasattr(typ, "__class_getitem__"): return True if PY_38: # noinspection PyProtectedMember # noinspection PyUnresolvedReferences return 
issubclass(typ.__class__, typing._GenericAlias)  # type: ignore
    elif PY_39_MIN:
        # noinspection PyProtectedMember
        # noinspection PyUnresolvedReferences
        if (
            issubclass(typ.__class__, typing._BaseGenericAlias)  # type: ignore
            or type(typ) is types.GenericAlias  # type: ignore # noqa: E721
        ):
            return True
        else:
            return False
        # else:  # for PEP 585 generics without args
        #     try:
        #         return (
        #             hasattr(typ, "__class_getitem__")
        #             and type(typ[str]) is types.GenericAlias  # type: ignore
        #         )
        #     except (TypeError, AttributeError):
        #         return False
    else:
        raise NotImplementedError


def is_typed_dict(typ: Type) -> bool:
    for module in (typing, typing_extensions):
        with suppress(AttributeError):
            if type(typ) is getattr(module, "_TypedDictMeta"):
                return True
    return False


def is_named_tuple(typ: Type) -> bool:
    try:
        return issubclass(typ, typing.Tuple) and hasattr(  # type: ignore
            typ, "_fields"
        )
    except TypeError:
        return False


def is_new_type(typ: Type) -> bool:
    return hasattr(typ, "__supertype__")


def is_union(typ: Type) -> bool:
    try:
        if PY_310_MIN and isinstance(typ, types.UnionType):  # type: ignore
            return True
        return typ.__origin__ is Union
    except AttributeError:
        return False


def is_optional(
    typ: Type, resolved_type_params: Optional[Dict[Type, Type]] = None
) -> bool:
    if resolved_type_params is None:
        resolved_type_params = {}
    if not is_union(typ):
        return False
    args = get_args(typ)
    if len(args) != 2:
        return False
    for arg in args:
        if resolved_type_params.get(arg, arg) is NoneType:
            return True
    return False


def is_annotated(typ: Type) -> bool:
    for module in (typing, typing_extensions):
        with suppress(AttributeError):
            if type(typ) is getattr(module, "_AnnotatedAlias"):
                return True
    return False


def get_type_annotations(typ: Type) -> Sequence[Any]:
    return getattr(typ, "__metadata__", [])


def is_literal(typ: Type) -> bool:
    if PY_38 or PY_39:
        with suppress(AttributeError):
            return is_generic(typ) and get_generic_name(typ, True) == "Literal"
    elif PY_310_MIN:
        with suppress(AttributeError):
            # noinspection PyProtectedMember
            # noinspection PyUnresolvedReferences
            return type(typ) is typing._LiteralGenericAlias  # type: ignore
    return False


def is_local_type_name(typ_name: str) -> bool:
    # The "<locals>" marker appears in the qualified name of types defined
    # inside function bodies.
    return "<locals>" in typ_name


def not_none_type_arg(
    type_args: Tuple[Type, ...],
    resolved_type_params: Optional[Dict[Type, Type]] = None,
) -> Optional[Type]:
    if resolved_type_params is None:
        resolved_type_params = {}
    for type_arg in type_args:
        if resolved_type_params.get(type_arg, type_arg) is not NoneType:
            return type_arg
    return None


def is_type_var(typ: Type) -> bool:
    return hasattr(typ, "__constraints__")


def is_type_var_any(typ: Type) -> bool:
    if not is_type_var(typ):
        return False
    elif typ.__constraints__ != ():
        return False
    elif typ.__bound__ not in (None, Any):
        return False
    elif type_var_has_default(typ):
        return False
    else:
        return True


def is_class_var(typ: Type) -> bool:
    return get_type_origin(typ) is ClassVar


def is_final(typ: Type) -> bool:
    return get_type_origin(typ) is typing_extensions.Final


def is_init_var(typ: Type) -> bool:
    return isinstance(typ, dataclasses.InitVar)


def get_class_that_defines_method(
    method_name: str, cls: Type
) -> Optional[Type]:
    for cls in cls.__mro__:
        if method_name in cls.__dict__:
            return cls
    return None


def get_class_that_defines_field(field_name: str, cls: Type) -> Optional[Type]:
    prev_cls = None
    prev_field = None
    for base in reversed(cls.__mro__):
        if dataclasses.is_dataclass(base):
            field = getattr(base, _FIELDS).get(field_name)
            if field and field != prev_field:
                prev_field = field
                prev_cls = base
    return prev_cls or cls
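# Illustrative doctest-style sketch (added for clarity, not in the original
# module) of how the predicates above behave on common typing constructs:
#
#     >>> import typing
#     >>> is_union(typing.Union[int, str])
#     True
#     >>> is_optional(typing.Optional[int])
#     True
#     >>> is_optional(typing.Union[int, str])  # two non-None args
#     False
#     >>> is_new_type(typing.NewType("UserId", int))
#     True
#     >>> is_local_type_name("module.make_cls.<locals>.LocalClass")
#     True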
def is_dataclass_dict_mixin(typ: Type) -> bool: return type_name(typ) == DataClassDictMixinPath def is_dataclass_dict_mixin_subclass(typ: Type) -> bool: with suppress(AttributeError): for cls in typ.__mro__: if is_dataclass_dict_mixin(cls): return True return False def get_orig_bases(typ: Type) -> Tuple[Type, ...]: return getattr(typ, "__orig_bases__", ()) def collect_type_params(typ: Type) -> Sequence[Type]: type_params = [] for type_arg in get_args(typ): if type_arg in type_params: continue elif is_type_var(type_arg): type_params.append(type_arg) elif is_unpack(type_arg) and is_type_var_tuple(get_args(type_arg)[0]): type_params.append(type_arg) else: for _type_param in collect_type_params(type_arg): if _type_param not in type_params: type_params.append(_type_param) return type_params def _check_generic( typ: Type, type_params: Sequence[Type], type_args: Sequence[Type] ) -> None: # https://github.com/python/cpython/issues/99382 unpacks = len(list(filter(is_unpack, type_params))) if unpacks > 1: raise TypeError( "Multiple unpacks are disallowed within a single type parameter " f"list for {type_name(typ)}" ) elif unpacks == 1: expected_count = len(type_params) - 1 expected_msg = f"at least {len(type_params) - 1}" else: expected_count = len(type_params) expected_msg = f"{expected_count}" args_len = len(type_args) if 0 < args_len < expected_count: raise TypeError( f"Too few arguments for {type_name(typ)}; " f"actual {args_len}, expected {expected_msg}" ) def _flatten_type_args( type_args: Sequence[Type], allow_ellipsis_if_many_args: bool = False, ) -> Sequence[Type]: result = [] for type_arg in type_args: if is_unpack(type_arg): unpacked_type = get_args(type_arg)[0] if is_type_var_tuple(unpacked_type): result.append(type_arg) elif is_variable_length_tuple(unpacked_type): if len(type_args) == 1: result.extend(_flatten_type_args(get_args(unpacked_type))) elif allow_ellipsis_if_many_args: result.extend(_flatten_type_args(get_args(unpacked_type))) else: result.append(type_arg) elif unpacked_type == Tuple[()]: if len(type_args) == 1: result.append(()) # type: ignore elif ( PEP_585_COMPATIBLE and unpacked_type == tuple[()] # type: ignore ): if len(type_args) == 1: result.append(()) # type: ignore else: result.extend(_flatten_type_args(get_args(unpacked_type))) else: result.append(type_arg) return result def resolve_type_params( typ: Type, type_args: Sequence[Type] = (), include_bases: bool = True, ) -> Dict[Type, Dict[Type, Type]]: resolved_type_params: Dict[Type, Type] = {} result = {typ: resolved_type_params} type_params = [] for base in get_orig_bases(typ): base_type_params = collect_type_params(base) for type_param in base_type_params: if type_param not in type_params: type_params.append(type_param) _check_generic(typ, type_params, type_args) type_args = _flatten_type_args(type_args, allow_ellipsis_if_many_args=True) param_idx = 0 unpack_param_idx = -1 arg_idx = 0 while param_idx < len(type_params): type_param = type_params[param_idx] if not is_unpack(type_param): if type_param not in resolved_type_params: try: next_type_arg = type_args[arg_idx] if next_type_arg is Ellipsis: next_type_arg = type_args[arg_idx - 1] else: if unpack_param_idx < 0: arg_idx += 1 else: arg_idx -= 1 except IndexError: next_type_arg = type_param resolved_type_params[type_param] = next_type_arg if unpack_param_idx < 0: param_idx += 1 else: param_idx -= 1 elif unpack_param_idx < 0: unpack_param_idx = param_idx param_idx = -1 arg_idx = -1 unpacked_param = get_args(type_param)[0] for y in reversed(get_args(unpacked_param)): 
# pragma: no cover # We turn Tuple[x,y] to x, y, but leave this here just in case type_params.insert(param_idx, y) else: if not type_args and is_type_var_tuple(get_args(type_param)[0]): resolved_type_params[type_param] = Unpack[ Tuple[Any, ...] # type: ignore ] break t_args = type_args[unpack_param_idx : len(type_args) + arg_idx + 1] if len(t_args) == 1 and t_args[0] == (): x: Any = () elif len(t_args) > 2 and t_args[-1] is Ellipsis: x = (*t_args[:-2], Unpack[Tuple[t_args[-2], ...]]) else: x = tuple(t_args) resolved_type_params[type_param] = Unpack[Tuple[x]] # type: ignore break if include_bases: orig_bases = { get_type_origin(orig_base): orig_base for orig_base in get_orig_bases(typ) } for base in getattr(typ, "__bases__", ()): orig_base = orig_bases.get(get_type_origin(base)) base_type_params = get_args(orig_base) base_type_args = tuple( [resolved_type_params.get(a, a) for a in base_type_params] ) result.update(resolve_type_params(base, base_type_args)) return result def substitute_type_params(typ: Type, substitutions: Dict[Type, Type]) -> Type: if is_annotated(typ): origin = get_type_origin(typ) subst = substitutions.get(origin, origin) return typing_extensions.Annotated[ (subst, *get_type_annotations(typ)) # type: ignore ] else: new_type_args = [] for type_param in collect_type_params(typ): new_type_args.append(substitutions.get(type_param, type_param)) if new_type_args: with suppress(TypeError, KeyError): return typ[tuple(new_type_args)] if is_hashable(typ): return substitutions.get(typ, typ) else: return typ def get_name_error_name(e: NameError) -> str: if PY_310_MIN: return e.name # type: ignore else: match = re.search("'(.*)'", e.args[0]) return match.group(1) if match else "" def is_dialect_subclass(typ: Type) -> bool: try: return issubclass(typ, Dialect) except TypeError: return False def is_self(typ: Type) -> bool: return typ is typing_extensions.Self def is_required(typ: Type) -> bool: return get_type_origin(typ) is typing_extensions.Required # noqa def is_not_required(typ: Type) -> bool: return get_type_origin(typ) is typing_extensions.NotRequired # noqa def get_function_arg_annotation( function: typing.Callable[..., Any], arg_name: typing.Optional[str] = None, arg_pos: typing.Optional[int] = None, ) -> typing.Type: parameters = inspect.signature(function).parameters if arg_name is not None: parameter = parameters[arg_name] elif arg_pos is not None: parameter = parameters[list(parameters.keys())[arg_pos]] else: raise ValueError("arg_name or arg_pos must be passed") annotation = parameter.annotation if annotation is inspect.Signature.empty: raise ValueError(f"Argument {arg_name} doesn't have annotation") if isinstance(annotation, str): annotation = str_to_forward_ref( annotation, inspect.getmodule(function) ) return annotation def get_function_return_annotation( function: typing.Callable[[typing.Any], typing.Any] ) -> typing.Type: annotation = inspect.signature(function).return_annotation if annotation is inspect.Signature.empty: raise ValueError("Function doesn't have return annotation") if isinstance(annotation, str): annotation = str_to_forward_ref( annotation, inspect.getmodule(function) ) return annotation def is_unpack(typ: Type) -> bool: for module in (typing, typing_extensions): with suppress(AttributeError): if get_type_origin(typ) is getattr(module, "Unpack"): return True return False def is_type_var_tuple(typ: Type) -> bool: for module in (typing, typing_extensions): with suppress(AttributeError): if type(typ) is getattr(module, "TypeVarTuple"): return True return 
False def is_variable_length_tuple(typ: Type) -> bool: type_args = get_args(typ) return len(type_args) == 2 and type_args[1] is Ellipsis def hash_type_args(type_args: typing.Iterable[typing.Type]) -> str: return md5(",".join(map(type_name, type_args)).encode()).hexdigest() def iter_all_subclasses(cls: Type) -> typing.Iterator[Type]: for subclass in cls.__subclasses__(): yield subclass yield from iter_all_subclasses(subclass) def is_hashable(value: Any) -> bool: try: hash(value) return True except TypeError: return False def is_hashable_type(typ: Any) -> bool: try: return issubclass(typ, typing.Hashable) except TypeError: return True def str_to_forward_ref( annotation: str, module: Optional[types.ModuleType] = None ) -> ForwardRef: if PY_39_MIN: return ForwardRef(annotation, module=module) # type: ignore else: return ForwardRef(annotation) def evaluate_forward_ref( typ: ForwardRef, globalns: Any, localns: Any ) -> Optional[Type]: if PY_39_MIN: return typ._evaluate( globalns, localns, recursive_guard=frozenset() ) # type: ignore[call-arg] else: return typ._evaluate(globalns, localns) # type: ignore[call-arg] def get_forward_ref_referencing_globals( referenced_type: typing.ForwardRef, referencing_object: Optional[Any] = None, fallback: Optional[Dict[str, Any]] = None, ) -> Dict[str, Any]: if fallback is None: fallback = {} forward_module = getattr(referenced_type, "__forward_module__", None) if not forward_module and referencing_object: # We can't get the module in which ForwardRef's value is defined on # Python < 3.10, ForwardRef evaluation might not work properly # without this information, so we will consider the namespace of # the module in which this ForwardRef is used as globalns. return getattr( sys.modules.get(referencing_object.__module__, None), "__dict__", fallback, ) else: return getattr(forward_module, "__dict__", fallback) def is_type_alias_type(typ: Type) -> bool: if PY_312_MIN: return isinstance(typ, typing.TypeAliasType) # type: ignore else: return False def type_var_has_default(typ: Any) -> bool: try: return typ.has_default() except AttributeError: return getattr(typ, "__default__", None) is not None def get_type_var_default(typ: Any) -> Type: return getattr(typ, "__default__") mashumaro-3.13.1/mashumaro/core/meta/mixin.py000066400000000000000000000025261463331001200211410ustar00rootroot00000000000000from typing import Any, Dict, Optional, Tuple, Type from mashumaro.core.meta.code.builder import CodeBuilder from mashumaro.dialect import Dialect from mashumaro.exceptions import UnresolvedTypeReferenceError __all__ = [ "compile_mixin_packer", "compile_mixin_unpacker", ] def compile_mixin_packer( cls: Type, format_name: str = "dict", dialect: Optional[Type[Dialect]] = None, encoder: Any = None, encoder_kwargs: Optional[Dict[str, Dict[str, Tuple[str, Any]]]] = None, ) -> None: builder = CodeBuilder( cls=cls, format_name=format_name, encoder=encoder, encoder_kwargs=encoder_kwargs, default_dialect=dialect, ) config = builder.get_config() try: builder.add_pack_method() except UnresolvedTypeReferenceError: if not config.allow_postponed_evaluation: raise def compile_mixin_unpacker( cls: Type, format_name: str = "dict", dialect: Optional[Type[Dialect]] = None, decoder: Any = None, ) -> None: builder = CodeBuilder( cls=cls, format_name=format_name, decoder=decoder, default_dialect=dialect, ) config = builder.get_config() try: builder.add_unpack_method() except UnresolvedTypeReferenceError: if not config.allow_postponed_evaluation: raise 
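# Hedged usage sketch (not part of the original file): these two helpers are
# the entry points relied on to generate and attach the packer/unpacker
# methods when a DataClassDictMixin subclass is defined.  Calling them by
# hand simply re-runs code generation for an already-defined class:
#
#     from dataclasses import dataclass
#     from mashumaro import DataClassDictMixin
#
#     @dataclass
#     class User(DataClassDictMixin):
#         name: str
#
#     # Roughly what subclass creation triggers under the hood:
#     compile_mixin_packer(User)
#     compile_mixin_unpacker(User)
#
#     User(name="a").to_dict()  # {'name': 'a'}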
mashumaro-3.13.1/mashumaro/core/meta/types/000077500000000000000000000000001463331001200206025ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/meta/types/__init__.py000066400000000000000000000000001463331001200227010ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/core/meta/types/common.py000066400000000000000000000211411463331001200224430ustar00rootroot00000000000000import collections.abc import re import uuid from abc import ABC, abstractmethod from dataclasses import dataclass, field, replace from functools import cached_property from types import new_class from typing import ( TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Sequence, Type, TypeVar, ) from typing_extensions import ParamSpec, TypeAlias from mashumaro.core.const import PEP_585_COMPATIBLE from mashumaro.core.meta.code.lines import CodeLines from mashumaro.core.meta.helpers import ( get_args, get_type_origin, is_annotated, is_generic, is_hashable_type, is_self, type_name, ) from mashumaro.exceptions import UnserializableField if TYPE_CHECKING: # pragma: no cover from mashumaro.core.meta.code.builder import CodeBuilder else: CodeBuilder = Any NoneType = type(None) Expression: TypeAlias = str P = ParamSpec("P") T = TypeVar("T") _PY_VALID_ID_RE = re.compile(r"\W|^(?=\d)") class AttrsHolder: def __new__( cls, name: Optional[str] = None, *args: Any, **kwargs: Any ) -> Any: ah = new_class("AttrsHolder") ah_id = id(ah) if not name: name = f"attrs_{ah_id}" ah.__name__ = ah.__qualname__ = name return ah class ExpressionWrapper: def __init__(self, expression: str): self.expression = expression PROPER_COLLECTION_TYPES: Dict[Type, str] = { tuple: "typing.Tuple[T]", list: "typing.List[T]", set: "typing.Set[T]", frozenset: "typing.FrozenSet[T]", dict: "typing.Dict[KT,VT] or Mapping[KT,VT]", collections.deque: "typing.Deque[T]", collections.ChainMap: "typing.ChainMap[KT,VT]", collections.OrderedDict: "typing.OrderedDict[KT,VT]", collections.defaultdict: "typing.DefaultDict[KT, VT]", collections.Counter: "typing.Counter[KT]", } @dataclass class FieldContext: name: str metadata: Mapping def copy(self, **changes: Any) -> "FieldContext": return replace(self, **changes) @dataclass class ValueSpec: type: Type origin_type: Type = field(init=False) expression: Expression builder: CodeBuilder field_ctx: FieldContext could_be_none: bool = True annotated_type: Optional[Type] = None owner: Optional[Type] = None no_copy_collections: Sequence = tuple() def __setattr__(self, key: str, value: Any) -> None: if key == "type": self.origin_type = get_type_origin(value) super().__setattr__(key, value) def copy(self, **changes: Any) -> "ValueSpec": return replace(self, **changes) @cached_property def annotations(self) -> Sequence[str]: return getattr(self.annotated_type, "__metadata__", []) @cached_property def attrs(self) -> Any: if self.builder.is_nailed: return self.builder.attrs if is_self(self.type): typ = self.builder.cls else: typ = self.origin_type attrs = self.attrs_registry.get(typ) if attrs is None: attrs = AttrsHolder() self.attrs_registry[typ] = attrs return attrs @cached_property def cls_attrs_name(self) -> str: if self.builder.is_nailed: return "cls" else: self.builder.ensure_object_imported(self.attrs) return self.attrs.__name__ @cached_property def self_attrs_name(self) -> str: if self.builder.is_nailed: return "self" else: self.builder.ensure_object_imported(self.attrs) return self.attrs.__name__ @cached_property def attrs_registry(self) -> Dict[Any, Any]: return self.builder.attrs_registry @cached_property def 
attrs_registry_name(self) -> str: name = f"attrs_registry_{id(self.attrs_registry)}" self.builder.ensure_object_imported(self.attrs_registry, name) return name class AbstractMethodBuilder(ABC): @abstractmethod def get_method_prefix(self) -> str: # pragma: no cover raise NotImplementedError def _generate_method_name( self, spec: ValueSpec ) -> str: # pragma: no cover prefix = self.get_method_prefix() if prefix: prefix = f"{prefix}_" if spec.field_ctx.name: suffix = f"_{spec.field_ctx.name}" else: suffix = "" return f"__{prefix}{spec.builder.cls.__name__}{suffix}__{random_hex()}" @abstractmethod def _add_definition(self, spec: ValueSpec, lines: CodeLines) -> str: raise NotImplementedError @abstractmethod def _generate_method_args(self, spec: ValueSpec) -> str: raise NotImplementedError @abstractmethod def _add_body( self, spec: ValueSpec, lines: CodeLines ) -> None: # pragma: no cover raise NotImplementedError def _add_setattr( self, spec: ValueSpec, method_name: str, lines: CodeLines ) -> None: lines.append( f"setattr({spec.cls_attrs_name}, '{method_name}', {method_name})" ) def _compile(self, spec: ValueSpec, lines: CodeLines) -> None: if spec.builder.get_config().debug: print(f"{type_name(spec.builder.cls)}:") print(lines.as_text()) exec(lines.as_text(), spec.builder.globals, spec.builder.__dict__) @abstractmethod def _get_call_expr(self, spec: ValueSpec, method_name: str) -> str: raise NotImplementedError def _before_build(self, spec: ValueSpec) -> None: pass def build(self, spec: ValueSpec) -> str: self._before_build(spec) lines = CodeLines() method_name = self._add_definition(spec, lines) with lines.indent(): self._add_body(spec, lines) self._add_setattr(spec, method_name, lines) self._compile(spec, lines) return self._get_call_expr(spec, method_name) ValueSpecExprCreator: TypeAlias = Callable[[ValueSpec], Optional[Expression]] @dataclass class Registry: _registry: List[ValueSpecExprCreator] = field(default_factory=list) def register(self, function: ValueSpecExprCreator) -> ValueSpecExprCreator: self._registry.append(function) return function def get(self, spec: ValueSpec) -> Expression: if is_annotated(spec.type): spec.annotated_type = spec.builder.get_real_type( spec.field_ctx.name, spec.type ) spec.type = get_type_origin(spec.type) spec.type = spec.builder.get_real_type(spec.field_ctx.name, spec.type) spec.builder.add_type_modules(spec.type) for packer in self._registry: expr = packer(spec) if expr is not None: return expr raise UnserializableField( spec.field_ctx.name, spec.type, spec.builder.cls ) def ensure_generic_collection(spec: ValueSpec) -> bool: if not PEP_585_COMPATIBLE and not get_args(spec.type): proper_type = PROPER_COLLECTION_TYPES.get(spec.type) if proper_type: raise UnserializableField( field_name=spec.field_ctx.name, field_type=spec.type, holder_class=spec.builder.cls, msg=f"Use {proper_type} instead", ) if not is_generic(spec.type): return False return True def ensure_mapping_key_type_hashable( spec: ValueSpec, type_args: Sequence[Type] ) -> bool: if type_args: first_type_arg = type_args[0] if not is_hashable_type(first_type_arg): raise UnserializableField( field_name=spec.field_ctx.name, field_type=spec.type, holder_class=spec.builder.cls, msg=( f"{type_name(first_type_arg, short=True)} " "is unhashable and can not be used as a key" ), ) return True def ensure_generic_collection_subclass( spec: ValueSpec, *checked_types: Type ) -> bool: return issubclass( spec.origin_type, checked_types ) and ensure_generic_collection(spec) def ensure_generic_mapping( spec: 
ValueSpec, args: Sequence[Type], checked_type: Type ) -> bool: return ensure_generic_collection_subclass( spec, checked_type ) and ensure_mapping_key_type_hashable(spec, args) def expr_or_maybe_none(spec: ValueSpec, new_expr: Expression) -> Expression: if spec.could_be_none: return f"{new_expr} if {spec.expression} is not None else None" else: return new_expr def random_hex() -> str: return str(uuid.uuid4().hex) def clean_id(value: str) -> str: if not value: return "_" return _PY_VALID_ID_RE.sub("_", value) mashumaro-3.13.1/mashumaro/core/meta/types/pack.py000066400000000000000000000740511463331001200221010ustar00rootroot00000000000000import datetime import enum import ipaddress import os import typing import uuid from base64 import encodebytes from contextlib import suppress from dataclasses import is_dataclass from decimal import Decimal from fractions import Fraction from typing import ( Any, Callable, Dict, ForwardRef, List, Optional, Tuple, Type, Union, ) import typing_extensions from mashumaro.core.const import PY_39_MIN, PY_311_MIN from mashumaro.core.meta.code.lines import CodeLines from mashumaro.core.meta.helpers import ( get_args, get_class_that_defines_method, get_function_return_annotation, get_literal_values, get_type_origin, get_type_var_default, is_final, is_generic, is_literal, is_named_tuple, is_new_type, is_not_required, is_optional, is_required, is_self, is_special_typing_primitive, is_type_alias_type, is_type_var, is_type_var_any, is_type_var_tuple, is_typed_dict, is_union, is_unpack, not_none_type_arg, resolve_type_params, substitute_type_params, type_name, type_var_has_default, ) from mashumaro.core.meta.types.common import ( Expression, ExpressionWrapper, NoneType, Registry, ValueSpec, clean_id, ensure_generic_collection, ensure_generic_collection_subclass, ensure_generic_mapping, expr_or_maybe_none, random_hex, ) from mashumaro.exceptions import ( UnserializableDataError, UnserializableField, UnsupportedSerializationEngine, ) from mashumaro.helper import pass_through from mashumaro.types import ( GenericSerializableType, SerializableType, SerializationStrategy, ) if PY_39_MIN: import zoneinfo __all__ = ["PackerRegistry"] PackerRegistry = Registry() register = PackerRegistry.register def _pack_with_annotated_serialization_strategy( spec: ValueSpec, strategy: SerializationStrategy, ) -> Expression: strategy_type = type(strategy) try: value_type: Union[Type, Any] = get_function_return_annotation( strategy.serialize ) except (KeyError, ValueError): value_type = Any if isinstance(value_type, ForwardRef): value_type = spec.builder.evaluate_forward_ref( value_type, spec.origin_type ) value_type = substitute_type_params( value_type, # type: ignore resolve_type_params(strategy_type, get_args(spec.type))[strategy_type], ) overridden_fn = f"__{spec.field_ctx.name}_serialize_{random_hex()}" setattr(spec.attrs, overridden_fn, strategy.serialize) new_spec = spec.copy( type=value_type, expression=( f"{spec.self_attrs_name}.{overridden_fn}({spec.expression})" ), ) field_metadata = new_spec.field_ctx.metadata if field_metadata.get("serialization_strategy") is strategy: new_spec.field_ctx.metadata = { k: v for k, v in field_metadata.items() if k != "serialization_strategy" } return PackerRegistry.get( spec.copy( type=value_type, expression=( f"{spec.self_attrs_name}.{overridden_fn}({spec.expression})" ), ) ) def get_overridden_serialization_method( spec: ValueSpec, ) -> Optional[Union[Callable, str, ExpressionWrapper]]: serialize_option = spec.field_ctx.metadata.get("serialize") if 
serialize_option is not None: return serialize_option checking_types = [spec.type, spec.origin_type] if spec.annotated_type: checking_types.insert(0, spec.annotated_type) for typ in checking_types: for strategy in spec.builder.iter_serialization_strategies( spec.field_ctx.metadata, typ ): if strategy is pass_through: return pass_through elif isinstance(strategy, dict): serialize_option = strategy.get("serialize") elif isinstance(strategy, SerializationStrategy): if strategy.__use_annotations__ or is_generic(type(strategy)): return ExpressionWrapper( _pack_with_annotated_serialization_strategy( spec=spec, strategy=strategy, ) ) else: serialize_option = strategy.serialize if serialize_option is not None: return serialize_option @register def pack_type_with_overridden_serialization( spec: ValueSpec, ) -> Optional[Expression]: serialization_method = get_overridden_serialization_method(spec) if serialization_method is pass_through: return spec.expression elif isinstance(serialization_method, ExpressionWrapper): return serialization_method.expression elif callable(serialization_method): overridden_fn = f"__{spec.field_ctx.name}_serialize_{random_hex()}" setattr(spec.attrs, overridden_fn, staticmethod(serialization_method)) return f"{spec.self_attrs_name}.{overridden_fn}({spec.expression})" def _pack_annotated_serializable_type( spec: ValueSpec, ) -> Optional[Expression]: try: # noinspection PyProtectedMember # noinspection PyUnresolvedReferences value_type = get_function_return_annotation( spec.origin_type._serialize ) except (KeyError, ValueError): raise UnserializableField( field_name=spec.field_ctx.name, field_type=spec.type, holder_class=spec.builder.cls, msg="Method _serialize must have return annotation", ) from None if is_self(value_type): return f"{spec.expression}._serialize()" if isinstance(value_type, ForwardRef): value_type = spec.builder.evaluate_forward_ref( value_type, spec.origin_type ) value_type = substitute_type_params( value_type, resolve_type_params(spec.origin_type, get_args(spec.type))[ spec.origin_type ], ) return PackerRegistry.get( spec.copy( type=value_type, expression=f"{spec.expression}._serialize()", ) ) @register def pack_serializable_type(spec: ValueSpec) -> Optional[Expression]: try: if not issubclass(spec.origin_type, SerializableType): return None except TypeError: return None if spec.origin_type.__use_annotations__: return _pack_annotated_serializable_type(spec) else: return f"{spec.expression}._serialize()" @register def pack_generic_serializable_type(spec: ValueSpec) -> Optional[Expression]: with suppress(TypeError): if issubclass(spec.origin_type, GenericSerializableType): type_args = get_args(spec.type) spec.builder.add_type_modules(*type_args) type_arg_names = ", ".join(list(map(type_name, type_args))) return f"{spec.expression}._serialize([{type_arg_names}])" @register def pack_dataclass(spec: ValueSpec) -> Optional[Expression]: if is_dataclass(spec.origin_type): type_args = get_args(spec.type) method_name = spec.builder.get_pack_method_name( type_args, spec.builder.format_name ) method_loc = spec.origin_type if spec.builder.is_nailed else spec.attrs if get_class_that_defines_method( method_name, method_loc ) != method_loc and ( spec.origin_type != spec.builder.cls or spec.builder.get_pack_method_name( type_args=type_args, format_name=spec.builder.format_name, encoder=spec.builder.encoder, ) != method_name ): builder = spec.builder.__class__( spec.origin_type, type_args, dialect=spec.builder.dialect, format_name=spec.builder.format_name, 
default_dialect=spec.builder.default_dialect, attrs=method_loc, attrs_registry=( spec.attrs_registry if not spec.builder.is_nailed else None ), ) builder.add_pack_method() flags = spec.builder.get_pack_method_flags(spec.type) if spec.builder.is_nailed: return f"{spec.expression}.{method_name}({flags})" else: cls_alias = clean_id(type_name(spec.origin_type)) method_name_alias = f"{cls_alias}_{method_name}" spec.builder.ensure_object_imported( getattr(spec.attrs, method_name), method_name_alias ) method_args = spec.expression return f"{method_name_alias}({method_args})" @register def pack_final(spec: ValueSpec) -> Optional[Expression]: if is_final(spec.type): return PackerRegistry.get(spec.copy(type=get_args(spec.type)[0])) @register def pack_any(spec: ValueSpec) -> Optional[Expression]: if spec.type is Any: return spec.expression def pack_union( spec: ValueSpec, args: Tuple[Type, ...], prefix: str = "union" ) -> Expression: lines = CodeLines() method_name = ( f"__pack_{prefix}_{spec.builder.cls.__name__}_{spec.field_ctx.name}__" f"{random_hex()}" ) method_args = "self, value" if spec.builder.is_nailed else "value" default_kwargs = spec.builder.get_pack_method_default_flag_values() if default_kwargs: lines.append(f"def {method_name}({method_args}, {default_kwargs}):") else: lines.append(f"def {method_name}({method_args}):") packers: List[str] = [] packer_arg_types: Dict[str, List[Type]] = {} for type_arg in args: packer = PackerRegistry.get( spec.copy(type=type_arg, expression="value") ) if packer not in packers: if packer == "value": packers.insert(0, packer) else: packers.append(packer) packer_arg_types.setdefault(packer, []).append(type_arg) if len(packers) == 1 and packers[0] == "value": return spec.expression with lines.indent(): for packer in packers: packer_arg_type_names = [] for packer_arg_type in packer_arg_types[packer]: if is_generic(packer_arg_type): packer_arg_type = get_type_origin(packer_arg_type) packer_arg_type_name = clean_id(type_name(packer_arg_type)) spec.builder.ensure_object_imported( packer_arg_type, packer_arg_type_name ) if packer_arg_type_name not in packer_arg_type_names: packer_arg_type_names.append(packer_arg_type_name) if len(packer_arg_type_names) > 1: packer_arg_type_check = ( f"in ({', '.join(packer_arg_type_names)})" ) else: packer_arg_type_check = f"is {packer_arg_type_names[0]}" if packer == "value": with lines.indent( f"if value.__class__ {packer_arg_type_check}:" ): lines.append(f"return {packer}") else: with lines.indent("try:"): lines.append(f"return {packer}") with lines.indent("except Exception:"): lines.append("pass") field_type = spec.builder.get_type_name_identifier( typ=spec.type, resolved_type_params=spec.builder.get_field_resolved_type_params( spec.field_ctx.name ), ) if spec.builder.is_nailed: lines.append( "raise InvalidFieldValue(" f"'{spec.field_ctx.name}',{field_type},value,type(self))" ) else: lines.append("raise ValueError(value)") lines.append( f"setattr({spec.cls_attrs_name}, '{method_name}', {method_name})" ) if spec.builder.get_config().debug: print(f"{type_name(spec.builder.cls)}:") print(lines.as_text()) exec(lines.as_text(), spec.builder.globals, spec.builder.__dict__) method_args = ", ".join( filter(None, (spec.expression, spec.builder.get_pack_method_flags())) ) if spec.builder.is_nailed: return f"{spec.self_attrs_name}.{method_name}({method_args})" else: spec.builder.ensure_object_imported( getattr(spec.attrs, method_name), method_name ) return f"{method_name}({method_args})" def pack_literal(spec: ValueSpec) -> Expression: 
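    # Descriptive note (added): builds a one-off helper method that packs a
    # Literal value by comparing it against each allowed literal member
    # (enum members and plain int/str/bytes/bool/None constants) and
    # raising InvalidFieldValue/ValueError for anything else.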
spec.builder.add_type_modules(spec.type) lines = CodeLines() method_name = ( f"__pack_literal_{spec.builder.cls.__name__}_{spec.field_ctx.name}__" f"{random_hex()}" ) method_args = "self, value" if spec.builder.is_nailed else "value" default_kwargs = spec.builder.get_pack_method_default_flag_values() if default_kwargs: lines.append(f"def {method_name}({method_args}, {default_kwargs}):") else: lines.append(f"def {method_name}({method_args}):") resolved_type_params = spec.builder.get_field_resolved_type_params( spec.field_ctx.name ) with lines.indent(): for literal_value in get_literal_values(spec.type): value_type = type(literal_value) packer = PackerRegistry.get( spec.copy(type=value_type, expression="value") ) if isinstance(literal_value, enum.Enum): enum_type_name = spec.builder.get_type_name_identifier( typ=value_type, resolved_type_params=resolved_type_params, ) with lines.indent( f"if value == {enum_type_name}.{literal_value.name}:" ): lines.append(f"return {packer}") elif isinstance( literal_value, (int, str, bytes, bool, NoneType), # type: ignore ): with lines.indent(f"if value == {literal_value!r}:"): lines.append(f"return {packer}") field_type = spec.builder.get_type_name_identifier( typ=spec.type, resolved_type_params=resolved_type_params, ) if spec.builder.is_nailed: lines.append( f"raise InvalidFieldValue('{spec.field_ctx.name}'," f"{field_type},value,type(self))" ) else: lines.append("raise ValueError(value)") lines.append( f"setattr({spec.cls_attrs_name}, '{method_name}', {method_name})" ) if spec.builder.get_config().debug: print(f"{type_name(spec.builder.cls)}:") print(lines.as_text()) exec(lines.as_text(), spec.builder.globals, spec.builder.__dict__) method_args = ", ".join( filter(None, (spec.expression, spec.builder.get_pack_method_flags())) ) return f"{spec.self_attrs_name}.{method_name}({method_args})" @register def pack_special_typing_primitive(spec: ValueSpec) -> Optional[Expression]: if is_special_typing_primitive(spec.origin_type): if is_union(spec.type): resolved_type_params = spec.builder.get_field_resolved_type_params( spec.field_ctx.name ) if is_optional(spec.type, resolved_type_params): arg = not_none_type_arg( get_args(spec.type), resolved_type_params ) pv = PackerRegistry.get(spec.copy(type=arg)) return expr_or_maybe_none(spec, pv) else: return pack_union(spec, get_args(spec.type)) elif spec.origin_type is typing.AnyStr: raise UnserializableDataError( "AnyStr is not supported by mashumaro" ) elif is_type_var_any(spec.type): return spec.expression elif is_type_var(spec.type): constraints = getattr(spec.type, "__constraints__") if constraints: return pack_union(spec, constraints, "type_var") else: if type_var_has_default(spec.type): bound = get_type_var_default(spec.type) else: bound = getattr(spec.type, "__bound__") # act as if it was Optional[bound] pv = PackerRegistry.get(spec.copy(type=bound)) return expr_or_maybe_none(spec, pv) elif is_new_type(spec.type): return PackerRegistry.get(spec.copy(type=spec.type.__supertype__)) elif is_literal(spec.type): return pack_literal(spec) elif spec.type is typing_extensions.LiteralString: return PackerRegistry.get(spec.copy(type=str)) elif is_self(spec.type): method_name = spec.builder.get_pack_method_name( format_name=spec.builder.format_name ) method_loc = ( spec.builder.cls if spec.builder.is_nailed else spec.attrs ) if ( get_class_that_defines_method(method_name, method_loc) != method_loc # not hasattr(self.cls, method_name) and spec.builder.get_pack_method_name( format_name=spec.builder.format_name, 
encoder=spec.builder.encoder, ) != method_name ): builder = spec.builder.__class__( spec.builder.cls, dialect=spec.builder.dialect, format_name=spec.builder.format_name, default_dialect=spec.builder.default_dialect, attrs=method_loc, attrs_registry=( spec.attrs_registry if not spec.builder.is_nailed else None ), ) builder.add_pack_method() flags = spec.builder.get_pack_method_flags(spec.builder.cls) if spec.builder.is_nailed: return f"{spec.expression}.{method_name}({flags})" else: method_args = spec.expression return f"_cls.{method_name}({method_args})" elif is_required(spec.type) or is_not_required(spec.type): return PackerRegistry.get(spec.copy(type=get_args(spec.type)[0])) elif is_unpack(spec.type): packer = PackerRegistry.get(spec.copy(type=get_args(spec.type)[0])) return f"*{packer}" elif is_type_var_tuple(spec.type): return PackerRegistry.get(spec.copy(type=Tuple[Any, ...])) elif isinstance(spec.type, ForwardRef): evaluated = spec.builder.evaluate_forward_ref( spec.type, spec.owner ) if evaluated is not None: return PackerRegistry.get(spec.copy(type=evaluated)) elif is_type_alias_type(spec.type): return PackerRegistry.get(spec.copy(type=spec.type.__value__)) raise UnserializableDataError( f"{spec.type} as a field type is not supported by mashumaro" ) @register def pack_number_and_bool_and_none(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type in (int, float, bool, NoneType, None): return spec.expression @register def pack_date_objects(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type in (datetime.datetime, datetime.date, datetime.time): return f"{spec.expression}.isoformat()" @register def pack_timedelta(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is datetime.timedelta: return f"{spec.expression}.total_seconds()" @register def pack_timezone(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is datetime.timezone: return f"{spec.expression}.tzname(None)" @register def pack_zone_info(spec: ValueSpec) -> Optional[Expression]: if PY_39_MIN and spec.origin_type is zoneinfo.ZoneInfo: return f"str({spec.expression})" @register def pack_uuid(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is uuid.UUID: return f"str({spec.expression})" @register def pack_ipaddress(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type in ( ipaddress.IPv4Address, ipaddress.IPv6Address, ipaddress.IPv4Network, ipaddress.IPv6Network, ipaddress.IPv4Interface, ipaddress.IPv6Interface, ): return f"str({spec.expression})" @register def pack_decimal(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is Decimal: return f"str({spec.expression})" @register def pack_fraction(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is Fraction: return f"str({spec.expression})" def pack_tuple(spec: ValueSpec, args: Tuple[Type, ...]) -> Expression: if not args: if spec.type in (Tuple, tuple): args = [typing.Any, ...] 
# type: ignore else: return "[]" elif len(args) == 1 and args[0] == (): if not PY_311_MIN: return "[]" if len(args) == 2 and args[1] is Ellipsis: packer = PackerRegistry.get( spec.copy(type=args[0], expression="value", could_be_none=True) ) return f"[{packer} for value in {spec.expression}]" else: arg_indexes: List[Union[int, Tuple[int, Union[int, None]]]] = [] unpack_idx: Optional[int] = None for arg_idx, type_arg in enumerate(args): if is_unpack(type_arg): if unpack_idx is not None: raise TypeError( "Multiple unpacks are disallowed within a single type " f"parameter list for {type_name(spec.type)}" ) unpack_idx = arg_idx if len(args) == 1: arg_indexes.append((arg_idx, None)) elif arg_idx < len(args) - 1: arg_indexes.append((arg_idx, arg_idx + 1 - len(args))) else: arg_indexes.append((arg_idx, None)) else: if unpack_idx is None: arg_indexes.append(arg_idx) else: arg_indexes.append(arg_idx - len(args)) packers: List[Expression] = [] for _idx, _arg_idx in enumerate(arg_indexes): if isinstance(_arg_idx, tuple): p_expr = f"{spec.expression}[{_arg_idx[0]}:{_arg_idx[1]}]" else: p_expr = f"{spec.expression}[{_arg_idx}]" packer = PackerRegistry.get( spec.copy( type=args[_idx], expression=p_expr, could_be_none=True, ) ) if packer != "*[]": packers.append(packer) return f"[{', '.join(packers)}]" def pack_named_tuple(spec: ValueSpec) -> Expression: resolved = resolve_type_params(spec.origin_type, get_args(spec.type))[ spec.origin_type ] annotations = { k: resolved.get(v, v) for k, v in getattr(spec.origin_type, "__annotations__", {}).items() } fields = getattr(spec.type, "_fields", ()) packers = [] as_dict = spec.builder.get_dialect_or_config_option( "namedtuple_as_dict", False ) serialize_option = get_overridden_serialization_method(spec) if serialize_option is not None: if serialize_option == "as_dict": as_dict = True elif serialize_option == "as_list": as_dict = False else: raise UnsupportedSerializationEngine( field_name=spec.field_ctx.name, field_type=spec.type, holder_class=spec.builder.cls, engine=serialize_option, ) for idx, field in enumerate(fields): packer = PackerRegistry.get( spec.copy( type=annotations.get(field, typing.Any), expression=f"{spec.expression}[{idx}]", could_be_none=True, ) ) packers.append(packer) if as_dict: kv = (f"'{key}': {value}" for key, value in zip(fields, packers)) return f"{{{', '.join(kv)}}}" else: return f"[{', '.join(packers)}]" def pack_typed_dict(spec: ValueSpec) -> Expression: resolved = resolve_type_params(spec.origin_type, get_args(spec.type))[ spec.origin_type ] annotations = { k: resolved.get(v, v) for k, v in spec.origin_type.__annotations__.items() } all_keys = list(annotations.keys()) required_keys = getattr(spec.type, "__required_keys__", all_keys) optional_keys = getattr(spec.type, "__optional_keys__", []) lines = CodeLines() method_name = ( f"__pack_typed_dict_{spec.builder.cls.__name__}_" f"{spec.field_ctx.name}__{random_hex()}" ) method_args = "self, value" if spec.builder.is_nailed else "value" default_kwargs = spec.builder.get_pack_method_default_flag_values() if default_kwargs: lines.append(f"def {method_name}({method_args}, {default_kwargs}):") else: lines.append(f"def {method_name}({method_args}):") with lines.indent(): lines.append("d = {}") for key in sorted(required_keys, key=all_keys.index): packer = PackerRegistry.get( spec.copy( type=annotations[key], expression=f"value['{key}']", could_be_none=True, owner=spec.type, ) ) lines.append(f"d['{key}'] = {packer}") for key in sorted(optional_keys, key=all_keys.index): 
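            # Descriptive note (added): optional TypedDict keys may be absent,
            # so the generated code fetches them with a MISSING sentinel and
            # only serializes values that are actually present.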
lines.append(f"key_value = value.get('{key}', MISSING)") with lines.indent("if key_value is not MISSING:"): packer = PackerRegistry.get( spec.copy( type=annotations[key], expression="key_value", could_be_none=True, owner=spec.type, ) ) lines.append(f"d['{key}'] = {packer}") lines.append("return d") lines.append( f"setattr({spec.cls_attrs_name}, '{method_name}', {method_name})" ) if spec.builder.get_config().debug: print(f"{type_name(spec.builder.cls)}:") print(lines.as_text()) exec(lines.as_text(), spec.builder.globals, spec.builder.__dict__) method_args = ", ".join( filter(None, (spec.expression, spec.builder.get_pack_method_flags())) ) return f"{spec.self_attrs_name}.{method_name}({method_args})" @register def pack_collection(spec: ValueSpec) -> Optional[Expression]: if not issubclass(spec.origin_type, typing.Collection): return None elif issubclass(spec.origin_type, enum.Enum): return None args = get_args(spec.type) def inner_expr( arg_num: int = 0, v_name: str = "value", v_type: Optional[Type] = None ) -> Expression: if v_type: return PackerRegistry.get( spec.copy(type=v_type, expression=v_name) ) else: if args and len(args) > arg_num: type_arg: Any = args[arg_num] else: type_arg = Any return PackerRegistry.get( spec.copy( type=type_arg, expression=v_name, could_be_none=True, field_ctx=spec.field_ctx.copy(metadata={}), ) ) def _make_sequence_expression(ie: Expression) -> Expression: if ie == "value": if spec.origin_type in spec.no_copy_collections: return spec.expression elif spec.origin_type is list: return f"{spec.expression}.copy()" return f"[{ie} for value in {spec.expression}]" def _make_mapping_expression(ke: Expression, ve: Expression) -> Expression: if ke == "key" and ve == "value": if spec.origin_type in spec.no_copy_collections: return spec.expression elif spec.origin_type is dict: return f"{spec.expression}.copy()" return f"{{{ke}: {ve} for key, value in {spec.expression}.items()}}" if issubclass(spec.origin_type, typing.ByteString): # type: ignore spec.builder.ensure_object_imported(encodebytes) return f"encodebytes({spec.expression}).decode()" elif issubclass(spec.origin_type, str): return spec.expression elif issubclass(spec.origin_type, Tuple): # type: ignore if is_named_tuple(spec.origin_type): return pack_named_tuple(spec) elif ensure_generic_collection(spec): return pack_tuple(spec, args) elif ensure_generic_collection_subclass( spec, typing.List, typing.Deque, typing.AbstractSet ): ie = inner_expr() return _make_sequence_expression(ie) elif ensure_generic_mapping(spec, args, typing.ChainMap): ke = inner_expr(0, "key") ve = inner_expr(1) return ( f"[{{{ke}: {ve} for key, value in m.items()}} " f"for m in {spec.expression}.maps]" ) elif ensure_generic_mapping(spec, args, typing.OrderedDict): ke = inner_expr(0, "key") ve = inner_expr(1) return _make_mapping_expression(ke, ve) elif ensure_generic_mapping(spec, args, typing.Counter): ke = inner_expr(0, "key") ve = inner_expr(1, v_type=int) return _make_mapping_expression(ke, ve) elif is_typed_dict(spec.origin_type): return pack_typed_dict(spec) elif ensure_generic_mapping(spec, args, typing.Mapping): ke = inner_expr(0, "key") ve = inner_expr(1) return _make_mapping_expression(ke, ve) elif ensure_generic_collection_subclass(spec, typing.Sequence): ie = inner_expr() return _make_sequence_expression(ie) @register def pack_pathlike(spec: ValueSpec) -> Optional[Expression]: if issubclass(spec.origin_type, os.PathLike): return f"{spec.expression}.__fspath__()" @register def pack_enum(spec: ValueSpec) -> Optional[Expression]: if 
issubclass(spec.origin_type, enum.Enum): return f"{spec.expression}.value" mashumaro-3.13.1/mashumaro/core/meta/types/unpack.py000066400000000000000000001372411463331001200224450ustar00rootroot00000000000000import collections import collections.abc import datetime import enum import ipaddress import os import pathlib import types import typing import uuid from abc import ABC from base64 import decodebytes from contextlib import suppress from dataclasses import is_dataclass from decimal import Decimal from fractions import Fraction from typing import ( Any, Callable, ForwardRef, Iterable, List, Optional, Tuple, Type, Union, ) import typing_extensions from mashumaro.core.const import PY_39_MIN, PY_311_MIN from mashumaro.core.helpers import parse_timezone from mashumaro.core.meta.code.lines import CodeLines from mashumaro.core.meta.helpers import ( get_args, get_class_that_defines_method, get_function_arg_annotation, get_literal_values, get_type_var_default, is_final, is_generic, is_literal, is_named_tuple, is_new_type, is_not_required, is_optional, is_required, is_self, is_special_typing_primitive, is_type_alias_type, is_type_var, is_type_var_any, is_type_var_tuple, is_typed_dict, is_union, is_unpack, iter_all_subclasses, not_none_type_arg, resolve_type_params, substitute_type_params, type_name, type_var_has_default, ) from mashumaro.core.meta.types.common import ( AbstractMethodBuilder, AttrsHolder, Expression, ExpressionWrapper, NoneType, Registry, ValueSpec, clean_id, ensure_generic_collection, ensure_generic_collection_subclass, ensure_generic_mapping, expr_or_maybe_none, random_hex, ) from mashumaro.exceptions import ( ThirdPartyModuleNotFoundError, UnserializableDataError, UnserializableField, UnsupportedDeserializationEngine, ) from mashumaro.helper import pass_through from mashumaro.types import ( Discriminator, GenericSerializableType, SerializableType, SerializationStrategy, ) if PY_39_MIN: import zoneinfo try: import ciso8601 except ImportError: # pragma: no cover ciso8601: Optional[types.ModuleType] = None # type: ignore try: import pendulum except ImportError: # pragma: no cover pendulum: Optional[types.ModuleType] = None # type: ignore __all__ = ["UnpackerRegistry", "SubtypeUnpackerBuilder"] UnpackerRegistry = Registry() register = UnpackerRegistry.register class AbstractUnpackerBuilder(AbstractMethodBuilder, ABC): def _generate_method_name(self, spec: ValueSpec) -> str: prefix = self.get_method_prefix() if prefix: prefix = f"{prefix}_" if spec.field_ctx.name: suffix = f"_{spec.field_ctx.name}" else: suffix = "" return ( f"__unpack_{prefix}{spec.builder.cls.__name__}{suffix}" f"__{random_hex()}" ) def _add_definition(self, spec: ValueSpec, lines: CodeLines) -> str: method_name = self._generate_method_name(spec) method_args = self._generate_method_args(spec) if spec.builder.is_nailed: lines.append("@classmethod") lines.append(f"def {method_name}({method_args}):") return method_name def _get_extra_method_args(self) -> List[str]: return [] def _generate_method_args(self, spec: ValueSpec) -> str: default_kwargs = spec.builder.get_unpack_method_default_flag_values() extra_args = self._get_extra_method_args() if extra_args: extra_args_str = f", {', '.join(extra_args)}" else: extra_args_str = "" if spec.builder.is_nailed: first_args = "cls, value" else: first_args = "value" if default_kwargs: return f"{first_args}{extra_args_str}, {default_kwargs}" else: # pragma: no cover # we shouldn't be here because there will be default_kwargs return f"{first_args}{extra_args_str}" def 
_get_call_expr(self, spec: ValueSpec, method_name: str) -> str: method_args = ", ".join( filter( None, (spec.expression, spec.builder.get_unpack_method_flags()) ) ) return f"{spec.cls_attrs_name}.{method_name}({method_args})" class UnionUnpackerBuilder(AbstractUnpackerBuilder): def __init__(self, args: Tuple[Type, ...]): self.union_args = args def get_method_prefix(self) -> str: return "union" def _add_body(self, spec: ValueSpec, lines: CodeLines) -> None: ambiguous_unpacker_types = [] for type_arg in self.union_args: unpacker = UnpackerRegistry.get( spec.copy(type=type_arg, expression="value") ) if type_arg in (bool, str) and unpacker == "value": ambiguous_unpacker_types.append(type_arg) with lines.indent("try:"): lines.append(f"return {unpacker}") lines.append("except Exception: pass") # if len(ambiguous_unpacker_types) >= 2: # warnings.warn( # f"{type_name(spec.builder.cls)}.{spec.field_ctx.name} " # f"({type_name(spec.type)}): " # "In the next release, data marked with Union type " # "containing 'str' and 'bool' will be coerced to the value " # "of the type specified first instead of passing it as is" # ) field_type = spec.builder.get_type_name_identifier( typ=spec.type, resolved_type_params=spec.builder.get_field_resolved_type_params( spec.field_ctx.name ), ) if spec.builder.is_nailed: lines.append( "raise InvalidFieldValue(" f"'{spec.field_ctx.name}',{field_type},value,cls)" ) else: lines.append("raise ValueError(value)") class TypeVarUnpackerBuilder(UnionUnpackerBuilder): def get_method_prefix(self) -> str: return "type_var" class LiteralUnpackerBuilder(AbstractUnpackerBuilder): def _before_build(self, spec: ValueSpec) -> None: spec.builder.add_type_modules(spec.type) def get_method_prefix(self) -> str: return "literal" def _add_body(self, spec: ValueSpec, lines: CodeLines) -> None: for literal_value in get_literal_values(spec.type): if isinstance(literal_value, enum.Enum): lit_type = type(literal_value) enum_type_name = spec.builder.get_type_name_identifier( lit_type ) with lines.indent( f"if value == {enum_type_name}.{literal_value.name}.value:" ): lines.append( f"return {enum_type_name}.{literal_value.name}" ) elif isinstance(literal_value, bytes): unpacker = UnpackerRegistry.get( spec.copy(type=bytes, expression="value") ) with lines.indent("try:"): with lines.indent(f"if {unpacker} == {literal_value!r}:"): lines.append(f"return {literal_value!r}") lines.append("except Exception: pass") elif isinstance( literal_value, (int, str, bool, NoneType), # type: ignore ): with lines.indent(f"if value == {literal_value!r}:"): lines.append(f"return {literal_value!r}") lines.append("raise ValueError(value)") class DiscriminatedUnionUnpackerBuilder(AbstractUnpackerBuilder): def __init__( self, discriminator: Discriminator, base_variants: Optional[Tuple[Type, ...]] = None, ): self.discriminator = discriminator self.base_variants = base_variants or tuple() self._variants_attr: Optional[str] = None def get_method_prefix(self) -> str: return "" def _get_extra_method_args(self) -> List[str]: return ["_dialect", "_default_dialect"] def _get_variants_attr(self, spec: ValueSpec) -> str: if self._variants_attr is None: self._variants_attr = ( f"__mashumaro_{spec.field_ctx.name}_variants_{random_hex()}__" ) return self._variants_attr def _get_variants_map(self, spec: ValueSpec) -> str: variants_attr = self._get_variants_attr(spec) if spec.builder.is_nailed: typ_name = spec.builder.get_type_name_identifier(spec.builder.cls) return f"{typ_name}.{variants_attr}" else: return 
f"{spec.cls_attrs_name}.{variants_attr}" def _get_variant_names(self, spec: ValueSpec) -> List[str]: base_variants = self.base_variants or (spec.origin_type,) variant_names: List[str] = [] if self.discriminator.include_subtypes: spec.builder.ensure_object_imported(iter_all_subclasses) variant_names.extend( f"*iter_all_subclasses(" f"{spec.builder.get_type_name_identifier(base_variant)})" for base_variant in base_variants ) if self.discriminator.include_supertypes: variant_names.extend( map(spec.builder.get_type_name_identifier, base_variants) ) return variant_names def _get_variant_names_iterable(self, spec: ValueSpec) -> str: variant_names = self._get_variant_names(spec) if len(variant_names) == 1: if variant_names[0].startswith("*"): return variant_names[0][1:] else: return f"[{variant_names[0]}]" return f'({", ".join(variant_names)})' @staticmethod def _get_variants_attr_holder(spec: ValueSpec) -> Type: return spec.attrs @staticmethod def _get_variant_method_call(method_name: str, spec: ValueSpec) -> str: method_flags = spec.builder.get_unpack_method_flags() if method_flags: return f"{method_name}(value, {method_flags})" else: return f"{method_name}(value)" def _add_body(self, spec: ValueSpec, lines: CodeLines) -> None: discriminator = self.discriminator variants_attr = self._get_variants_attr(spec) variants_map = self._get_variants_map(spec) variants_attr_holder = self._get_variants_attr_holder(spec) variants = self._get_variant_names_iterable(spec) variants_type_expr = spec.builder.get_type_name_identifier(spec.type) if variants_attr not in variants_attr_holder.__dict__: setattr(variants_attr_holder, variants_attr, {}) variant_method_name = spec.builder.get_unpack_method_name( format_name=spec.builder.format_name ) variant_method_call = self._get_variant_method_call( variant_method_name, spec ) if discriminator.variant_tagger_fn: spec.builder.ensure_object_imported( discriminator.variant_tagger_fn, "variant_tagger_fn" ) variant_tagger_expr = "variant_tagger_fn(variant)" else: variant_tagger_expr = f"variant.__dict__['{discriminator.field}']" if spec.builder.dialect: spec.builder.ensure_object_imported( spec.builder.dialect, clean_id(type_name(spec.builder.dialect)), ) if spec.builder.default_dialect: spec.builder.ensure_object_imported( spec.builder.default_dialect, clean_id(type_name(spec.builder.default_dialect)), ) if discriminator.field: chosen_cls = f"{variants_map}[discriminator]" with lines.indent("try:"): lines.append(f"discriminator = value['{discriminator.field}']") with lines.indent("except KeyError:"): lines.append( f"raise MissingDiscriminatorError('{discriminator.field}')" " from None" ) with lines.indent("try:"): if spec.builder.is_nailed: lines.append(f"return {chosen_cls}.{variant_method_call}") else: lines.append( f"return {spec.attrs_registry_name}" f"[{chosen_cls}].{variant_method_call}" ) with lines.indent("except (KeyError, AttributeError):"): lines.append(f"variants_map = {variants_map}") with lines.indent(f"for variant in {variants}:"): if discriminator.variant_tagger_fn is not None: self._add_register_variant_tags( lines, variant_tagger_expr ) else: with lines.indent("try:"): self._add_register_variant_tags( lines, variant_tagger_expr ) with lines.indent("except KeyError:"): lines.append("continue") self._add_build_variant_unpacker( spec, lines, variant_method_name, variant_method_call ) with lines.indent("try:"): if spec.builder.is_nailed: lines.append( "return variants_map[discriminator]" f".{variant_method_call}" ) else: lines.append( f"return 
{spec.attrs_registry_name}[" "variants_map[discriminator]]" f".{variant_method_call}" ) with lines.indent("except KeyError:"): lines.append( "raise SuitableVariantNotFoundError(" f"{variants_type_expr}, '{discriminator.field}', " "discriminator) from None" ) else: with lines.indent(f"for variant in {variants}:"): with lines.indent("try:"): if spec.builder.is_nailed: lines.append(f"return variant.{variant_method_call}") else: lines.append( f"return {spec.attrs_registry_name}" f"[variant].{variant_method_call}" ) if spec.builder.is_nailed: exc_to_catch = "AttributeError" else: exc_to_catch = "(KeyError, AttributeError)" with lines.indent(f"except {exc_to_catch}:"): self._add_build_variant_unpacker( spec, lines, variant_method_name, variant_method_call ) lines.append("except Exception: pass") lines.append( f"raise SuitableVariantNotFoundError({variants_type_expr}) " "from None" ) def _get_call_expr(self, spec: ValueSpec, method_name: str) -> str: method_args = ", ".join( filter( None, ( spec.expression, clean_id(type_name(spec.builder.dialect)), clean_id(type_name(spec.builder.default_dialect)), spec.builder.get_unpack_method_flags(), ), ) ) return f"{spec.cls_attrs_name}.{method_name}({method_args})" def _add_build_variant_unpacker( self, spec: ValueSpec, lines: CodeLines, variant_method_name: str, variant_method_call: str, ) -> None: if spec.builder.is_nailed: spec.builder.ensure_object_imported(get_class_that_defines_method) lines.append( "if get_class_that_defines_method(" f"'{variant_method_name}',variant) != variant:" ) with lines.indent(): spec.builder.ensure_object_imported(spec.builder.__class__) lines.append( "CodeBuilder(variant, " "dialect=_dialect, " f"format_name={repr(spec.builder.format_name)}, " "default_dialect=_default_dialect)" ".add_unpack_method()" ) if not self.discriminator.field: with lines.indent("try:"): lines.append(f"return variant.{variant_method_call}") lines.append("except Exception: pass") else: spec.builder.ensure_object_imported(AttrsHolder) attrs = f"attrs_{random_hex()}" lines.append(f"{attrs} = AttrsHolder('{attrs}')") lines.append(f"{spec.attrs_registry_name}[variant] = {attrs}") lines.append( "CodeBuilder(variant, " "dialect=_dialect, " f"format_name={repr(spec.builder.format_name)}, " "default_dialect=_default_dialect," f"attrs={attrs}," f"attrs_registry={spec.attrs_registry_name})" ".add_unpack_method()" ) if not self.discriminator.field: with lines.indent("try:"): lines.append(f"return {attrs}.{variant_method_call}") lines.append("except Exception: pass") def _add_register_variant_tags( self, lines: CodeLines, variant_tagger_expr: str ) -> None: if self.discriminator.variant_tagger_fn: lines.append(f"variant_tags = {variant_tagger_expr}") with lines.indent("if type(variant_tags) is list:"): with lines.indent("for varint_tag in variant_tags:"): lines.append("variants_map[varint_tag] = variant") with lines.indent("else:"): lines.append("variants_map[variant_tags] = variant") else: lines.append(f"variants_map[{variant_tagger_expr}] = variant") class SubtypeUnpackerBuilder(DiscriminatedUnionUnpackerBuilder): def _get_variants_attr(self, spec: ValueSpec) -> str: if self._variants_attr is None: assert self.discriminator.include_subtypes self._variants_attr = "__mashumaro_subtype_variants__" return self._variants_attr def _unpack_with_annotated_serialization_strategy( spec: ValueSpec, strategy: SerializationStrategy, ) -> Expression: strategy_type = type(strategy) try: value_type: Union[Type, Any] = get_function_arg_annotation( strategy.deserialize, 
arg_pos=0 ) except (KeyError, ValueError): value_type = Any if isinstance(value_type, ForwardRef): value_type = spec.builder.evaluate_forward_ref( value_type, spec.origin_type ) value_type = substitute_type_params( value_type, # type: ignore resolve_type_params(strategy_type, get_args(spec.type))[strategy_type], ) overridden_fn = f"__{spec.field_ctx.name}_deserialize_{random_hex()}" setattr(spec.attrs, overridden_fn, strategy.deserialize) new_spec = spec.copy(type=value_type) field_metadata = new_spec.field_ctx.metadata if field_metadata.get("serialization_strategy") is strategy: new_spec.field_ctx.metadata = { k: v for k, v in field_metadata.items() if k != "serialization_strategy" } unpacker = UnpackerRegistry.get(new_spec) return f"{spec.cls_attrs_name}.{overridden_fn}({unpacker})" def get_overridden_deserialization_method( spec: ValueSpec, ) -> Optional[Union[Callable, str, ExpressionWrapper]]: deserialize_option = spec.field_ctx.metadata.get("deserialize") if deserialize_option is not None: return deserialize_option checking_types = [spec.type, spec.origin_type] if spec.annotated_type: checking_types.insert(0, spec.annotated_type) for typ in checking_types: for strategy in spec.builder.iter_serialization_strategies( spec.field_ctx.metadata, typ ): if strategy is pass_through: return pass_through elif isinstance(strategy, dict): deserialize_option = strategy.get("deserialize") elif isinstance(strategy, SerializationStrategy): if strategy.__use_annotations__ or is_generic(type(strategy)): return ExpressionWrapper( _unpack_with_annotated_serialization_strategy( spec=spec, strategy=strategy, ) ) deserialize_option = strategy.deserialize if deserialize_option is not None: return deserialize_option @register def unpack_type_with_overridden_deserialization( spec: ValueSpec, ) -> Optional[Expression]: deserialization_method = get_overridden_deserialization_method(spec) if deserialization_method is pass_through: return spec.expression elif isinstance(deserialization_method, ExpressionWrapper): return deserialization_method.expression elif callable(deserialization_method): overridden_fn = f"__{spec.field_ctx.name}_deserialize_{random_hex()}" setattr(spec.attrs, overridden_fn, deserialization_method) return f"{spec.cls_attrs_name}.{overridden_fn}({spec.expression})" def _unpack_annotated_serializable_type( spec: ValueSpec, ) -> Optional[Expression]: try: # noinspection PyProtectedMember # noinspection PyUnresolvedReferences value_type = get_function_arg_annotation( spec.origin_type._deserialize, arg_pos=0 ) except (KeyError, ValueError): raise UnserializableField( field_name=spec.field_ctx.name, field_type=spec.type, holder_class=spec.builder.cls, msg='Method _deserialize must have annotated "value" argument', ) from None if is_self(value_type): return ( f"{spec.builder.get_type_name_identifier(spec.type)}" f"._deserialize({spec.expression})" ) if isinstance(value_type, ForwardRef): value_type = spec.builder.evaluate_forward_ref( value_type, spec.origin_type ) value_type = substitute_type_params( value_type, resolve_type_params(spec.origin_type, get_args(spec.type))[ spec.origin_type ], ) unpacker = UnpackerRegistry.get(spec.copy(type=value_type)) field_type = spec.builder.get_type_name_identifier(spec.type) return f"{field_type}._deserialize({unpacker})" @register def unpack_serializable_type(spec: ValueSpec) -> Optional[Expression]: try: if not issubclass(spec.origin_type, SerializableType): return None except TypeError: return None if spec.origin_type.__use_annotations__: return 
_unpack_annotated_serializable_type(spec) else: field_type = spec.builder.get_type_name_identifier(spec.type) return f"{field_type}._deserialize({spec.expression})" @register def unpack_generic_serializable_type(spec: ValueSpec) -> Optional[Expression]: with suppress(TypeError): if issubclass(spec.origin_type, GenericSerializableType): type_arg_names = ", ".join( list(map(type_name, get_args(spec.type))) ) field_type = spec.builder.get_type_name_identifier( spec.origin_type ) return ( f"{field_type}._deserialize({spec.expression}, " f"[{type_arg_names}])" ) @register def unpack_dataclass(spec: ValueSpec) -> Optional[Expression]: if is_dataclass(spec.origin_type): for annotation in spec.annotations: if isinstance(annotation, Discriminator): return DiscriminatedUnionUnpackerBuilder(annotation).build( spec ) type_args = get_args(spec.type) method_name = spec.builder.get_unpack_method_name( type_args, spec.builder.format_name ) method_loc = spec.origin_type if spec.builder.is_nailed else spec.attrs if get_class_that_defines_method( method_name, method_loc ) != method_loc and ( spec.origin_type != spec.builder.cls or spec.builder.get_unpack_method_name( type_args=type_args, format_name=spec.builder.format_name, decoder=spec.builder.decoder, ) != method_name ): builder = spec.builder.__class__( spec.origin_type, type_args, dialect=spec.builder.dialect, format_name=spec.builder.format_name, default_dialect=spec.builder.default_dialect, attrs=method_loc, attrs_registry=( spec.attrs_registry if not spec.builder.is_nailed else None ), ) builder.add_unpack_method() method_args = ", ".join( filter( None, ( spec.expression, spec.builder.get_unpack_method_flags(spec.type), ), ) ) cls_alias = clean_id(type_name(spec.origin_type)) if spec.builder.is_nailed: spec.builder.ensure_object_imported(spec.origin_type, cls_alias) return f"{cls_alias}.{method_name}({method_args})" else: method_name_alias = f"{cls_alias}_{method_name}" spec.builder.ensure_object_imported( getattr(spec.attrs, method_name), method_name_alias, ) return f"{method_name_alias}({method_args})" @register def unpack_final(spec: ValueSpec) -> Optional[Expression]: if is_final(spec.type): return UnpackerRegistry.get(spec.copy(type=get_args(spec.type)[0])) @register def unpack_any(spec: ValueSpec) -> Optional[Expression]: if spec.type is Any: return spec.expression @register def unpack_special_typing_primitive(spec: ValueSpec) -> Optional[Expression]: if is_special_typing_primitive(spec.origin_type): if is_union(spec.type): resolved_type_params = spec.builder.get_field_resolved_type_params( spec.field_ctx.name ) if is_optional(spec.type, resolved_type_params): arg = not_none_type_arg( get_args(spec.type), resolved_type_params ) uv = UnpackerRegistry.get(spec.copy(type=arg)) return expr_or_maybe_none(spec, uv) else: union_args = get_args(spec.type) for annotation in spec.annotations: if isinstance(annotation, Discriminator): return DiscriminatedUnionUnpackerBuilder( annotation, union_args ).build(spec) return UnionUnpackerBuilder(union_args).build(spec) elif spec.origin_type is typing.AnyStr: raise UnserializableDataError( "AnyStr is not supported by mashumaro" ) elif is_type_var_any(spec.type): return spec.expression elif is_type_var(spec.type): constraints = getattr(spec.type, "__constraints__") if constraints: return TypeVarUnpackerBuilder(constraints).build(spec) else: if type_var_has_default(spec.type): bound = get_type_var_default(spec.type) else: bound = getattr(spec.type, "__bound__") # act as if it was Optional[bound] uv = 
UnpackerRegistry.get(spec.copy(type=bound)) return expr_or_maybe_none(spec, uv) elif is_new_type(spec.type): return UnpackerRegistry.get( spec.copy(type=spec.type.__supertype__) ) elif is_literal(spec.type): return LiteralUnpackerBuilder().build(spec) elif spec.type is typing_extensions.LiteralString: return UnpackerRegistry.get(spec.copy(type=str)) elif is_self(spec.type): method_name = spec.builder.get_unpack_method_name( format_name=spec.builder.format_name ) method_loc = ( spec.builder.cls if spec.builder.is_nailed else spec.attrs ) if ( get_class_that_defines_method(method_name, method_loc) != method_loc # not hasattr(spec.builder.cls, method_name) and spec.builder.get_unpack_method_name( format_name=spec.builder.format_name, decoder=spec.builder.decoder, ) != method_name ): builder = spec.builder.__class__( spec.builder.cls, dialect=spec.builder.dialect, format_name=spec.builder.format_name, default_dialect=spec.builder.default_dialect, attrs=method_loc, attrs_registry=( spec.attrs_registry if not spec.builder.is_nailed else None ), ) builder.add_unpack_method() method_args = ", ".join( filter( None, ( spec.expression, spec.builder.get_unpack_method_flags(spec.builder.cls), ), ) ) if spec.builder.is_nailed: spec.builder.add_type_modules(spec.builder.cls) self_cls_name = spec.builder.get_type_name_identifier( spec.builder.cls ) return f"{self_cls_name}.{method_name}({method_args})" else: return f"_cls.{method_name}({method_args})" elif is_required(spec.type) or is_not_required(spec.type): return UnpackerRegistry.get(spec.copy(type=get_args(spec.type)[0])) elif is_unpack(spec.type): unpacker = UnpackerRegistry.get( spec.copy(type=get_args(spec.type)[0]) ) return f"*{unpacker}" elif is_type_var_tuple(spec.type): return UnpackerRegistry.get(spec.copy(type=Tuple[Any, ...])) elif isinstance(spec.type, ForwardRef): evaluated = spec.builder.evaluate_forward_ref( spec.type, spec.owner ) if evaluated is not None: return UnpackerRegistry.get(spec.copy(type=evaluated)) elif is_type_alias_type(spec.type): return UnpackerRegistry.get(spec.copy(type=spec.type.__value__)) raise UnserializableDataError( f"{spec.type} as a field type is not supported by mashumaro" ) @register def unpack_number(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type in (int, float): return f"{type_name(spec.origin_type)}({spec.expression})" @register def unpack_bool_and_none(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type in (bool, NoneType, None): return spec.expression @register def unpack_date_objects(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type in (datetime.datetime, datetime.date, datetime.time): deserialize_option = get_overridden_deserialization_method(spec) if deserialize_option is not None: if deserialize_option == "ciso8601": if ciso8601: spec.builder.ensure_module_imported(ciso8601) datetime_parser = "ciso8601.parse_datetime" else: raise ThirdPartyModuleNotFoundError( "ciso8601", spec.field_ctx.name, spec.builder.cls ) # pragma: no cover elif deserialize_option == "pendulum": if pendulum: spec.builder.ensure_module_imported(pendulum) datetime_parser = "pendulum.parse" else: raise ThirdPartyModuleNotFoundError( "pendulum", spec.field_ctx.name, spec.builder.cls ) # pragma: no cover else: raise UnsupportedDeserializationEngine( spec.field_ctx.name, spec.type, spec.builder.cls, deserialize_option, ) suffix = "" if spec.origin_type is datetime.date: suffix = ".date()" elif spec.origin_type is datetime.time: suffix = ".time()" return 
f"{datetime_parser}({spec.expression}){suffix}" method = f"__datetime_{spec.origin_type.__name__}_fromisoformat" spec.builder.ensure_object_imported( getattr(datetime, spec.origin_type.__name__).fromisoformat, method, ) return f"{method}({spec.expression})" @register def unpack_timedelta(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is datetime.timedelta: method = "__datetime_timedelta" spec.builder.ensure_object_imported(datetime.timedelta, method) return f"{method}(seconds={spec.expression})" @register def unpack_timezone(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is datetime.timezone: spec.builder.ensure_object_imported(parse_timezone) return f"parse_timezone({spec.expression})" @register def unpack_zone_info(spec: ValueSpec) -> Optional[Expression]: if PY_39_MIN and spec.origin_type is zoneinfo.ZoneInfo: method = "__zoneinfo_ZoneInfo" spec.builder.ensure_object_imported(zoneinfo.ZoneInfo, method) return f"{method}({spec.expression})" @register def unpack_uuid(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is uuid.UUID: method = "__uuid_UUID" spec.builder.ensure_object_imported(uuid.UUID, method) return f"{method}({spec.expression})" @register def unpack_ipaddress(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type in ( ipaddress.IPv4Address, ipaddress.IPv6Address, ipaddress.IPv4Network, ipaddress.IPv6Network, ipaddress.IPv4Interface, ipaddress.IPv6Interface, ): method = f"__ipaddress_{spec.origin_type.__name__}" spec.builder.ensure_object_imported(spec.origin_type, method) return f"{method}({spec.expression})" @register def unpack_decimal(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is Decimal: spec.builder.ensure_object_imported(Decimal) return f"Decimal({spec.expression})" @register def unpack_fraction(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is Fraction: spec.builder.ensure_object_imported(Fraction) return f"Fraction({spec.expression})" def unpack_tuple(spec: ValueSpec, args: Tuple[Type, ...]) -> Expression: if not args: if spec.type in (Tuple, tuple): args = [typing.Any, ...] 
# type: ignore else: return "()" elif len(args) == 1 and args[0] == (): if not PY_311_MIN: return "()" if len(args) == 2 and args[1] is Ellipsis: unpacker = UnpackerRegistry.get( spec.copy(type=args[0], expression="value", could_be_none=True) ) return f"tuple([{unpacker} for value in {spec.expression}])" else: arg_indexes: List[Union[int, Tuple[int, Union[int, None]]]] = [] unpack_idx: Optional[int] = None for arg_idx, type_arg in enumerate(args): if is_unpack(type_arg): if unpack_idx is not None: raise TypeError( "Multiple unpacks are disallowed within a single type " f"parameter list for {type_name(spec.type)}" ) unpack_idx = arg_idx if len(args) == 1: arg_indexes.append((arg_idx, None)) elif arg_idx < len(args) - 1: arg_indexes.append((arg_idx, arg_idx + 1 - len(args))) else: arg_indexes.append((arg_idx, None)) else: if unpack_idx is None: arg_indexes.append(arg_idx) else: arg_indexes.append(arg_idx - len(args)) unpackers: List[Expression] = [] for _idx, _arg_idx in enumerate(arg_indexes): if isinstance(_arg_idx, tuple): u_expr = f"{spec.expression}[{_arg_idx[0]}:{_arg_idx[1]}]" else: u_expr = f"{spec.expression}[{_arg_idx}]" unpacker = UnpackerRegistry.get( spec.copy( type=args[_idx], expression=u_expr, could_be_none=True, ) ) if unpacker != "*()": # workaround for empty tuples unpackers.append(unpacker) return f"tuple([{', '.join(unpackers)}])" def unpack_named_tuple(spec: ValueSpec) -> Expression: resolved = resolve_type_params(spec.origin_type, get_args(spec.type))[ spec.origin_type ] annotations = { k: resolved.get(v, v) for k, v in getattr(spec.origin_type, "__annotations__", {}).items() } fields = getattr(spec.type, "_fields", ()) defaults = getattr(spec.type, "_field_defaults", {}) unpackers = [] as_dict = spec.builder.get_dialect_or_config_option( "namedtuple_as_dict", False ) deserialize_option = get_overridden_deserialization_method(spec) if deserialize_option is not None: if deserialize_option == "as_dict": as_dict = True elif deserialize_option == "as_list": as_dict = False else: raise UnsupportedDeserializationEngine( field_name=spec.field_ctx.name, field_type=spec.type, holder_class=spec.builder.cls, engine=deserialize_option, ) field_indices: Iterable[Any] if as_dict: field_indices = zip((f"'{name}'" for name in fields), fields) else: field_indices = enumerate(fields) if not defaults: packed_value = spec.expression else: packed_value = "value" for idx, field in field_indices: unpacker = UnpackerRegistry.get( spec.copy( type=annotations.get(field, Any), expression=f"{packed_value}[{idx}]", could_be_none=True, ) ) unpackers.append(unpacker) if not defaults: field_type = spec.builder.get_type_name_identifier(spec.type) return f"{field_type}({', '.join(unpackers)})" lines = CodeLines() method_name = ( f"__unpack_named_tuple_{spec.builder.cls.__name__}_" f"{spec.field_ctx.name}__{random_hex()}" ) default_kwargs = spec.builder.get_unpack_method_default_flag_values() if spec.builder.is_nailed: lines.append("@classmethod") method_args = "cls, value" else: method_args = "value" if default_kwargs: lines.append(f"def {method_name}({method_args}, {default_kwargs}):") else: # pragma: no cover # we shouldn't be here because there will be default_kwargs lines.append(f"def {method_name}({method_args}):") with lines.indent(): lines.append("fields = []") with lines.indent("try:"): for unpacker in unpackers: lines.append(f"fields.append({unpacker})") with lines.indent("except IndexError:"): lines.append("pass") field_type = spec.builder.get_type_name_identifier(spec.type) 
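        # Descriptive note (added): the generated method rebuilds the named
        # tuple from whatever fields were collected; the IndexError pass
        # above lets declared _field_defaults apply when the serialized
        # sequence is shorter than the tuple.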
lines.append(f"return {field_type}(*fields)") lines.append( f"setattr({spec.cls_attrs_name}, '{method_name}', {method_name})" ) if spec.builder.get_config().debug: print(f"{type_name(spec.builder.cls)}:") print(lines.as_text()) exec(lines.as_text(), spec.builder.globals, spec.builder.__dict__) method_args = ", ".join( filter(None, (spec.expression, spec.builder.get_unpack_method_flags())) ) return f"{spec.cls_attrs_name}.{method_name}({method_args})" def unpack_typed_dict(spec: ValueSpec) -> Expression: resolved = resolve_type_params(spec.origin_type, get_args(spec.type))[ spec.origin_type ] annotations = { k: resolved.get(v, v) for k, v in spec.origin_type.__annotations__.items() } all_keys = list(annotations.keys()) required_keys = getattr(spec.type, "__required_keys__", all_keys) optional_keys = getattr(spec.type, "__optional_keys__", []) lines = CodeLines() method_name = ( f"__unpack_typed_dict_{spec.builder.cls.__name__}_" f"{spec.field_ctx.name}__{random_hex()}" ) default_kwargs = spec.builder.get_unpack_method_default_flag_values() if spec.builder.is_nailed: lines.append("@classmethod") method_args = "cls, value" else: method_args = "value" if default_kwargs: lines.append(f"def {method_name}({method_args}, {default_kwargs}):") else: # pragma: no cover # we shouldn't be here because there will be default_kwargs lines.append(f"def {method_name}({method_args}):") with lines.indent(): lines.append("d = {}") for key in sorted(required_keys, key=all_keys.index): unpacker = UnpackerRegistry.get( spec.copy( type=annotations[key], expression=f"value['{key}']", could_be_none=True, owner=spec.type, ) ) lines.append(f"d['{key}'] = {unpacker}") for key in sorted(optional_keys, key=all_keys.index): lines.append(f"key_value = value.get('{key}', MISSING)") with lines.indent("if key_value is not MISSING:"): unpacker = UnpackerRegistry.get( spec.copy( type=annotations[key], expression="key_value", could_be_none=True, owner=spec.type, ) ) lines.append(f"d['{key}'] = {unpacker}") lines.append("return d") lines.append( f"setattr({spec.cls_attrs_name}, '{method_name}', {method_name})" ) if spec.builder.get_config().debug: print(f"{type_name(spec.builder.cls)}:") print(lines.as_text()) exec(lines.as_text(), spec.builder.globals, spec.builder.__dict__) method_args = ", ".join( filter(None, (spec.expression, spec.builder.get_unpack_method_flags())) ) return f"{spec.cls_attrs_name}.{method_name}({method_args})" @register def unpack_collection(spec: ValueSpec) -> Optional[Expression]: if not issubclass(spec.origin_type, typing.Collection): return None elif issubclass(spec.origin_type, enum.Enum): return None args = get_args(spec.type) def inner_expr( arg_num: int = 0, v_name: str = "value", v_type: Optional[Type] = None ) -> Expression: if v_type: return UnpackerRegistry.get( spec.copy(type=v_type, expression=v_name) ) else: if args and len(args) > arg_num: type_arg: Any = args[arg_num] else: type_arg = Any return UnpackerRegistry.get( spec.copy( type=type_arg, expression=v_name, could_be_none=True, field_ctx=spec.field_ctx.copy(metadata={}), ) ) if issubclass(spec.origin_type, typing.ByteString): # type: ignore if spec.origin_type is bytes: spec.builder.ensure_object_imported(decodebytes) return f"decodebytes({spec.expression}.encode())" elif spec.origin_type is bytearray: spec.builder.ensure_object_imported(decodebytes) return f"bytearray(decodebytes({spec.expression}.encode()))" elif issubclass(spec.origin_type, str): return spec.expression elif ensure_generic_collection_subclass(spec, List): return 
f"[{inner_expr()} for value in {spec.expression}]" elif ensure_generic_collection_subclass(spec, typing.Deque): spec.builder.ensure_module_imported(collections) return ( f"collections.deque([{inner_expr()} " f"for value in {spec.expression}])" ) elif issubclass(spec.origin_type, Tuple): # type: ignore if is_named_tuple(spec.origin_type): return unpack_named_tuple(spec) elif ensure_generic_collection(spec): return unpack_tuple(spec, args) elif ensure_generic_collection_subclass(spec, typing.FrozenSet): return f"frozenset([{inner_expr()} for value in {spec.expression}])" elif ensure_generic_collection_subclass(spec, typing.AbstractSet): return f"set([{inner_expr()} for value in {spec.expression}])" elif ensure_generic_mapping(spec, args, typing.ChainMap): spec.builder.ensure_module_imported(collections) return ( f'collections.ChainMap(*[{{{inner_expr(0, "key")}:{inner_expr(1)} ' f"for key, value in m.items()}} for m in {spec.expression}])" ) elif ensure_generic_mapping(spec, args, typing.OrderedDict): spec.builder.ensure_module_imported(collections) return ( f'collections.OrderedDict({{{inner_expr(0, "key")}: ' f"{inner_expr(1)} for key, value in {spec.expression}.items()}})" ) elif ensure_generic_mapping(spec, args, typing.DefaultDict): spec.builder.ensure_module_imported(collections) default_type = type_name(args[1] if args else None) return ( f"collections.defaultdict({default_type}, " f"{{{inner_expr(0, 'key')}: " f"{inner_expr(1)} for key, value in {spec.expression}.items()}})" ) elif ensure_generic_mapping(spec, args, typing.Counter): spec.builder.ensure_module_imported(collections) return ( f'collections.Counter({{{inner_expr(0, "key")}: ' f"{inner_expr(1, v_type=int)} " f"for key, value in {spec.expression}.items()}})" ) elif is_typed_dict(spec.origin_type): return unpack_typed_dict(spec) elif issubclass(spec.origin_type, types.MappingProxyType): spec.builder.ensure_module_imported(types) return ( f'types.MappingProxyType({{{inner_expr(0, "key")}: {inner_expr(1)}' f" for key, value in {spec.expression}.items()}})" ) elif ensure_generic_mapping(spec, args, typing.Mapping): return ( f'{{{inner_expr(0, "key")}: {inner_expr(1)} ' f"for key, value in {spec.expression}.items()}}" ) elif ensure_generic_collection_subclass(spec, typing.Sequence): return f"[{inner_expr()} for value in {spec.expression}]" @register def unpack_pathlike(spec: ValueSpec) -> Optional[Expression]: if spec.origin_type is os.PathLike: spec.builder.ensure_module_imported(pathlib) return f"{type_name(pathlib.PurePath)}({spec.expression})" elif issubclass(spec.origin_type, os.PathLike): field_type = spec.builder.get_type_name_identifier(spec.origin_type) return f"{field_type}({spec.expression})" @register def unpack_enum(spec: ValueSpec) -> Optional[Expression]: if issubclass(spec.origin_type, enum.Enum): field_type = spec.builder.get_type_name_identifier(spec.origin_type) return f"{field_type}({spec.expression})" mashumaro-3.13.1/mashumaro/dialect.py000066400000000000000000000044471463331001200175500ustar00rootroot00000000000000from types import new_class from typing import Any, Callable, Dict, Sequence, Type, Union, cast from typing_extensions import Literal from mashumaro.core.const import Sentinel from mashumaro.types import SerializationStrategy __all__ = ["Dialect"] SerializationStrategyValueType = Union[ SerializationStrategy, Dict[str, Union[str, Callable]] ] class Dialect: serialization_strategy: Dict[Any, SerializationStrategyValueType] = {} serialize_by_alias: Union[bool, Literal[Sentinel.MISSING]] = ( 
Sentinel.MISSING ) namedtuple_as_dict: Union[bool, Literal[Sentinel.MISSING]] = ( Sentinel.MISSING ) omit_none: Union[bool, Literal[Sentinel.MISSING]] = Sentinel.MISSING omit_default: Union[bool, Literal[Sentinel.MISSING]] = Sentinel.MISSING no_copy_collections: Union[Sequence[Any], Literal[Sentinel.MISSING]] = ( Sentinel.MISSING ) @classmethod def merge(cls, other: Type["Dialect"]) -> Type["Dialect"]: serialization_strategy: Dict[Any, SerializationStrategyValueType] = {} for key, value in cls.serialization_strategy.items(): if isinstance(value, SerializationStrategy): serialization_strategy[key] = value else: serialization_strategy[key] = value.copy() for key, value in other.serialization_strategy.items(): if isinstance(value, SerializationStrategy): serialization_strategy[key] = value elif isinstance( serialization_strategy.get(key), SerializationStrategy ): serialization_strategy[key] = value else: ( serialization_strategy.setdefault( key, {} ).update( # type: ignore value ) ) new_dialect = cast(Type[Dialect], new_class("Dialect", (Dialect,))) new_dialect.serialization_strategy = serialization_strategy for key in ("omit_none", "omit_default", "no_copy_collections"): if (others_value := getattr(other, key)) is not Sentinel.MISSING: setattr(new_dialect, key, others_value) else: setattr(new_dialect, key, getattr(cls, key)) return new_dialect mashumaro-3.13.1/mashumaro/exceptions.py000066400000000000000000000136121463331001200203160ustar00rootroot00000000000000from typing import Any, Optional, Set, Type from mashumaro.core.meta.helpers import type_name class MissingField(LookupError): def __init__(self, field_name: str, field_type: Type, holder_class: Type): self.field_name = field_name self.field_type = field_type self.holder_class = holder_class @property def field_type_name(self) -> str: return type_name(self.field_type, short=True) @property def holder_class_name(self) -> str: return type_name(self.holder_class, short=True) def __str__(self) -> str: return ( f'Field "{self.field_name}" of type {self.field_type_name}' f" is missing in {self.holder_class_name} instance" ) class ExtraKeysError(ValueError): def __init__(self, extra_keys: Set[str], target_type: Type): self.extra_keys = extra_keys self.target_type = target_type @property def target_class_name(self) -> str: return type_name(self.target_type, short=True) def __str__(self) -> str: extra_keys_str = ", ".join(k for k in self.extra_keys) return ( "Serialized dict has keys that are not defined in " f"{self.target_class_name}: {extra_keys_str}" ) class UnserializableDataError(TypeError): pass class UnserializableField(UnserializableDataError): def __init__( self, field_name: str, field_type: Type, holder_class: Type, msg: Optional[str] = None, ): self.field_name = field_name self.field_type = field_type self.holder_class = holder_class self.msg = msg @property def field_type_name(self) -> str: return type_name(self.field_type, short=True) @property def holder_class_name(self) -> str: return type_name(self.holder_class, short=True) def __str__(self) -> str: s = ( f'Field "{self.field_name}" of type {self.field_type_name} ' f"in {self.holder_class_name} is not serializable" ) if self.msg: s += f": {self.msg}" return s class UnsupportedSerializationEngine(UnserializableField): def __init__( self, field_name: str, field_type: Type, holder_class: Type, engine: Any, ): super(UnsupportedSerializationEngine, self).__init__( field_name, field_type, holder_class, msg=f'Unsupported serialization engine "{engine}"', ) class 
UnsupportedDeserializationEngine(UnserializableField): def __init__( self, field_name: str, field_type: Type, holder_class: Type, engine: Any, ): super(UnsupportedDeserializationEngine, self).__init__( field_name, field_type, holder_class, msg=f'Unsupported deserialization engine "{engine}"', ) class InvalidFieldValue(ValueError): def __init__( self, field_name: str, field_type: Type, field_value: Any, holder_class: Type, msg: Optional[str] = None, ): self.field_name = field_name self.field_type = field_type self.field_value = field_value self.holder_class = holder_class self.msg = msg @property def field_type_name(self) -> str: return type_name(self.field_type, short=True) @property def holder_class_name(self) -> str: return type_name(self.holder_class, short=True) def __str__(self) -> str: s = ( f'Field "{self.field_name}" of type {self.field_type_name} ' f"in {self.holder_class_name} has invalid value " f"{repr(self.field_value)}" ) if self.msg: s += f": {self.msg}" return s class MissingDiscriminatorError(LookupError): def __init__(self, field_name: str): self.field_name = field_name def __str__(self) -> str: return f"Discriminator '{self.field_name}' is missing" class SuitableVariantNotFoundError(ValueError): def __init__( self, variants_type: Type, discriminator_name: Optional[str] = None, discriminator_value: Any = None, ): self.variants_type = variants_type self.discriminator_name = discriminator_name self.discriminator_value = discriminator_value def __str__(self) -> str: s = f"{type_name(self.variants_type)} has no " if self.discriminator_value is not None: s += ( f"subtype with attribute '{self.discriminator_name}' " f"equal to {self.discriminator_value!r}" ) else: s += "suitable subtype" return s class BadHookSignature(TypeError): pass class ThirdPartyModuleNotFoundError(ModuleNotFoundError): def __init__(self, module_name: str, field_name: str, holder_class: Type): self.module_name = module_name self.field_name = field_name self.holder_class = holder_class @property def holder_class_name(self) -> str: return type_name(self.holder_class, short=True) def __str__(self) -> str: s = ( f'Install "{self.module_name}" to use it as the serialization ' f'method for the field "{self.field_name}" ' f"in {self.holder_class_name}" ) return s class UnresolvedTypeReferenceError(NameError): def __init__(self, holder_class: Type, unresolved_type_name: str): self.holder_class = holder_class self.name = unresolved_type_name @property def holder_class_name(self) -> str: return type_name(self.holder_class, short=True) def __str__(self) -> str: return ( f"Class {self.holder_class_name} has unresolved type reference " f"{self.name} in some of its fields" ) class BadDialect(ValueError): pass mashumaro-3.13.1/mashumaro/helper.py000066400000000000000000000027001463331001200174100ustar00rootroot00000000000000from typing import Any, Callable, Dict, Optional, TypeVar, Union from typing_extensions import Literal from mashumaro.types import SerializationStrategy __all__ = [ "field_options", "pass_through", ] NamedTupleDeserializationEngine = Literal["as_dict", "as_list"] DateTimeDeserializationEngine = Literal["ciso8601", "pendulum"] AnyDeserializationEngine = Literal[ NamedTupleDeserializationEngine, DateTimeDeserializationEngine ] NamedTupleSerializationEngine = Literal["as_dict", "as_list"] OmitSerializationEngine = Literal["omit"] AnySerializationEngine = Union[ NamedTupleSerializationEngine, OmitSerializationEngine ] T = TypeVar("T") def field_options( serialize: Optional[ Union[AnySerializationEngine, 
Callable[[Any], Any]] ] = None, deserialize: Optional[ Union[AnyDeserializationEngine, Callable[[Any], Any]] ] = None, serialization_strategy: Optional[SerializationStrategy] = None, alias: Optional[str] = None, ) -> Dict[str, Any]: return { "serialize": serialize, "deserialize": deserialize, "serialization_strategy": serialization_strategy, "alias": alias, } class _PassThrough(SerializationStrategy): def __call__(self, *args: Any, **kwargs: Any) -> Any: raise NotImplementedError def serialize(self, value: T) -> T: return value def deserialize(self, value: T) -> T: return value pass_through = _PassThrough() mashumaro-3.13.1/mashumaro/jsonschema/000077500000000000000000000000001463331001200177125ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/jsonschema/__init__.py000066400000000000000000000003261463331001200220240ustar00rootroot00000000000000from .builder import JSONSchemaBuilder, build_json_schema from .dialects import DRAFT_2020_12, OPEN_API_3_1 __all__ = [ "JSONSchemaBuilder", "build_json_schema", "DRAFT_2020_12", "OPEN_API_3_1", ] mashumaro-3.13.1/mashumaro/jsonschema/annotations.py000066400000000000000000000041471463331001200226270ustar00rootroot00000000000000from dataclasses import dataclass from typing import Dict, Set from mashumaro.jsonschema.models import JSONSchema, Number class Annotation: pass class Constraint(Annotation): pass class NumberConstraint(Constraint): pass @dataclass(unsafe_hash=True) class Minimum(NumberConstraint): value: Number @dataclass(unsafe_hash=True) class Maximum(NumberConstraint): value: Number @dataclass(unsafe_hash=True) class ExclusiveMinimum(NumberConstraint): value: Number @dataclass(unsafe_hash=True) class ExclusiveMaximum(NumberConstraint): value: Number @dataclass(unsafe_hash=True) class MultipleOf(NumberConstraint): value: Number class StringConstraint(Constraint): pass @dataclass(unsafe_hash=True) class MinLength(StringConstraint): value: int @dataclass(unsafe_hash=True) class MaxLength(StringConstraint): value: int @dataclass(unsafe_hash=True) class Pattern(StringConstraint): value: str class ArrayConstraint(Constraint): pass @dataclass(unsafe_hash=True) class MinItems(ArrayConstraint): value: int @dataclass(unsafe_hash=True) class MaxItems(ArrayConstraint): value: int @dataclass(unsafe_hash=True) class UniqueItems(ArrayConstraint): value: bool @dataclass(unsafe_hash=True) class Contains(ArrayConstraint): value: JSONSchema @dataclass(unsafe_hash=True) class MinContains(ArrayConstraint): value: int @dataclass(unsafe_hash=True) class MaxContains(ArrayConstraint): value: int class ObjectConstraint(Constraint): pass @dataclass(unsafe_hash=True) class MaxProperties(ObjectConstraint): value: int @dataclass(unsafe_hash=True) class MinProperties(ObjectConstraint): value: int @dataclass class DependentRequired(ObjectConstraint): value: Dict[str, Set[str]] __all__ = [ "Annotation", "MultipleOf", "Maximum", "ExclusiveMaximum", "Minimum", "ExclusiveMinimum", "MaxLength", "MinLength", "Pattern", "MaxItems", "MinItems", "UniqueItems", "Contains", "MaxContains", "MinContains", "MaxProperties", "MinProperties", "DependentRequired", ] mashumaro-3.13.1/mashumaro/jsonschema/builder.py000066400000000000000000000054261463331001200217210ustar00rootroot00000000000000from dataclasses import dataclass from typing import Any, Dict, List, Optional, Type from mashumaro.jsonschema.dialects import DRAFT_2020_12, JSONSchemaDialect from mashumaro.jsonschema.models import Context, JSONSchema from mashumaro.jsonschema.schema import Instance, get_schema try: from 
mashumaro.mixins.orjson import ( DataClassORJSONMixin as DataClassJSONMixin, ) except ImportError: # pragma: no cover from mashumaro.mixins.json import DataClassJSONMixin # type: ignore def build_json_schema( instance_type: Type, context: Optional[Context] = None, with_definitions: bool = True, all_refs: Optional[bool] = None, with_dialect_uri: bool = False, dialect: Optional[JSONSchemaDialect] = None, ref_prefix: Optional[str] = None, ) -> JSONSchema: if context is None: context = Context() else: context = Context( dialect=context.dialect, definitions=context.definitions, all_refs=context.all_refs, ref_prefix=context.ref_prefix, ) if dialect is not None: context.dialect = dialect if all_refs is not None: context.all_refs = all_refs elif context.all_refs is None: context.all_refs = context.dialect.all_refs if ref_prefix is not None: context.ref_prefix = ref_prefix.rstrip("/") elif context.ref_prefix is None: context.ref_prefix = context.dialect.definitions_root_pointer instance = Instance(instance_type) schema = get_schema(instance, context, with_dialect_uri=with_dialect_uri) if with_definitions and context.definitions: schema.definitions = context.definitions return schema @dataclass class JSONSchemaDefinitions(DataClassJSONMixin): definitions: Dict[str, JSONSchema] def __post_serialize__( # type: ignore self, d: Dict[Any, Any] ) -> List[Dict[str, Any]]: return d["definitions"] class JSONSchemaBuilder: def __init__( self, dialect: JSONSchemaDialect = DRAFT_2020_12, all_refs: Optional[bool] = None, ref_prefix: Optional[str] = None, ): if all_refs is None: all_refs = dialect.all_refs if ref_prefix is None: ref_prefix = dialect.definitions_root_pointer self.context = Context( dialect=dialect, all_refs=all_refs, ref_prefix=ref_prefix.rstrip("/"), ) def build(self, instance_type: Type) -> JSONSchema: return build_json_schema( instance_type=instance_type, context=self.context, with_definitions=False, ) def get_definitions(self) -> JSONSchemaDefinitions: return JSONSchemaDefinitions(self.context.definitions) __all__ = ["JSONSchemaBuilder", "build_json_schema"] mashumaro-3.13.1/mashumaro/jsonschema/dialects.py000066400000000000000000000013531463331001200220560ustar00rootroot00000000000000from dataclasses import dataclass @dataclass(frozen=True) class JSONSchemaDialect: uri: str definitions_root_pointer: str all_refs: bool @dataclass(frozen=True) class JSONSchemaDraft202012Dialect(JSONSchemaDialect): uri: str = "https://json-schema.org/draft/2020-12/schema" definitions_root_pointer: str = "#/$defs" all_refs: bool = False @dataclass(frozen=True) class OpenAPISchema31Dialect(JSONSchemaDialect): uri: str = "https://spec.openapis.org/oas/3.1/dialect/base" definitions_root_pointer: str = "#/components/schemas" all_refs: bool = True DRAFT_2020_12 = JSONSchemaDraft202012Dialect() OPEN_API_3_1 = OpenAPISchema31Dialect() __all__ = ["JSONSchemaDialect", "DRAFT_2020_12", "OPEN_API_3_1"] mashumaro-3.13.1/mashumaro/jsonschema/models.py000066400000000000000000000133621463331001200215540ustar00rootroot00000000000000import datetime import ipaddress from dataclasses import MISSING, dataclass, field from enum import Enum from typing import Any, Dict, List, Optional, Set, Union from typing_extensions import TypeAlias from mashumaro.config import BaseConfig from mashumaro.helper import pass_through from mashumaro.jsonschema.dialects import DRAFT_2020_12, JSONSchemaDialect try: from mashumaro.mixins.orjson import ( DataClassORJSONMixin as DataClassJSONMixin, ) except ImportError: # pragma: no cover from 
mashumaro.mixins.json import DataClassJSONMixin # type: ignore # https://github.com/python/mypy/issues/3186 Number: TypeAlias = Union[int, float] Null = object() class JSONSchemaInstanceType(Enum): NULL = "null" BOOLEAN = "boolean" OBJECT = "object" ARRAY = "array" NUMBER = "number" STRING = "string" INTEGER = "integer" class JSONSchemaInstanceFormat(Enum): pass class JSONSchemaStringFormat(JSONSchemaInstanceFormat): DATETIME = "date-time" DATE = "date" TIME = "time" DURATION = "duration" EMAIL = "email" IDN_EMAIL = "idn-email" HOSTNAME = "hostname" IDN_HOSTNAME = "idn-hostname" IPV4ADDRESS = "ipv4" IPV6ADDRESS = "ipv6" URI = "uri" URI_REFERENCE = "uri-reference" IRI = "iri" IRI_REFERENCE = "iri-reference" UUID = "uuid" URI_TEMPLATE = "uri-template" JSON_POINTER = "json-pointer" RELATIVE_JSON_POINTER = "relative-json-pointer" REGEX = "regex" class JSONSchemaInstanceFormatExtension(JSONSchemaInstanceFormat): TIMEDELTA = "time-delta" TIME_ZONE = "time-zone" IPV4NETWORK = "ipv4network" IPV6NETWORK = "ipv6network" IPV4INTERFACE = "ipv4interface" IPV6INTERFACE = "ipv6interface" DECIMAL = "decimal" FRACTION = "fraction" BASE64 = "base64" PATH = "path" DATETIME_FORMATS = { datetime.datetime: JSONSchemaStringFormat.DATETIME, datetime.date: JSONSchemaStringFormat.DATE, datetime.time: JSONSchemaStringFormat.TIME, } IPADDRESS_FORMATS = { ipaddress.IPv4Address: JSONSchemaStringFormat.IPV4ADDRESS, ipaddress.IPv6Address: JSONSchemaStringFormat.IPV6ADDRESS, ipaddress.IPv4Network: JSONSchemaInstanceFormatExtension.IPV4NETWORK, ipaddress.IPv6Network: JSONSchemaInstanceFormatExtension.IPV6NETWORK, ipaddress.IPv4Interface: JSONSchemaInstanceFormatExtension.IPV4INTERFACE, ipaddress.IPv6Interface: JSONSchemaInstanceFormatExtension.IPV6INTERFACE, } @dataclass(unsafe_hash=True) class JSONSchema(DataClassJSONMixin): # Common keywords schema: Optional[str] = None type: Optional[JSONSchemaInstanceType] = None enum: Optional[List[Any]] = None const: Optional[Any] = field(default_factory=lambda: MISSING) format: Optional[ Union[JSONSchemaStringFormat, JSONSchemaInstanceFormatExtension] ] = None title: Optional[str] = None description: Optional[str] = None anyOf: Optional[List["JSONSchema"]] = None reference: Optional[str] = None definitions: Optional[Dict[str, "JSONSchema"]] = None default: Optional[Any] = field(default_factory=lambda: MISSING) deprecated: Optional[bool] = None examples: Optional[List[Any]] = None # Keywords for Objects properties: Optional[Dict[str, "JSONSchema"]] = None patternProperties: Optional[Dict[str, "JSONSchema"]] = None additionalProperties: Union["JSONSchema", bool, None] = None propertyNames: Optional["JSONSchema"] = None # Keywords for Arrays prefixItems: Optional[List["JSONSchema"]] = None items: Optional["JSONSchema"] = None contains: Optional["JSONSchema"] = None # Validation keywords for numeric instances multipleOf: Optional[Number] = None maximum: Optional[Number] = None exclusiveMaximum: Optional[Number] = None minimum: Optional[Number] = None exclusiveMinimum: Optional[Number] = None # Validation keywords for Strings maxLength: Optional[int] = None minLength: Optional[int] = None pattern: Optional[str] = None # Validation keywords for Arrays maxItems: Optional[int] = None minItems: Optional[int] = None uniqueItems: Optional[bool] = None maxContains: Optional[int] = None minContains: Optional[int] = None # Validation keywords for Objects maxProperties: Optional[int] = None minProperties: Optional[int] = None required: Optional[List[str]] = None dependentRequired: 
Optional[Dict[str, Set[str]]] = None class Config(BaseConfig): omit_none = True serialize_by_alias = True aliases = { "schema": "$schema", "reference": "$ref", "definitions": "$defs", } serialization_strategy = { int: pass_through, float: pass_through, Null: pass_through, } def __pre_serialize__(self) -> "JSONSchema": if self.const is None: self.const = Null if self.default is None: self.default = Null return self def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]: const = d.get("const") if const is MISSING: d.pop("const") elif const is Null: d["const"] = None default = d.get("default") if default is MISSING: d.pop("default") elif default is Null: d["default"] = None return d @dataclass class JSONObjectSchema(JSONSchema): type: JSONSchemaInstanceType = JSONSchemaInstanceType.OBJECT @dataclass class JSONArraySchema(JSONSchema): type: JSONSchemaInstanceType = JSONSchemaInstanceType.ARRAY @dataclass class Context: dialect: JSONSchemaDialect = DRAFT_2020_12 definitions: Dict[str, JSONSchema] = field(default_factory=dict) all_refs: Optional[bool] = None ref_prefix: Optional[str] = None mashumaro-3.13.1/mashumaro/jsonschema/schema.py000066400000000000000000000700411463331001200215260ustar00rootroot00000000000000import datetime import ipaddress import os import typing import warnings from base64 import encodebytes from dataclasses import MISSING, dataclass, field, is_dataclass, replace from decimal import Decimal from enum import Enum from fractions import Fraction from functools import cached_property from typing import ( Any, Callable, Dict, ForwardRef, Iterable, List, Optional, Tuple, Type, Union, ) from uuid import UUID from typing_extensions import TypeAlias from mashumaro.config import BaseConfig from mashumaro.core.const import PY_39_MIN, PY_311_MIN from mashumaro.core.meta.code.builder import CodeBuilder from mashumaro.core.meta.helpers import ( evaluate_forward_ref, get_args, get_forward_ref_referencing_globals, get_function_return_annotation, get_literal_values, get_type_origin, is_annotated, is_generic, is_literal, is_named_tuple, is_new_type, is_not_required, is_required, is_special_typing_primitive, is_type_var, is_type_var_any, is_type_var_tuple, is_typed_dict, is_union, is_unpack, resolve_type_params, type_name, ) from mashumaro.core.meta.types.common import NoneType from mashumaro.helper import pass_through from mashumaro.jsonschema.annotations import ( Annotation, Contains, DependentRequired, ExclusiveMaximum, ExclusiveMinimum, MaxContains, Maximum, MaxItems, MaxLength, MaxProperties, MinContains, Minimum, MinItems, MinLength, MinProperties, MultipleOf, Pattern, UniqueItems, ) from mashumaro.jsonschema.models import ( DATETIME_FORMATS, IPADDRESS_FORMATS, Context, JSONArraySchema, JSONObjectSchema, JSONSchema, JSONSchemaInstanceFormatExtension, JSONSchemaInstanceType, JSONSchemaStringFormat, ) from mashumaro.types import SerializationStrategy if PY_39_MIN: from zoneinfo import ZoneInfo try: from mashumaro.mixins.orjson import ( DataClassORJSONMixin as DataClassJSONMixin, ) except ImportError: # pragma: no cover from mashumaro.mixins.json import DataClassJSONMixin # type: ignore UTC_OFFSET_PATTERN = r"^UTC([+-][0-2][0-9]:[0-5][0-9])?$" @dataclass class Instance: type: Type name: Optional[str] = None __owner_builder: Optional[CodeBuilder] = None __self_builder: Optional[CodeBuilder] = None origin_type: Type = field(init=False) annotations: List[Annotation] = field(init=False, default_factory=list) @cached_property def metadata(self) -> Dict[str, Any]: if self.name and 
self.__owner_builder: return dict(**self.__owner_builder.metadatas.get(self.name, {})) else: return {} @property def _self_builder(self) -> CodeBuilder: assert self.__self_builder return self.__self_builder @property def alias(self) -> Optional[str]: alias = self.metadata.get("alias") if alias is None: aliases_config = self.get_owner_config().aliases alias = aliases_config.get(self.name) # type: ignore if alias is None: alias = self.name return alias @property def owner_class(self) -> Optional[Type]: if self.__owner_builder: return self.__owner_builder.cls return None def derive(self, **changes: Any) -> "Instance": new_type = changes.get("type") if isinstance(new_type, ForwardRef): changes["type"] = evaluate_forward_ref( new_type, get_forward_ref_referencing_globals(new_type, self.type), self.__dict__, ) new_instance = replace(self, **changes) if is_dataclass(self.origin_type): new_instance.__owner_builder = self.__self_builder return new_instance def __post_init__(self) -> None: self.update_type(self.type) if is_annotated(self.type): self.annotations = getattr(self.type, "__metadata__", []) self.type = get_args(self.type)[0] self.origin_type = get_type_origin(self.type) def update_type(self, new_type: Type) -> None: if self.__owner_builder: self.type = self.__owner_builder.get_real_type( field_name=self.name, # type: ignore field_type=new_type, ) self.origin_type = get_type_origin(self.type) if is_dataclass(self.origin_type): type_args = get_args(self.type) self.__self_builder = CodeBuilder(self.origin_type, type_args) self.__self_builder.reset() else: self.__self_builder = None def fields(self) -> Iterable[Tuple[str, Type, bool, Any]]: for f_name, f_type in self._self_builder.get_field_types( include_extras=True ).items(): f = self._self_builder.dataclass_fields.get(f_name) if not f or f and not f.init: continue f_default = f.default if f_default is MISSING: f_default = self._self_builder.namespace.get(f_name, MISSING) if f_default is not MISSING: f_default = _default(f_type, f_default, self.get_self_config()) has_default = ( f.default is not MISSING or f.default_factory is not MISSING ) yield f_name, f_type, has_default, f_default def get_overridden_serialization_method( self, ) -> Optional[Union[Callable, str]]: if not self.__owner_builder: return None serialize_option = self.metadata.get("serialize") if serialize_option is not None: if callable(serialize_option): self.metadata.pop("serialize", None) # prevent recursion return serialize_option for strategy in self.__owner_builder.iter_serialization_strategies( self.metadata, self.type ): if strategy is pass_through: return pass_through elif isinstance(strategy, dict): serialize_option = strategy.get("serialize") elif isinstance(strategy, SerializationStrategy): serialize_option = strategy.serialize if serialize_option is not None: return serialize_option return None def get_owner_config(self) -> Type[BaseConfig]: if self.__owner_builder: return self.__owner_builder.get_config() else: return BaseConfig def get_owner_dialect_or_config_option( self, option: str, default: Any ) -> Any: if self.__owner_builder: return self.__owner_builder.get_dialect_or_config_option( option, default ) else: return default def get_self_config(self) -> Type[BaseConfig]: if self.__self_builder: return self.__self_builder.get_config() else: return BaseConfig InstanceSchemaCreator: TypeAlias = Callable[ [Instance, Context], Optional[JSONSchema] ] @dataclass class InstanceSchemaCreatorRegistry: _registry: List[InstanceSchemaCreator] = field(default_factory=list) 
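# Added note: schema creators registered here are tried by get_schema()
# below in registration order; the first creator that returns a non-None
# JSONSchema wins, so specific handlers must be registered before generic
# ones such as on_collection.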
def register(self, func: InstanceSchemaCreator) -> InstanceSchemaCreator: self._registry.append(func) return func def iter(self) -> Iterable[InstanceSchemaCreator]: yield from self._registry @dataclass class EmptyJSONSchema(JSONSchema): pass def get_schema( instance: Instance, ctx: Context, with_dialect_uri: bool = False ) -> JSONSchema: for schema_creator in Registry.iter(): schema = schema_creator(instance, ctx) if schema is not None: if with_dialect_uri: schema.schema = ctx.dialect.uri return schema raise NotImplementedError( f'Type {type_name(instance.type)} of field "{instance.name}" ' f"in {type_name(instance.owner_class)} isn't supported" ) def _get_schema_or_none( instance: Instance, ctx: Context ) -> Optional[JSONSchema]: schema = get_schema(instance, ctx) if isinstance(schema, EmptyJSONSchema): return None return schema def _default(f_type: Type, f_value: Any, config_cls: Type[BaseConfig]) -> Any: @dataclass class CC(DataClassJSONMixin): x: f_type = f_value # type: ignore class Config(config_cls): # type: ignore pass return CC(f_value).to_dict()["x"] Registry = InstanceSchemaCreatorRegistry() register = Registry.register BASIC_TYPES = {str, int, float, bool} @register def on_type_with_overridden_serialization( instance: Instance, ctx: Context ) -> Optional[JSONSchema]: def override_with_any(reason: Any) -> None: if instance.owner_class is not None: name = f"{type_name(instance.owner_class)}.{instance.name}" else: # pragma: no cover # we will have an owner class, but leave this here just in case name = type_name(instance.type) warnings.warn( f"Type Any will be used for {name} with " f"overridden serialization method: {reason}" ) instance.update_type(Any) # type: ignore[arg-type] overridden_method = instance.get_overridden_serialization_method() if overridden_method is pass_through: return None elif overridden_method in BASIC_TYPES: instance.update_type(overridden_method) # type: ignore elif callable(overridden_method): try: new_type = get_function_return_annotation(overridden_method) if new_type is instance.type: return None else: instance.update_type(new_type) except Exception as e: override_with_any(e) return get_schema(instance, ctx) @register def on_dataclass(instance: Instance, ctx: Context) -> Optional[JSONSchema]: # TODO: Self references might not work if is_dataclass(instance.origin_type): jsonschema_config = instance.get_self_config().json_schema schema = JSONObjectSchema( title=instance.origin_type.__name__, additionalProperties=jsonschema_config.get( "additionalProperties", False ), ) properties: Dict[str, JSONSchema] = {} required = [] field_schema_overrides = jsonschema_config.get("properties", {}) for f_name, f_type, has_default, f_default in instance.fields(): override = field_schema_overrides.get(f_name) f_instance = instance.derive(type=f_type, name=f_name) if override: f_schema = JSONSchema.from_dict(override) else: f_schema = get_schema(f_instance, ctx) if f_instance.alias: f_name = f_instance.alias if f_default is not MISSING: f_schema.default = f_default description = f_instance.metadata.get("description") if description: f_schema.description = description if not has_default: required.append(f_name) properties[f_name] = f_schema if properties: schema.properties = properties if required: schema.required = required if ctx.all_refs: ctx.definitions[instance.origin_type.__name__] = schema ref_prefix = ctx.ref_prefix or ctx.dialect.definitions_root_pointer return JSONSchema( reference=f"{ref_prefix}/{instance.origin_type.__name__}" ) else: return schema @register 
def on_any(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.type is Any: return EmptyJSONSchema() def on_literal(instance: Instance, ctx: Context) -> Optional[JSONSchema]: enum_values = [] for value in get_literal_values(instance.type): if isinstance(value, Enum): enum_values.append(value.value) elif isinstance(value, (int, str, bool, NoneType)): # type: ignore enum_values.append(value) elif isinstance(value, bytes): enum_values.append(encodebytes(value).decode()) if len(enum_values) == 1: return JSONSchema(const=enum_values[0]) else: return JSONSchema(enum=enum_values) @register def on_special_typing_primitive( instance: Instance, ctx: Context ) -> Optional[JSONSchema]: if not is_special_typing_primitive(instance.origin_type): return None args = get_args(instance.type) if is_union(instance.type): return JSONSchema( anyOf=[get_schema(instance.derive(type=arg), ctx) for arg in args] ) elif is_type_var_any(instance.type): return EmptyJSONSchema() elif is_type_var(instance.type): constraints = getattr(instance.type, "__constraints__") if constraints: return JSONSchema( anyOf=[ get_schema(instance.derive(type=arg), ctx) for arg in constraints ] ) else: bound = getattr(instance.type, "__bound__") return get_schema(instance.derive(type=bound), ctx) elif is_new_type(instance.type): return get_schema( instance.derive(type=instance.type.__supertype__), ctx ) elif is_literal(instance.type): return on_literal(instance, ctx) # elif is_self(instance.type): # raise NotImplementedError elif is_required(instance.type) or is_not_required(instance.type): return get_schema(instance.derive(type=args[0]), ctx) elif is_unpack(instance.type): return get_schema( instance.derive(type=get_args(instance.type)[0]), ctx ) elif is_type_var_tuple(instance.type): return get_schema(instance.derive(type=Tuple[Any, ...]), ctx) elif isinstance(instance.type, ForwardRef): evaluated = evaluate_forward_ref( instance.type, get_forward_ref_referencing_globals(instance.type), None, ) if evaluated is not None: return get_schema(instance.derive(type=evaluated), ctx) @register def on_number(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type is int: schema = JSONSchema(type=JSONSchemaInstanceType.INTEGER) elif instance.origin_type is float: schema = JSONSchema(type=JSONSchemaInstanceType.NUMBER) else: return None for annotation in instance.annotations: if isinstance(annotation, Maximum): schema.maximum = annotation.value elif isinstance(annotation, Minimum): schema.minimum = annotation.value elif isinstance(annotation, ExclusiveMaximum): schema.exclusiveMaximum = annotation.value elif isinstance(annotation, ExclusiveMinimum): schema.exclusiveMinimum = annotation.value elif isinstance(annotation, MultipleOf): schema.multipleOf = annotation.value return schema @register def on_bool(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type is bool: return JSONSchema(type=JSONSchemaInstanceType.BOOLEAN) @register def on_none(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type in (NoneType, None): return JSONSchema(type=JSONSchemaInstanceType.NULL) @register def on_date_objects(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type in ( datetime.datetime, datetime.date, datetime.time, ): return JSONSchema( type=JSONSchemaInstanceType.STRING, format=DATETIME_FORMATS[instance.origin_type], ) @register def on_timedelta(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type is 
datetime.timedelta: return JSONSchema( type=JSONSchemaInstanceType.NUMBER, format=JSONSchemaInstanceFormatExtension.TIMEDELTA, ) @register def on_timezone(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type is datetime.timezone: return JSONSchema( type=JSONSchemaInstanceType.STRING, pattern=UTC_OFFSET_PATTERN ) @register def on_zone_info(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if PY_39_MIN and instance.origin_type is ZoneInfo: return JSONSchema( type=JSONSchemaInstanceType.STRING, format=JSONSchemaInstanceFormatExtension.TIME_ZONE, ) @register def on_uuid(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type is UUID: return JSONSchema( type=JSONSchemaInstanceType.STRING, format=JSONSchemaStringFormat.UUID, ) @register def on_ipaddress(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type in ( ipaddress.IPv4Address, ipaddress.IPv6Address, ipaddress.IPv4Network, ipaddress.IPv6Network, ipaddress.IPv4Interface, ipaddress.IPv6Interface, ): return JSONSchema( type=JSONSchemaInstanceType.STRING, format=IPADDRESS_FORMATS[instance.origin_type], # type: ignore ) @register def on_decimal(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type is Decimal: return JSONSchema( type=JSONSchemaInstanceType.STRING, format=JSONSchemaInstanceFormatExtension.DECIMAL, ) @register def on_fraction(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if instance.origin_type is Fraction: return JSONSchema( type=JSONSchemaInstanceType.STRING, format=JSONSchemaInstanceFormatExtension.FRACTION, ) def on_tuple(instance: Instance, ctx: Context) -> JSONArraySchema: args = get_args(instance.type) if not args: if instance.type in (Tuple, tuple): args = [typing.Any, ...] 
# type: ignore else: return JSONArraySchema(maxItems=0) elif len(args) == 1 and args[0] == (): if not PY_311_MIN: return JSONArraySchema(maxItems=0) if len(args) == 2 and args[1] is Ellipsis: items_schema = _get_schema_or_none(instance.derive(type=args[0]), ctx) return JSONArraySchema(items=items_schema) else: min_items = 0 max_items = 0 prefix_items = [] items: Optional[JSONSchema] = None unpack_schema: Optional[JSONSchema] = None unpack_idx = 0 for arg_idx, arg in enumerate(args, start=1): if not is_unpack(arg): min_items += 1 if not unpack_schema: prefix_items.append( get_schema(instance.derive(type=arg), ctx) ) else: unpack_schema = get_schema(instance.derive(type=arg), ctx) unpack_idx = arg_idx if unpack_schema: prefix_items.extend(unpack_schema.prefixItems or []) min_items += unpack_schema.minItems or 0 max_items += unpack_schema.maxItems or 0 if unpack_idx == len(args): items = unpack_schema.items else: min_items = len(args) max_items = len(args) return JSONArraySchema( prefixItems=prefix_items or None, items=items, minItems=min_items or None, maxItems=max_items or None, ) def on_named_tuple(instance: Instance, ctx: Context) -> JSONSchema: resolved = resolve_type_params( instance.origin_type, get_args(instance.type) )[instance.origin_type] annotations = { k: resolved.get(v, v) for k, v in getattr( instance.origin_type, "__annotations__", {} ).items() } fields = getattr(instance.type, "_fields", ()) defaults = getattr(instance.type, "_field_defaults", {}) as_dict = instance.get_owner_dialect_or_config_option( "namedtuple_as_dict", False ) serialize_option = instance.get_overridden_serialization_method() if serialize_option == "as_dict": as_dict = True elif serialize_option == "as_list": as_dict = False properties = {} for f_name in fields: f_type = annotations.get(f_name, typing.Any) f_schema = get_schema(instance.derive(type=f_type), ctx) f_default = defaults.get(f_name, MISSING) if f_default is not MISSING: if isinstance(f_schema, EmptyJSONSchema): f_schema = JSONSchema() f_schema.default = _default( f_type, f_default, instance.get_self_config() ) properties[f_name] = f_schema if as_dict: return JSONObjectSchema( properties=properties or None, required=list(fields), additionalProperties=False, ) else: return JSONArraySchema( prefixItems=list(properties.values()) or None, maxItems=len(properties) or None, minItems=len(properties) or None, ) def on_typed_dict(instance: Instance, ctx: Context) -> JSONObjectSchema: resolved = resolve_type_params( instance.origin_type, get_args(instance.type) )[instance.origin_type] annotations = { k: resolved.get(v, v) for k, v in instance.origin_type.__annotations__.items() } all_keys = list(annotations.keys()) required_keys = getattr(instance.type, "__required_keys__", all_keys) return JSONObjectSchema( properties={ key: get_schema(instance.derive(type=annotations[key]), ctx) for key in all_keys } or None, required=sorted(required_keys) or None, additionalProperties=False, ) def apply_array_constraints( instance: Instance, schema: JSONSchema, ) -> JSONSchema: has_contains = False min_contains: Optional[int] = None max_contains: Optional[int] = None for annotation in instance.annotations: if isinstance(annotation, MinItems): schema.minItems = annotation.value elif isinstance(annotation, MaxItems): schema.maxItems = annotation.value elif isinstance(annotation, UniqueItems): schema.uniqueItems = annotation.value elif isinstance(annotation, Contains): schema.contains = annotation.value has_contains = True elif isinstance(annotation, MinContains): 
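# Added note: MinContains/MaxContains values are buffered here and only
# written to the schema in the has_contains branch below, since JSON Schema
# defines minContains/maxContains only relative to a "contains" subschema.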
min_contains = annotation.value elif isinstance(annotation, MaxContains): max_contains = annotation.value if has_contains: if min_contains is not None: schema.minContains = min_contains if max_contains is not None: schema.maxContains = max_contains return schema def apply_object_constraints( instance: Instance, schema: JSONSchema ) -> JSONSchema: for annotation in instance.annotations: if isinstance(annotation, MaxProperties): schema.maxProperties = annotation.value elif isinstance(annotation, MinProperties): schema.minProperties = annotation.value elif isinstance(annotation, DependentRequired): schema.dependentRequired = annotation.value return schema @register def on_collection(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if not issubclass(instance.origin_type, typing.Collection): return None elif issubclass(instance.origin_type, Enum): return None args = get_args(instance.type) if issubclass(instance.origin_type, typing.ByteString): # type: ignore return JSONSchema( type=JSONSchemaInstanceType.STRING, format=JSONSchemaInstanceFormatExtension.BASE64, ) elif issubclass(instance.origin_type, str): schema = JSONSchema(type=JSONSchemaInstanceType.STRING) for annotation in instance.annotations: if isinstance(annotation, MinLength): schema.minLength = annotation.value elif isinstance(annotation, MaxLength): schema.maxLength = annotation.value elif isinstance(annotation, Pattern): schema.pattern = annotation.value return schema elif is_generic(instance.type) and issubclass( instance.origin_type, (List, typing.Deque) ): return apply_array_constraints( instance, JSONArraySchema( items=( _get_schema_or_none(instance.derive(type=args[0]), ctx) if args else None ) ), ) elif issubclass(instance.origin_type, Tuple): # type: ignore if is_named_tuple(instance.origin_type): return apply_array_constraints( instance, on_named_tuple(instance, ctx) ) elif is_generic(instance.type): return apply_array_constraints(instance, on_tuple(instance, ctx)) elif is_generic(instance.type) and issubclass( instance.origin_type, (typing.FrozenSet, typing.AbstractSet) ): return apply_array_constraints( instance, JSONArraySchema( items=( _get_schema_or_none(instance.derive(type=args[0]), ctx) if args else None ), uniqueItems=True, ), ) elif is_generic(instance.type) and issubclass( instance.origin_type, typing.ChainMap ): return apply_array_constraints( instance, JSONArraySchema( items=get_schema( instance=instance.derive( type=( Dict[args[0], args[1]] # type: ignore if args else Dict ) ), ctx=ctx, ) ), ) elif is_generic(instance.type) and issubclass( instance.origin_type, typing.Counter ): schema = JSONObjectSchema( additionalProperties=get_schema(instance.derive(type=int), ctx), ) if args: schema.propertyNames = _get_schema_or_none( instance.derive(type=args[0]), ctx ) return apply_object_constraints(instance, schema) elif is_typed_dict(instance.origin_type): return on_typed_dict(instance, ctx) elif is_generic(instance.type) and issubclass( instance.origin_type, typing.Mapping ): schema = JSONObjectSchema( additionalProperties=( _get_schema_or_none(instance.derive(type=args[1]), ctx) if args else None ), propertyNames=( _get_schema_or_none(instance.derive(type=args[0]), ctx) if args else None ), ) return apply_object_constraints(instance, schema) elif is_generic(instance.type) and issubclass( instance.origin_type, typing.Sequence ): return apply_array_constraints( instance, JSONArraySchema( items=( _get_schema_or_none(instance.derive(type=args[0]), ctx) if args else None ) ), ) @register def 
on_pathlike(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if issubclass(instance.origin_type, os.PathLike): schema = JSONSchema( type=JSONSchemaInstanceType.STRING, format=JSONSchemaInstanceFormatExtension.PATH, ) for annotation in instance.annotations: if isinstance(annotation, MaxLength): schema.maxLength = annotation.value elif isinstance(annotation, MinLength): schema.minLength = annotation.value return schema @register def on_enum(instance: Instance, ctx: Context) -> Optional[JSONSchema]: if issubclass(instance.origin_type, Enum): return JSONSchema(enum=[m.value for m in instance.origin_type]) __all__ = ["Instance", "get_schema"] mashumaro-3.13.1/mashumaro/mixins/000077500000000000000000000000001463331001200170675ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/mixins/__init__.py000066400000000000000000000000001463331001200211660ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/mixins/dict.py000066400000000000000000000035621463331001200203720ustar00rootroot00000000000000from typing import Any, Dict, Mapping, Type, TypeVar, final from mashumaro.core.meta.mixin import ( compile_mixin_packer, compile_mixin_unpacker, ) __all__ = ["DataClassDictMixin"] T = TypeVar("T", bound="DataClassDictMixin") class DataClassDictMixin: __slots__ = () __mashumaro_builder_params = {"packer": {}, "unpacker": {}} # type: ignore def __init_subclass__(cls: Type[T], **kwargs: Any): super().__init_subclass__(**kwargs) for ancestor in cls.__mro__[-1:0:-1]: builder_params_ = f"_{ancestor.__name__}__mashumaro_builder_params" builder_params = getattr(ancestor, builder_params_, None) if builder_params: compile_mixin_unpacker(cls, **builder_params["unpacker"]) compile_mixin_packer(cls, **builder_params["packer"]) @final def to_dict( self: T, # * # keyword-only arguments that exist with the code generation options: # omit_none: bool = False # by_alias: bool = False # dialect: Type[Dialect] = None **kwargs: Any, ) -> Dict[Any, Any]: ... @classmethod @final def from_dict( cls: Type[T], d: Mapping, # * # keyword-only arguments that exist with the code generation options: # dialect: Type[Dialect] = None **kwargs: Any, ) -> T: ... @classmethod def __pre_deserialize__( cls: Type[T], d: Dict[Any, Any] ) -> Dict[Any, Any]: ... @classmethod def __post_deserialize__(cls: Type[T], obj: T) -> T: ... def __pre_serialize__( self: T, # context: Any = None, # added with ADD_SERIALIZATION_CONTEXT option ) -> T: ... def __post_serialize__( self: T, d: Dict[Any, Any], # context: Any = None, # added with ADD_SERIALIZATION_CONTEXT option ) -> Dict[Any, Any]: ... 
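# A minimal usage sketch, added for illustration (`Point` and its fields
# are hypothetical names, not part of the library). The final methods above
# are intentionally empty stubs: a concrete implementation is generated for
# each subclass by compile_mixin_packer/compile_mixin_unpacker inside
# __init_subclass__.
if __name__ == "__main__":
    from dataclasses import dataclass

    @dataclass
    class Point(DataClassDictMixin):
        x: int
        y: int

    p = Point(1, 2)
    # Round trip through plain dicts.
    assert p.to_dict() == {"x": 1, "y": 2}
    assert Point.from_dict({"x": 1, "y": 2}) == p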
mashumaro-3.13.1/mashumaro/mixins/json.py
import json
from typing import Any, Callable, Dict, Type, TypeVar, Union

from mashumaro.mixins.dict import DataClassDictMixin

T = TypeVar("T", bound="DataClassJSONMixin")

EncodedData = Union[str, bytes, bytearray]
Encoder = Callable[[Any], EncodedData]
Decoder = Callable[[EncodedData], Dict[Any, Any]]


class DataClassJSONMixin(DataClassDictMixin):
    __slots__ = ()

    def to_json(
        self: T,
        encoder: Encoder = json.dumps,
        **to_dict_kwargs: Any,
    ) -> EncodedData:
        return encoder(self.to_dict(**to_dict_kwargs))

    @classmethod
    def from_json(
        cls: Type[T],
        data: EncodedData,
        decoder: Decoder = json.loads,
        **from_dict_kwargs: Any,
    ) -> T:
        return cls.from_dict(decoder(data), **from_dict_kwargs)
mashumaro-3.13.1/mashumaro/mixins/msgpack.py
from typing import Any, Callable, Dict, Type, TypeVar, final

import msgpack

from mashumaro.dialect import Dialect
from mashumaro.helper import pass_through
from mashumaro.mixins.dict import DataClassDictMixin

T = TypeVar("T", bound="DataClassMessagePackMixin")

EncodedData = bytes
Encoder = Callable[[Any], EncodedData]
Decoder = Callable[[EncodedData], Dict[Any, Any]]


class MessagePackDialect(Dialect):
    no_copy_collections = (list, dict)
    serialization_strategy = {
        bytes: pass_through,
        bytearray: {
            "deserialize": bytearray,
            "serialize": pass_through,
        },
    }


def default_encoder(data: Any) -> EncodedData:
    return msgpack.packb(data, use_bin_type=True)


def default_decoder(data: EncodedData) -> Dict[Any, Any]:
    return msgpack.unpackb(data, raw=False)


class DataClassMessagePackMixin(DataClassDictMixin):
    __slots__ = ()

    __mashumaro_builder_params = {
        "packer": {
            "format_name": "msgpack",
            "dialect": MessagePackDialect,
            "encoder": default_encoder,
        },
        "unpacker": {
            "format_name": "msgpack",
            "dialect": MessagePackDialect,
            "decoder": default_decoder,
        },
    }

    @final
    def to_msgpack(
        self: T,
        encoder: Encoder = default_encoder,
        **to_dict_kwargs: Any,
    ) -> EncodedData: ...

    @classmethod
    @final
    def from_msgpack(
        cls: Type[T],
        data: EncodedData,
        decoder: Decoder = default_decoder,
        **from_dict_kwargs: Any,
    ) -> T: ...
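# A minimal usage sketch, added for illustration (`Frame` and its field are
# hypothetical names). MessagePackDialect above passes bytes through
# unchanged, so binary payloads skip the base64 round trip that the plain
# dict mixin would otherwise apply.
if __name__ == "__main__":
    from dataclasses import dataclass

    @dataclass
    class Frame(DataClassMessagePackMixin):
        payload: bytes

    frame = Frame(b"\x00\x01\x02")
    assert Frame.from_msgpack(frame.to_msgpack()) == frame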
mashumaro-3.13.1/mashumaro/mixins/orjson.py
from datetime import date, datetime, time
from typing import Any, Callable, Dict, Type, TypeVar, Union, final
from uuid import UUID

import orjson

from mashumaro.core.helpers import ConfigValue
from mashumaro.dialect import Dialect
from mashumaro.helper import pass_through
from mashumaro.mixins.dict import DataClassDictMixin

T = TypeVar("T", bound="DataClassORJSONMixin")

EncodedData = Union[str, bytes, bytearray]
Encoder = Callable[[Any], EncodedData]
Decoder = Callable[[EncodedData], Dict[Any, Any]]


class OrjsonDialect(Dialect):
    no_copy_collections = (list, dict)
    serialization_strategy = {
        datetime: {"serialize": pass_through},
        date: {"serialize": pass_through},
        time: {"serialize": pass_through},
        UUID: {"serialize": pass_through},
    }


class DataClassORJSONMixin(DataClassDictMixin):
    __slots__ = ()

    __mashumaro_builder_params = {
        "packer": {
            "format_name": "jsonb",
            "dialect": OrjsonDialect,
            "encoder": orjson.dumps,
            "encoder_kwargs": {
                "option": ("orjson_options", ConfigValue("orjson_options")),
            },
        },
        "unpacker": {
            "format_name": "json",
            "dialect": OrjsonDialect,
            "decoder": orjson.loads,
        },
    }

    @final
    def to_jsonb(
        self: T,
        encoder: Encoder = orjson.dumps,
        *,
        orjson_options: int = ...,
        **to_dict_kwargs: Any,
    ) -> bytes: ...

    def to_json(self: T, **kwargs: Any) -> str:
        return self.to_jsonb(**kwargs).decode()

    @classmethod
    @final
    def from_json(
        cls: Type[T],
        data: EncodedData,
        decoder: Decoder = orjson.loads,
        **from_dict_kwargs: Any,
    ) -> T: ...
mashumaro-3.13.1/mashumaro/mixins/orjson.pyi
from typing import Any, Callable, Dict, Type, TypeVar, Union, final

import orjson

from mashumaro.dialect import Dialect
from mashumaro.mixins.dict import DataClassDictMixin

T = TypeVar("T", bound="DataClassORJSONMixin")

EncodedData = Union[str, bytes, bytearray]
Encoder = Callable[[Any], EncodedData]
Decoder = Callable[[EncodedData], Dict[Any, Any]]

class OrjsonDialect(Dialect):
    serialization_strategy: Any

class DataClassORJSONMixin(DataClassDictMixin):
    __slots__ = ()

    @final
    def to_jsonb(
        self: T,
        encoder: Encoder = orjson.dumps,
        *,
        orjson_options: int = ...,
        **to_dict_kwargs: Any,
    ) -> bytes: ...
    def to_json(
        self: T,
        encoder: Encoder = orjson.dumps,
        *,
        orjson_options: int = ...,
        **to_dict_kwargs: Any,
    ) -> str: ...
    @classmethod
    @final
    def from_json(
        cls: Type[T],
        data: EncodedData,
        decoder: Decoder = orjson.loads,
        **from_dict_kwargs: Any,
    ) -> T: ...
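# Added note: this stub widens to_json() with the encoder/orjson_options
# parameters that the generated runtime implementation accepts, which the
# .py source above can only spell as **kwargs. A hedged usage sketch
# (`Event` is a hypothetical name):
#
#     @dataclass
#     class Event(DataClassORJSONMixin):
#         name: str
#
#     Event("tick").to_jsonb(orjson_options=orjson.OPT_SORT_KEYS)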
mashumaro-3.13.1/mashumaro/mixins/toml.py000066400000000000000000000026441463331001200204220ustar00rootroot00000000000000from datetime import date, datetime, time from typing import Any, Callable, Dict, Type, TypeVar, final import tomli_w from mashumaro.dialect import Dialect from mashumaro.helper import pass_through from mashumaro.mixins.dict import DataClassDictMixin try: import tomllib except ModuleNotFoundError: import tomli as tomllib # type: ignore T = TypeVar("T", bound="DataClassTOMLMixin") EncodedData = str Encoder = Callable[[Any], EncodedData] Decoder = Callable[[EncodedData], Dict[Any, Any]] class TOMLDialect(Dialect): no_copy_collections = (list, dict) omit_none = True serialization_strategy = { datetime: pass_through, date: pass_through, time: pass_through, } class DataClassTOMLMixin(DataClassDictMixin): __slots__ = () __mashumaro_builder_params = { "packer": { "format_name": "toml", "dialect": TOMLDialect, "encoder": tomli_w.dumps, }, "unpacker": { "format_name": "toml", "dialect": TOMLDialect, "decoder": tomllib.loads, }, } @final def to_toml( self: T, encoder: Encoder = tomli_w.dumps, **to_dict_kwargs: Any, ) -> EncodedData: ... @classmethod @final def from_toml( cls: Type[T], data: EncodedData, decoder: Decoder = tomllib.loads, **from_dict_kwargs: Any, ) -> T: ... mashumaro-3.13.1/mashumaro/mixins/yaml.py000066400000000000000000000021241463331001200204020ustar00rootroot00000000000000from typing import Any, Callable, Dict, Type, TypeVar, Union import yaml from mashumaro.mixins.dict import DataClassDictMixin T = TypeVar("T", bound="DataClassYAMLMixin") EncodedData = Union[str, bytes] Encoder = Callable[[Any], EncodedData] Decoder = Callable[[EncodedData], Dict[Any, Any]] DefaultLoader = getattr(yaml, "CSafeLoader", yaml.SafeLoader) DefaultDumper = getattr(yaml, "CDumper", yaml.Dumper) def default_encoder(data: Any) -> EncodedData: return yaml.dump(data, Dumper=DefaultDumper) def default_decoder(data: EncodedData) -> Dict[Any, Any]: return yaml.load(data, DefaultLoader) class DataClassYAMLMixin(DataClassDictMixin): __slots__ = () def to_yaml( self: T, encoder: Encoder = default_encoder, **to_dict_kwargs: Any, ) -> EncodedData: return encoder(self.to_dict(**to_dict_kwargs)) @classmethod def from_yaml( cls: Type[T], data: EncodedData, decoder: Decoder = default_decoder, **from_dict_kwargs: Any, ) -> T: return cls.from_dict(decoder(data), **from_dict_kwargs) mashumaro-3.13.1/mashumaro/py.typed000066400000000000000000000000001463331001200172450ustar00rootroot00000000000000mashumaro-3.13.1/mashumaro/types.py000066400000000000000000000062041463331001200173000ustar00rootroot00000000000000import decimal from dataclasses import dataclass from typing import Any, Callable, List, Optional, Type, Union from typing_extensions import Literal from mashumaro.core.const import Sentinel __all__ = [ "SerializableType", "GenericSerializableType", "SerializationStrategy", "RoundedDecimal", "Discriminator", "Alias", ] class SerializableType: __slots__ = () __use_annotations__ = False def __init_subclass__( cls, use_annotations: Union[ bool, Literal[Sentinel.MISSING] ] = Sentinel.MISSING, **kwargs: Any, ): if use_annotations is not Sentinel.MISSING: cls.__use_annotations__ = use_annotations def _serialize(self) -> Any: raise NotImplementedError @classmethod def _deserialize(cls, value: Any) -> Any: raise NotImplementedError class GenericSerializableType: __slots__ = () def _serialize(self, types: List[Type]) -> Any: raise NotImplementedError @classmethod def _deserialize(cls, value: Any, types: 
List[Type]) -> Any: raise NotImplementedError class SerializationStrategy: __use_annotations__ = False def __init_subclass__( cls, use_annotations: Union[ bool, Literal[Sentinel.MISSING] ] = Sentinel.MISSING, **kwargs: Any, ): if use_annotations is not Sentinel.MISSING: cls.__use_annotations__ = use_annotations def serialize(self, value: Any) -> Any: raise NotImplementedError def deserialize(self, value: Any) -> Any: raise NotImplementedError class RoundedDecimal(SerializationStrategy): def __init__( self, places: Optional[int] = None, rounding: Optional[str] = None ): if places is not None: self.exp = decimal.Decimal((0, (1,), -places)) else: self.exp = None # type: ignore self.rounding = rounding def serialize(self, value: decimal.Decimal) -> str: if self.exp: if self.rounding: return str(value.quantize(self.exp, rounding=self.rounding)) else: return str(value.quantize(self.exp)) else: return str(value) def deserialize(self, value: str) -> decimal.Decimal: return decimal.Decimal(str(value)) @dataclass(unsafe_hash=True) class Discriminator: field: Optional[str] = None include_supertypes: bool = False include_subtypes: bool = False variant_tagger_fn: Optional[Callable[[Any], Any]] = None def __post_init__(self) -> None: if not self.include_supertypes and not self.include_subtypes: raise ValueError( "Either 'include_supertypes' or 'include_subtypes' " "must be enabled" ) class Alias: def __init__(self, name: str, /): self.name = name def __repr__(self) -> str: return f"Alias(name='{self.name}')" def __eq__(self, other: Any) -> bool: if not isinstance(other, Alias): return False return self.name == other.name def __hash__(self) -> int: return hash(self.name) mashumaro-3.13.1/pyproject.toml000066400000000000000000000015571463331001200165100ustar00rootroot00000000000000[tool.mypy] ignore_missing_imports = true disallow_untyped_defs = true disallow_incomplete_defs = true [[tool.mypy.overrides]] module = [ 'mashumaro.mixins.dict', 'mashumaro.mixins.msgpack', 'mashumaro.mixins.toml', 'mashumaro.codecs.*', ] disable_error_code = 'empty-body' [[tool.mypy.overrides]] module = [ 'mashumaro.core.meta.types.pack', 'mashumaro.core.meta.types.unpack', 'mashumaro.jsonschema.schema', ] disable_error_code = 'return' [tool.isort] profile = 'black' line_length = 79 multi_line_output = 3 include_trailing_comma = true ensure_newline_before_comments = true [tool.black] line-length = 79 target-version = ['py38', 'py39', 'py310', 'py311'] [tool.ruff] line-length = 79 [tool.coverage.run] omit = ["setup.py"] [tool.coverage.report] exclude_lines = ["pragma: no cover", "@overload", "@abstractmethod"] ignore_errors = true mashumaro-3.13.1/requirements-dev.txt000066400000000000000000000012411463331001200176220ustar00rootroot00000000000000# extra msgpack>=0.5.6 pyyaml>=3.13 tomli-w>=1.0 tomli>=1.1.0;python_version<'3.11' orjson>=3.6.1;python_version<'3.13' # tests mypy>=0.812 isort>=5.6.4 pytest>=6.2.1 pytest-mock>=3.5.1 pytest-cov>=2.10.1 pytest-xdist>=3.5.0 coveralls>=3.0.0 black==24.3.0 ruff>=0.0.285 codespell>=2.2.2 # third party features ciso8601>=2.1.3 pendulum>=2.1.2;python_version<'3.13' # benchmark pyperf>=2.6.1 termtables>=0.2.3 pytablewriter[html]>=0.58.0 cattrs==23.2.2 #pydantic==2.7.4 dacite==1.7.0 # see https://github.com/konradhalas/dacite/issues/236#issuecomment-1613987368 marshmallow>=3.19.0 dataclasses-json==0.6.2 # library stubs types-backports types-dataclasses types-PyYAML mashumaro-3.13.1/setup.py000066400000000000000000000027471463331001200153100ustar00rootroot00000000000000#!/usr/bin/env python 
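# Added note: the extras declared in extras_require below can be combined
# at install time, e.g. `pip install "mashumaro[orjson,msgpack]"` (an
# illustrative invocation, assuming the extras as declared here).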
from setuptools import find_packages, setup setup( name="mashumaro", version="3.13.1", description="Fast and well tested serialization library", long_description=open("README.md", encoding="utf8").read(), long_description_content_type="text/markdown", platforms="all", classifiers=[ "License :: OSI Approved :: Apache Software License", "Intended Audience :: Developers", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Development Status :: 5 - Production/Stable", ], license="Apache License, Version 2.0", author="Alexander Tikhonov", author_email="random.gauss@gmail.com", url="https://github.com/Fatal1ty/mashumaro", packages=find_packages(include=("mashumaro", "mashumaro.*")), package_data={"mashumaro": ["py.typed", "mixins/orjson.pyi"]}, python_requires=">=3.8", install_requires=[ "typing_extensions>=4.1.0", ], extras_require={ "orjson": ["orjson"], "msgpack": ["msgpack>=0.5.6"], "yaml": ["pyyaml>=3.13"], "toml": [ "tomli-w>=1.0", "tomli>=1.1.0;python_version<'3.11'", ], }, zip_safe=False, ) mashumaro-3.13.1/tests/000077500000000000000000000000001463331001200147265ustar00rootroot00000000000000mashumaro-3.13.1/tests/__init__.py000066400000000000000000000000001463331001200170250ustar00rootroot00000000000000mashumaro-3.13.1/tests/conftest.py000066400000000000000000000010521463331001200171230ustar00rootroot00000000000000from unittest.mock import patch from mashumaro.core.const import PY_312_MIN, PY_313_MIN if not PY_312_MIN: collect_ignore = [ "test_generics_pep_695.py", "test_pep_695.py", ] if PY_313_MIN: collect_ignore = [ "test_codecs/test_orjson_codec.py", "test_discriminated_unions/test_dialects.py", "test_orjson.py", "test_pep_563.py", "test_self.py", ] add_unpack_method = patch( "mashumaro.core.meta.code.builder.CodeBuilder.add_unpack_method", lambda *args, **kwargs: ..., ) mashumaro-3.13.1/tests/entities.py000066400000000000000000000137141463331001200171320ustar00rootroot00000000000000from collections import namedtuple from dataclasses import dataclass from datetime import date, datetime from enum import Enum, Flag, IntEnum, IntFlag from os import PathLike from typing import Any, Generic, List, NewType, Optional, Union try: from enum import StrEnum except ImportError: # pragma: no cover class StrEnum(str, Enum): pass from typing_extensions import NamedTuple, TypedDict, TypeVar from mashumaro import DataClassDictMixin from mashumaro.config import TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig from mashumaro.types import GenericSerializableType, SerializableType T = TypeVar("T") TAny = TypeVar("TAny", bound=Any) TInt = TypeVar("TInt", bound=int) TDefaultInt = TypeVar("TDefaultInt", default=int) TIntStr = TypeVar("TIntStr", int, str) T_Optional_int = TypeVar("T_Optional_int", bound=Optional[int]) class MyEnum(Enum): a = "letter a" b = "letter b" class MyStrEnum(str, Enum): a = "letter a" b = "letter b" class MyNativeStrEnum(StrEnum): a = "letter a" b = "letter b" class MyIntEnum(IntEnum): a = 1 b = 2 class MyFlag(Flag): a = 1 b = 2 class MyIntFlag(IntFlag): a = 1 b = 2 class MyList(list): pass @dataclass class MyDataClass(DataClassDictMixin): a: int b: int @dataclass(frozen=True) class MyFrozenDataClass: x: int class MutableString(SerializableType): def __init__(self, value: str): self.characters = [c for c in value] def _serialize(self) -> str: return 
str(self) @classmethod def _deserialize(cls, value: str) -> "MutableString": return MutableString(value) def __str__(self): return "".join(self.characters) def __eq__(self, other): return self.characters == other.characters class GenericSerializableList(Generic[T], GenericSerializableType): def __init__(self, value: List[T]): self.value = value def _serialize(self, types): if types[0] == int: return [v + 2 for v in self.value] elif types[0] == str: return [f"_{v}" for v in self.value] @classmethod def _deserialize(cls, value, types): if types[0] == int: return GenericSerializableList([int(v) - 2 for v in value]) elif types[0] == str: return GenericSerializableList([v[1:] for v in value]) def __eq__(self, other): return self.value == other.value class GenericSerializableWrapper(Generic[T], GenericSerializableType): def __init__(self, value: T): self.value = value def _serialize(self, types): if types[0] == date: return self.value.isoformat() @classmethod def _deserialize(cls, value, types): if types[0] == date: return GenericSerializableWrapper(date.fromisoformat(value)) def __eq__(self, other): return self.value == other.value class CustomPath(PathLike): def __init__(self, *args: str): self._path = "/".join(args) def __fspath__(self): return self._path def __eq__(self, other): return isinstance(other, CustomPath) and self._path == other._path @dataclass class MyDataClassWithUnion(DataClassDictMixin): a: Union[int, str] b: Union[MyEnum, int] @dataclass class MyDataClassWithOptional(DataClassDictMixin): a: Optional[int] = None b: Optional[int] = None @dataclass class MyDataClassWithOptionalAndOmitNoneFlag(DataClassDictMixin): a: Optional[int] = None b: Optional[int] = None class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG] class ThirdPartyType: def __init__(self, value): self.value = value def __eq__(self, other): return isinstance(other, ThirdPartyType) and self.value == other.value @dataclass class DataClassWithoutMixin: i: int @dataclass class SerializableTypeDataClass(SerializableType): a: int b: int def _serialize(self): return {"a": self.a + 1, "b": self.b + 1} @classmethod def _deserialize(cls, value): a = value.get("a") - 1 b = value.get("b") - 1 return cls(a, b) @dataclass class GenericSerializableTypeDataClass(GenericSerializableType): a: int b: int def _serialize(self, types): return {"a": self.a + 1, "b": self.b + 1} @classmethod def _deserialize(cls, value, types): a = value.get("a") - 1 b = value.get("b") - 1 return cls(a, b) @dataclass class MyGenericDataClass(Generic[T], DataClassDictMixin): x: T class MyGenericList(List[T]): pass class SerializableTypeGenericList(Generic[T], SerializableType): def __init__(self, value: List[T]): self.value = value def _serialize(self): return self.value @classmethod def _deserialize(cls, value): return SerializableTypeGenericList(value) def __eq__(self, other): return self.value == other.value TMyDataClass = TypeVar("TMyDataClass", bound=MyDataClass) class TypedDictRequiredKeys(TypedDict): int: int float: float class TypedDictOptionalKeys(TypedDict, total=False): int: int float: float class TypedDictRequiredAndOptionalKeys(TypedDictRequiredKeys, total=False): str: str class TypedDictRequiredKeysWithOptional(TypedDict): x: Optional[int] y: int class TypedDictOptionalKeysWithOptional(TypedDict, total=False): x: Optional[int] y: float class GenericTypedDict(TypedDict, Generic[T]): x: T y: int class MyNamedTuple(NamedTuple): i: int f: float class MyNamedTupleWithDefaults(NamedTuple): i: int = 1 f: float = 2.0 class 
MyNamedTupleWithOptional(NamedTuple): i: Optional[int] f: int MyUntypedNamedTuple = namedtuple("MyUntypedNamedTuple", ("i", "f")) MyUntypedNamedTupleWithDefaults = namedtuple( "MyUntypedNamedTupleWithDefaults", ("i", "f"), defaults=(1, 2.0), ) class GenericNamedTuple(NamedTuple, Generic[T]): x: T y: int MyDatetimeNewType = NewType("MyDatetimeNewType", datetime) mashumaro-3.13.1/tests/test_aliases.py000066400000000000000000000225531463331001200177670ustar00rootroot00000000000000from dataclasses import dataclass, field from typing import Optional import pytest from typing_extensions import Annotated from mashumaro import DataClassDictMixin from mashumaro.config import ( TO_DICT_ADD_BY_ALIAS_FLAG, TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig, ) from mashumaro.exceptions import MissingField from mashumaro.types import Alias @dataclass class Aliased(DataClassDictMixin): a: int = field(metadata={"alias": "alias_a"}) b: Annotated[int, Alias("alias_b")] @dataclass class AliasedWithSerializeByAliasFlag(DataClassDictMixin): a: int = field(metadata={"alias": "alias_a"}) b: Annotated[int, Alias("alias_b")] class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG] def test_alias(): assert Aliased.from_dict({"alias_a": 123, "alias_b": 456}) == Aliased( a=123, b=456 ) with pytest.raises(MissingField): assert Aliased.from_dict({"a": 123, "alias_b": 456}) with pytest.raises(MissingField): assert Aliased.from_dict({"alias_a": 123, "b": 456}) def test_alias_with_default(): @dataclass class DataClass(DataClassDictMixin): a: int = field(default=111, metadata={"alias": "alias_a"}) b: Annotated[int, Alias("alias_b")] = 222 assert DataClass.from_dict({"alias_a": 123, "alias_b": 456}) == DataClass( a=123, b=456 ) assert DataClass.from_dict({}) == DataClass(a=111, b=222) def test_alias_with_omit_none(): @dataclass class DataClass(DataClassDictMixin): a: Optional[int] = field(default=None, metadata={"alias": "alias_a"}) b: Annotated[Optional[int], Alias("alias_b")] = None class Config(BaseConfig): code_generation_options = [ TO_DICT_ADD_BY_ALIAS_FLAG, TO_DICT_ADD_OMIT_NONE_FLAG, ] instance = DataClass() assert instance.to_dict(omit_none=True) == {} assert instance.to_dict(by_alias=True) == { "alias_a": None, "alias_b": None, } assert instance.to_dict(omit_none=True, by_alias=True) == {} instance = DataClass(a=123, b=456) assert instance.to_dict(omit_none=True) == {"a": 123, "b": 456} assert instance.to_dict(by_alias=True) == {"alias_a": 123, "alias_b": 456} assert instance.to_dict(omit_none=True, by_alias=True) == { "alias_a": 123, "alias_b": 456, } def test_serialize_by_alias_config_option(): @dataclass class DataClass(DataClassDictMixin): a: int = field(metadata={"alias": "alias_a"}) b: Annotated[int, Alias("alias_b")] class Config(BaseConfig): serialize_by_alias = True assert DataClass(123, 456).to_dict() == {"alias_a": 123, "alias_b": 456} def test_serialize_by_alias_code_generation_flag(): @dataclass class DataClass(DataClassDictMixin): x: int = field(metadata={"alias": "alias_x"}) y: Annotated[int, Alias("alias_y")] class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG] instance = DataClass(x=123, y=456) assert instance.to_dict() == {"x": 123, "y": 456} assert instance.to_dict(by_alias=True) == {"alias_x": 123, "alias_y": 456} def test_serialize_by_alias_code_generation_flag_without_alias(): @dataclass class DataClass(DataClassDictMixin): x: int class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG] instance = DataClass(x=123) assert 
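# A minimal standalone sketch of the SerializableType hook implemented by the
# entities above: a custom type opts into (de)serialization by providing
# `_serialize` and `_deserialize`. The names Celsius and Reading are
# illustrative, not part of the test suite.
from dataclasses import dataclass

from mashumaro import DataClassDictMixin
from mashumaro.types import SerializableType


class Celsius(SerializableType):
    def __init__(self, value: float):
        self.value = value

    def _serialize(self) -> float:
        # emitted as a plain float in the serialized document
        return self.value

    @classmethod
    def _deserialize(cls, value: float) -> "Celsius":
        return cls(value)

    def __eq__(self, other):
        return isinstance(other, Celsius) and self.value == other.value


@dataclass
class Reading(DataClassDictMixin):
    temperature: Celsius


assert Reading.from_dict({"temperature": 21.5}) == Reading(Celsius(21.5))
assert Reading(Celsius(21.5)).to_dict() == {"temperature": 21.5}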
instance.to_dict() == {"x": 123} assert instance.to_dict(by_alias=True) == {"x": 123} def test_no_serialize_by_alias_code_generation_flag(): @dataclass class DataClass(DataClassDictMixin): x: int = field(metadata={"alias": "alias_x"}) y: Annotated[int, Alias("alias_y")] instance = DataClass(x=123, y=456) assert instance.to_dict() == {"x": 123, "y": 456} with pytest.raises(TypeError): instance.to_dict(by_alias=True) def test_serialize_by_alias_flag_for_inner_class_without_it(): @dataclass class DataClass(DataClassDictMixin): x: Aliased = field(metadata={"alias": "alias_x"}) y: Annotated[Aliased, Alias("alias_y")] class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG] instance = DataClass(Aliased(a=1, b=2), Aliased(a=3, b=4)) assert instance.to_dict() == {"x": {"a": 1, "b": 2}, "y": {"a": 3, "b": 4}} assert instance.to_dict(by_alias=True) == { "alias_x": {"a": 1, "b": 2}, "alias_y": {"a": 3, "b": 4}, } def test_serialize_by_alias_flag_for_inner_class_with_it(): @dataclass class DataClass(DataClassDictMixin): x: AliasedWithSerializeByAliasFlag = field( metadata={"alias": "alias_x"} ) y: Annotated[AliasedWithSerializeByAliasFlag, Alias("alias_y")] class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG] instance = DataClass( AliasedWithSerializeByAliasFlag(a=1, b=2), AliasedWithSerializeByAliasFlag(a=3, b=4), ) assert instance.to_dict() == {"x": {"a": 1, "b": 2}, "y": {"a": 3, "b": 4}} assert instance.to_dict(by_alias=True) == { "alias_x": {"alias_a": 1, "alias_b": 2}, "alias_y": {"alias_a": 3, "alias_b": 4}, } def test_aliases_in_config(): @dataclass class DataClass(DataClassDictMixin): a: int = 111 b: Annotated[int, Alias("alias_b")] = 222 class Config(BaseConfig): aliases = {"a": "alias_a", "b": "alias_c"} serialize_by_alias = True assert DataClass.from_dict({"alias_a": 123, "alias_b": 456}) == DataClass( a=123, b=456 ) assert DataClass.from_dict({}) == DataClass(a=111, b=222) assert DataClass(a=123, b=456).to_dict() == { "alias_a": 123, "alias_b": 456, } def test_by_alias_with_serialize_by_alias(): @dataclass class DataClass(DataClassDictMixin): a: int = field(metadata={"alias": "alias_a"}) b: Annotated[int, Alias("alias_b")] class Config(BaseConfig): serialize_by_alias = True code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG] instance = DataClass(a=123, b=456) assert DataClass.from_dict({"alias_a": 123, "alias_b": 456}) == instance assert instance.to_dict() == {"alias_a": 123, "alias_b": 456} assert instance.to_dict(by_alias=False) == {"a": 123, "b": 456} def test_no_serialize_by_alias_with_serialize_by_alias_and_optional(): @dataclass class DataClass(DataClassDictMixin): x: Optional[int] = field(metadata={"alias": "alias_x"}) y: Annotated[Optional[int], Alias("alias_y")] class Config(BaseConfig): serialize_by_alias = True assert DataClass(x=123, y=456).to_dict() == { "alias_x": 123, "alias_y": 456, } assert DataClass(x=None, y=None).to_dict() == { "alias_x": None, "alias_y": None, } def test_by_field_with_allow_deserialization_not_by_alias(): @dataclass class DataClass(DataClassDictMixin): a1: int = field(metadata={"alias": "alias_a1"}) a2: Annotated[int, Alias("alias_a2")] b1: Optional[int] = field(metadata={"alias": "alias_b1"}) b2: Annotated[Optional[int], Alias("alias_b2")] c1: Optional[str] = field(metadata={"alias": "alias_c1"}) c2: Annotated[Optional[str], Alias("alias_c2")] d1: int = field(metadata={"alias": "alias_d1"}, default=4) d2: Annotated[int, Alias("alias_d2")] = 4 e1: Optional[int] = field(metadata={"alias": "alias_e1"}, 
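# A condensed sketch of the two alias spellings covered by the tests above:
# `field(metadata={"alias": ...})` and `Annotated[..., Alias(...)]` behave the
# same, and TO_DICT_ADD_BY_ALIAS_FLAG adds an opt-in `by_alias` keyword to
# `to_dict`. The Event class is illustrative.
from dataclasses import dataclass, field

from typing_extensions import Annotated

from mashumaro import DataClassDictMixin
from mashumaro.config import TO_DICT_ADD_BY_ALIAS_FLAG, BaseConfig
from mashumaro.types import Alias


@dataclass
class Event(DataClassDictMixin):
    user_id: int = field(metadata={"alias": "userId"})
    event_name: Annotated[str, Alias("eventName")]

    class Config(BaseConfig):
        code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG]


obj = Event.from_dict({"userId": 1, "eventName": "login"})
# without the flag argument, field names are used; with it, aliases are used
assert obj.to_dict() == {"user_id": 1, "event_name": "login"}
assert obj.to_dict(by_alias=True) == {"userId": 1, "eventName": "login"}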
default=5) e2: Annotated[Optional[int], Alias("alias_e2")] = 5 f1: Optional[str] = field(metadata={"alias": "alias_f1"}, default="6") f2: Annotated[Optional[str], Alias("alias_f2")] = "6" class Config(BaseConfig): serialize_by_alias = True code_generation_options = [TO_DICT_ADD_BY_ALIAS_FLAG] allow_deserialization_not_by_alias = True instance = DataClass(a1=1, a2=1, b1=2, b2=2, c1="3", c2="3") assert ( DataClass.from_dict( { "a1": 1, "a2": 1, "alias_b1": 2, "alias_b2": 2, "c1": "3", "c2": "3", "alias_d1": 4, "alias_d2": 4, "e1": 5, "e2": 5, "alias_f1": "6", "alias_f2": "6", } ) == instance ) assert instance.to_dict() == { "alias_a1": 1, "alias_a2": 1, "alias_b1": 2, "alias_b2": 2, "alias_c1": "3", "alias_c2": "3", "alias_d1": 4, "alias_d2": 4, "alias_e1": 5, "alias_e2": 5, "alias_f1": "6", "alias_f2": "6", } assert instance.to_dict(by_alias=False) == { "a1": 1, "a2": 1, "b1": 2, "b2": 2, "c1": "3", "c2": "3", "d1": 4, "d2": 4, "e1": 5, "e2": 5, "f1": "6", "f2": "6", } def test_order_of_metadata_and_annotated(): @dataclass class DataClass(DataClassDictMixin): x: Annotated[int, Alias("foo")] = field(metadata={"alias": "bar"}) class Config(BaseConfig): serialize_by_alias = True instance = DataClass(42) assert DataClass.from_dict({"bar": 42}) == instance assert instance.to_dict() == {"bar": 42} mashumaro-3.13.1/tests/test_annotated.py000066400000000000000000000033001463331001200203100ustar00rootroot00000000000000from dataclasses import dataclass from datetime import date, datetime from typing_extensions import Annotated from mashumaro import DataClassDictMixin from mashumaro.config import BaseConfig def test_annotated(): @dataclass class DataClass(DataClassDictMixin): x: Annotated[date, None] obj = DataClass(date(2022, 2, 6)) assert DataClass.from_dict({"x": "2022-02-06"}) == obj assert obj.to_dict() == {"x": "2022-02-06"} def test_annotated_with_overridden_methods(): @dataclass class DataClass(DataClassDictMixin): foo: Annotated[date, "foo"] bar: Annotated[date, "bar"] baz: Annotated[date, "baz"] class Config(BaseConfig): serialization_strategy = { Annotated[date, "foo"]: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, Annotated[date, "bar"]: { "serialize": date.isoformat, "deserialize": date.fromisoformat, }, date: { "serialize": lambda x: x.strftime("%Y%m%d"), "deserialize": ( lambda x: datetime.strptime(x, "%Y%m%d").date() ), }, } obj = DataClass( foo=date(2023, 6, 12), bar=date(2023, 6, 12), baz=date(2023, 6, 12), ) obj.foo.strftime("%Y%M%D") assert ( DataClass.from_dict( {"foo": 738683, "bar": "2023-06-12", "baz": "20230612"} ) == obj ) assert obj.to_dict() == { "foo": 738683, "bar": "2023-06-12", "baz": "20230612", } mashumaro-3.13.1/tests/test_code_generation_options.py000066400000000000000000000034741463331001200232470ustar00rootroot00000000000000from dataclasses import dataclass from typing import Optional from mashumaro import DataClassDictMixin from mashumaro.config import ( TO_DICT_ADD_BY_ALIAS_FLAG, TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig, ) @dataclass class A(DataClassDictMixin): x: Optional[int] = None class Config(BaseConfig): aliases = {"x": "x_alias"} code_generation_options = [ TO_DICT_ADD_OMIT_NONE_FLAG, TO_DICT_ADD_BY_ALIAS_FLAG, ] @dataclass class B(DataClassDictMixin): a: Optional[A] = None class Config(BaseConfig): aliases = {"a": "a_alias"} code_generation_options = [ TO_DICT_ADD_OMIT_NONE_FLAG, TO_DICT_ADD_BY_ALIAS_FLAG, ] def test_passing_flags_if_parent_has_them(): @dataclass class WithFlags(DataClassDictMixin): b: B class Config(BaseConfig): 
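# A sketch of the Annotated-keyed strategy lookup tested above: a strategy
# registered for Annotated[date, "tag"] takes precedence over the plain `date`
# entry for fields carrying that exact annotation. Names are illustrative.
from dataclasses import dataclass
from datetime import date

from typing_extensions import Annotated

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig


@dataclass
class Span(DataClassDictMixin):
    start: Annotated[date, "ordinal"]  # matched by the Annotated key
    end: date  # falls back to the plain date strategy

    class Config(BaseConfig):
        serialization_strategy = {
            Annotated[date, "ordinal"]: {
                "serialize": date.toordinal,
                "deserialize": date.fromordinal,
            },
            date: {
                "serialize": date.isoformat,
                "deserialize": date.fromisoformat,
            },
        }


obj = Span(start=date(2023, 6, 12), end=date(2023, 6, 13))
assert obj.to_dict() == {"start": 738683, "end": "2023-06-13"}
assert Span.from_dict({"start": 738683, "end": "2023-06-13"}) == obj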
code_generation_options = [ TO_DICT_ADD_OMIT_NONE_FLAG, TO_DICT_ADD_BY_ALIAS_FLAG, ] assert WithFlags.from_dict({"b": {"a": {"x": None}}}) == WithFlags( b=B(a=None) ) obj = WithFlags.from_dict({"b": {"a_alias": {"x": None}}}) assert obj == WithFlags(b=B(a=A(x=None))) assert obj.to_dict() == {"b": {"a": {"x": None}}} assert obj.to_dict(by_alias=True) == {"b": {"a_alias": {"x_alias": None}}} assert obj.to_dict(by_alias=True, omit_none=True) == {"b": {"a_alias": {}}} def test_passing_flags_if_parent_does_not_have_them(): @dataclass class WithoutFlags(DataClassDictMixin): b: B assert WithoutFlags.from_dict({"b": {"a": {"x": None}}}) == WithoutFlags( b=B(a=None) ) obj = WithoutFlags.from_dict({"b": {"a_alias": {"x": None}}}) assert obj == WithoutFlags(b=B(a=A(x=None))) assert obj.to_dict() == {"b": {"a": {"x": None}}} mashumaro-3.13.1/tests/test_codecs/000077500000000000000000000000001463331001200172255ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_codecs/__init__.py000066400000000000000000000000001463331001200213240ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_codecs/test_basic_codec.py000066400000000000000000000147661463331001200230720ustar00rootroot00000000000000from dataclasses import dataclass from datetime import date, datetime from typing import Generic, List, Optional, TypeVar, Union import pytest from typing_extensions import Literal from mashumaro.codecs import BasicDecoder, BasicEncoder from mashumaro.codecs.basic import decode, encode from mashumaro.dialect import Dialect from tests.entities import ( DataClassWithoutMixin, GenericNamedTuple, GenericTypedDict, MyGenericDataClass, ) T = TypeVar("T") @dataclass class Foo: foo: str @dataclass class Bar: bar: str class MyDialect(Dialect): serialization_strategy = { date: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, } @dataclass class GenericDataClass(Generic[T]): x: T y: List[T] @pytest.mark.parametrize( ("shape_type", "encoded", "decoded"), [ [ List[date], ["2023-09-22", "2023-09-23"], [ date(2023, 9, 22), date(2023, 9, 23), ], ], [DataClassWithoutMixin, {"i": 42}, DataClassWithoutMixin(42)], [ List[DataClassWithoutMixin], [{"i": 42}], [DataClassWithoutMixin(42)], ], [ GenericDataClass, {"x": "2023-09-23", "y": ["2023-09-23"]}, GenericDataClass("2023-09-23", ["2023-09-23"]), ], [ GenericDataClass[date], {"x": "2023-09-23", "y": ["2023-09-23"]}, GenericDataClass(date(2023, 9, 23), [date(2023, 9, 23)]), ], [ List[Union[int, date]], ["42", "2023-09-23"], [42, date(2023, 9, 23)], ], [Optional[date], "2023-09-23", date(2023, 9, 23)], [Optional[date], None, None], ], ) def test_decode(shape_type, encoded, decoded): assert decode(encoded, shape_type) == decoded @pytest.mark.parametrize( ("shape_type", "encoded", "decoded"), [ [ List[date], ["2023-09-22", "2023-09-23"], [ date(2023, 9, 22), date(2023, 9, 23), ], ], [DataClassWithoutMixin, {"i": 42}, DataClassWithoutMixin(42)], [ List[DataClassWithoutMixin], [{"i": 42}], [DataClassWithoutMixin(42)], ], [ GenericDataClass, {"x": date(2023, 9, 23), "y": [date(2023, 9, 23)]}, GenericDataClass(date(2023, 9, 23), [date(2023, 9, 23)]), ], [ GenericDataClass[date], {"x": "2023-09-23", "y": ["2023-09-23"]}, GenericDataClass(date(2023, 9, 23), [date(2023, 9, 23)]), ], [List[Union[int, date]], [42, "2023-09-23"], [42, date(2023, 9, 23)]], [Optional[date], "2023-09-23", date(2023, 9, 23)], [Optional[date], None, None], ], ) def test_encode(shape_type, encoded, decoded): assert encode(decoded, shape_type) == encoded def test_decoder_with_default_dialect(): decoder 
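# A sketch of the standalone codec API exercised by the tests above: `decode`
# and `encode` convert any supported shape type without requiring a mixin,
# while BasicDecoder/BasicEncoder precompile the conversion for reuse.
from datetime import date
from typing import List

from mashumaro.codecs import BasicDecoder, BasicEncoder
from mashumaro.codecs.basic import decode, encode

# one-off conversion
assert decode(["2023-09-22"], List[date]) == [date(2023, 9, 22)]
assert encode([date(2023, 9, 22)], List[date]) == ["2023-09-22"]

# precompiled, reusable codec objects
decoder = BasicDecoder(List[date])
encoder = BasicEncoder(List[date])
assert decoder.decode(["2023-09-22"]) == [date(2023, 9, 22)]
assert encoder.encode([date(2023, 9, 22)]) == ["2023-09-22"]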
= BasicDecoder(List[date], default_dialect=MyDialect) assert decoder.decode([738785, 738786]) == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_encoder_with_default_dialect(): encoder = BasicEncoder(List[date], default_dialect=MyDialect) assert encoder.encode([date(2023, 9, 22), date(2023, 9, 23)]) == [ 738785, 738786, ] def test_pre_decoder_func(): decoder = BasicDecoder(List[date], pre_decoder_func=lambda v: v.split(",")) assert decoder.decode("2023-09-22,2023-09-23") == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_post_encoder_func(): encoder = BasicEncoder(List[date], post_encoder_func=lambda v: ",".join(v)) assert ( encoder.encode( [ date(2023, 9, 22), date(2023, 9, 23), ] ) == "2023-09-22,2023-09-23" ) @pytest.mark.parametrize( ("shape_type", "invalid_value"), [[Union[date, datetime], "foo"], [Literal["foo"], "bar"]], ) def test_value_error_on_decode(shape_type, invalid_value): decoder = BasicDecoder(shape_type) with pytest.raises(ValueError) as e: decoder.decode(invalid_value) assert type(e.value) is ValueError @pytest.mark.parametrize( ("shape_type", "invalid_value"), [[Union[date, datetime], "foo"], [Literal["foo"], "bar"]], ) def test_value_error_on_encode(shape_type, invalid_value): encoder = BasicEncoder(shape_type) with pytest.raises(ValueError) as e: encoder.encode(invalid_value) assert type(e.value) is ValueError def test_with_fields_with_generated_methods(): @dataclass class MyClass: td1: GenericTypedDict[str] td2: GenericTypedDict[date] nt1: GenericNamedTuple[str] nt2: GenericNamedTuple[date] u1: List[Union[int, str]] u2: List[Union[int, date]] l1: Literal["l1"] l2: Literal["l2"] decoder = BasicDecoder(MyClass) encoder = BasicEncoder(MyClass) data = { "td1": {"x": "2023-11-15", "y": 1}, "td2": {"x": "2023-11-15", "y": 2}, "nt1": ["2023-11-15", 3], "nt2": ["2023-11-15", 4], "u1": [5, "2023-11-15"], "u2": [6, "2023-11-15"], "l1": "l1", "l2": "l2", } obj = MyClass( td1={"x": "2023-11-15", "y": 1}, td2={"x": date(2023, 11, 15), "y": 2}, nt1=GenericNamedTuple("2023-11-15", 3), nt2=GenericNamedTuple(date(2023, 11, 15), 4), u1=[5, "2023-11-15"], u2=[6, date(2023, 11, 15)], l1="l1", l2="l2", ) assert decoder.decode(data) == obj assert encoder.encode(obj) == data def test_with_two_dataclass_fields(): @dataclass class MyClass: x1: Foo x2: Bar decoder = BasicDecoder(MyClass) encoder = BasicEncoder(MyClass) data = {"x1": {"foo": "foo"}, "x2": {"bar": "bar"}} obj = MyClass(x1=Foo("foo"), x2=Bar("bar")) assert decoder.decode(data) == obj assert encoder.encode(obj) == data def test_with_two_generic_dataclass_fields(): @dataclass class MyClass: x1: MyGenericDataClass[str] x2: MyGenericDataClass[date] decoder = BasicDecoder(MyClass) encoder = BasicEncoder(MyClass) data = {"x1": {"x": "2023-11-15"}, "x2": {"x": "2023-11-15"}} obj = MyClass( x1=MyGenericDataClass("2023-11-15"), x2=MyGenericDataClass(date(2023, 11, 15)), ) assert decoder.decode(data) == obj assert encoder.encode(obj) == data mashumaro-3.13.1/tests/test_codecs/test_json_codec.py000066400000000000000000000034271463331001200227520ustar00rootroot00000000000000import json from datetime import date from typing import List from mashumaro.codecs.json import ( JSONDecoder, JSONEncoder, json_decode, json_encode, ) from mashumaro.dialect import Dialect class MyDialect(Dialect): serialization_strategy = { date: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, } def test_json_decode(): assert json_decode('["2023-09-22", "2023-09-23"]', List[date]) == [ date(2023, 9, 22), date(2023, 9, 23), ] def 
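# A sketch of the dialect plumbing tested above: `default_dialect` swaps the
# wire representation of every matching type, while pre_decoder_func and
# post_encoder_func wrap the raw payload. The comma-joined output format here
# is illustrative.
from datetime import date
from typing import List

from mashumaro.codecs import BasicDecoder, BasicEncoder
from mashumaro.dialect import Dialect


class OrdinalDialect(Dialect):
    serialization_strategy = {
        date: {"serialize": date.toordinal, "deserialize": date.fromordinal},
    }


decoder = BasicDecoder(List[date], default_dialect=OrdinalDialect)
assert decoder.decode([738785]) == [date(2023, 9, 22)]

encoder = BasicEncoder(
    List[date], post_encoder_func=lambda v: ",".join(map(str, v))
)
assert encoder.encode([date(2023, 9, 22)]) == "2023-09-22"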
test_json_encode(): assert ( json_encode([date(2023, 9, 22), date(2023, 9, 23)], List[date]) == '["2023-09-22", "2023-09-23"]' ) def test_decoder_with_default_dialect(): decoder = JSONDecoder(List[date], default_dialect=MyDialect) assert decoder.decode("[738785, 738786]") == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_encoder_with_default_dialect(): encoder = JSONEncoder(List[date], default_dialect=MyDialect) assert ( encoder.encode([date(2023, 9, 22), date(2023, 9, 23)]) == "[738785, 738786]" ) def test_pre_decoder_func(): decoder = JSONDecoder( List[date], pre_decoder_func=lambda v: json.loads( "[" + ",".join(f'"{v}"' for v in v.split(",")) + "]" ), ) assert decoder.decode("2023-09-22,2023-09-23") == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_post_encoder_func(): encoder = JSONEncoder( List[date], post_encoder_func=lambda v: json.dumps(v, separators=(",", ":")), ) assert ( encoder.encode( [ date(2023, 9, 22), date(2023, 9, 23), ] ) == '["2023-09-22","2023-09-23"]' ) mashumaro-3.13.1/tests/test_codecs/test_msgpack_codec.py000066400000000000000000000042661463331001200234300ustar00rootroot00000000000000from datetime import date from typing import List import msgpack from mashumaro.codecs.msgpack import ( MessagePackDecoder, MessagePackEncoder, msgpack_decode, msgpack_encode, ) from mashumaro.dialect import Dialect class MyDialect(Dialect): serialization_strategy = { date: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, } def test_msgpack_decode(): data = msgpack.dumps(["2023-09-22", "2023-09-23"]) assert msgpack_decode(data, List[date]) == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_msgpack_encode(): data = msgpack.dumps(["2023-09-22", "2023-09-23"]) assert ( msgpack_encode([date(2023, 9, 22), date(2023, 9, 23)], List[date]) == data ) def test_decoder_with_default_dialect(): data = msgpack.dumps([738785, 738786]) decoder = MessagePackDecoder(List[date], default_dialect=MyDialect) assert decoder.decode(data) == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_encoder_with_default_dialect(): data = msgpack.dumps([738785, 738786]) encoder = MessagePackEncoder(List[date], default_dialect=MyDialect) assert encoder.encode([date(2023, 9, 22), date(2023, 9, 23)]) == data def test_pre_decoder_func(): data = msgpack.dumps(["2023-09-22", "2023-09-23"]) calls = 0 def pre_decoder_func(value): nonlocal calls calls += 1 return msgpack.loads(value) decoder = MessagePackDecoder( List[date], pre_decoder_func=pre_decoder_func, ) assert decoder.decode(data) == [ date(2023, 9, 22), date(2023, 9, 23), ] assert calls == 1 def test_post_encoder_func(): data = msgpack.dumps(["2023-09-22", "2023-09-23"]) calls = 0 def post_encoder_func(value): nonlocal calls calls += 1 return msgpack.dumps(value) encoder = MessagePackEncoder( List[date], post_encoder_func=post_encoder_func, ) assert ( encoder.encode( [ date(2023, 9, 22), date(2023, 9, 23), ] ) == data ) assert calls == 1 mashumaro-3.13.1/tests/test_codecs/test_orjson_codec.py000066400000000000000000000021631463331001200233070ustar00rootroot00000000000000from datetime import date from typing import List from mashumaro.codecs.orjson import ( ORJSONDecoder, ORJSONEncoder, json_decode, json_encode, ) from mashumaro.dialect import Dialect class MyDialect(Dialect): serialization_strategy = { date: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, } def test_json_decode(): assert json_decode('["2023-09-22", "2023-09-23"]', List[date]) == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_json_encode(): assert ( 
json_encode([date(2023, 9, 22), date(2023, 9, 23)], List[date]) == b'["2023-09-22","2023-09-23"]' ) def test_decoder_with_default_dialect(): decoder = ORJSONDecoder(List[date], default_dialect=MyDialect) assert decoder.decode("[738785, 738786]") == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_encoder_with_default_dialect(): encoder = ORJSONEncoder(List[date], default_dialect=MyDialect) assert ( encoder.encode([date(2023, 9, 22), date(2023, 9, 23)]) == b"[738785,738786]" ) mashumaro-3.13.1/tests/test_codecs/test_toml_codec.py000066400000000000000000000026721463331001200227550ustar00rootroot00000000000000from datetime import date from typing import Dict, List import tomli_w from mashumaro.codecs.toml import ( TOMLDecoder, TOMLEncoder, toml_decode, toml_encode, ) from mashumaro.dialect import Dialect class MyDialect(Dialect): serialization_strategy = { date: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, } def test_toml_decode(): data = tomli_w.dumps({"x": [date(2023, 9, 22), date(2023, 9, 23)]}) assert toml_decode(data, Dict[str, List[date]]) == { "x": [ date(2023, 9, 22), date(2023, 9, 23), ] } def test_toml_encode(): data = tomli_w.dumps({"x": [date(2023, 9, 22), date(2023, 9, 23)]}) assert ( toml_encode( {"x": [date(2023, 9, 22), date(2023, 9, 23)]}, Dict[str, List[date]], ) == data ) def test_decoder_with_default_dialect(): data = tomli_w.dumps({"x": [738785, 738786]}) decoder = TOMLDecoder(Dict[str, List[date]], default_dialect=MyDialect) assert decoder.decode(data) == { "x": [ date(2023, 9, 22), date(2023, 9, 23), ] } def test_encoder_with_default_dialect(): data = tomli_w.dumps({"x": [738785, 738786]}) encoder = TOMLEncoder(Dict[str, List[date]], default_dialect=MyDialect) assert ( encoder.encode({"x": [date(2023, 9, 22), date(2023, 9, 23)]}) == data ) mashumaro-3.13.1/tests/test_codecs/test_yaml_codec.py000066400000000000000000000042131463331001200227350ustar00rootroot00000000000000from datetime import date from typing import List import yaml from mashumaro.codecs.yaml import ( YAMLDecoder, YAMLEncoder, yaml_decode, yaml_encode, ) from mashumaro.dialect import Dialect class MyDialect(Dialect): serialization_strategy = { date: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, } def test_yaml_decode(): data = "- '2023-09-22'\n- '2023-09-23'\n" assert yaml_decode(data, List[date]) == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_yaml_encode(): data = "- '2023-09-22'\n- '2023-09-23'\n" assert ( yaml_encode([date(2023, 9, 22), date(2023, 9, 23)], List[date]) == data ) def test_decoder_with_default_dialect(): data = "- 738785\n- 738786\n" decoder = YAMLDecoder(List[date], default_dialect=MyDialect) assert decoder.decode(data) == [ date(2023, 9, 22), date(2023, 9, 23), ] def test_encoder_with_default_dialect(): data = "- 738785\n- 738786\n" encoder = YAMLEncoder(List[date], default_dialect=MyDialect) assert encoder.encode([date(2023, 9, 22), date(2023, 9, 23)]) == data def test_pre_decoder_func(): data = "- '2023-09-22'\n- '2023-09-23'\n" calls = 0 def pre_decoder_func(value): nonlocal calls calls += 1 return yaml.load(value, getattr(yaml, "CSafeLoader", yaml.SafeLoader)) decoder = YAMLDecoder( List[date], pre_decoder_func=pre_decoder_func, ) assert decoder.decode(data) == [ date(2023, 9, 22), date(2023, 9, 23), ] assert calls == 1 def test_post_encoder_func(): data = "- '2023-09-22'\n- '2023-09-23'\n" calls = 0 def post_encoder_func(value): nonlocal calls calls += 1 return yaml.dump(value, Dumper=getattr(yaml, "CDumper", yaml.Dumper)) encoder 
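# A sketch of the format-specific codecs covered above: each wraps the same
# shape-typed conversion in a concrete wire format. JSON is shown here; the
# msgpack, orjson, toml and yaml variants tested above mirror this shape.
from datetime import date
from typing import List

from mashumaro.codecs.json import json_decode, json_encode

assert json_decode('["2023-09-22"]', List[date]) == [date(2023, 9, 22)]
assert json_encode([date(2023, 9, 22)], List[date]) == '["2023-09-22"]'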
= YAMLEncoder( List[date], post_encoder_func=post_encoder_func, ) assert ( encoder.encode( [ date(2023, 9, 22), date(2023, 9, 23), ] ) == data ) assert calls == 1 mashumaro-3.13.1/tests/test_common.py000066400000000000000000000225071463331001200176350ustar00rootroot00000000000000import dataclasses from dataclasses import dataclass, field from enum import Enum from pathlib import PurePosixPath from typing import Any, Literal, NamedTuple, Optional import msgpack import pytest from typing_extensions import Self from mashumaro.config import BaseConfig from mashumaro.core.const import PY_310_MIN from mashumaro.core.meta.types.common import clean_id from mashumaro.mixins.dict import DataClassDictMixin from mashumaro.mixins.json import DataClassJSONMixin from mashumaro.mixins.msgpack import DataClassMessagePackMixin from mashumaro.mixins.yaml import DataClassYAMLMixin from mashumaro.types import GenericSerializableType, SerializableType @dataclass class EntityA1(DataClassDictMixin): x: int @dataclass class EntityA2(EntityA1): y: int @dataclass class EntityA1Wrapper(DataClassMessagePackMixin): entity: EntityA1 @dataclass class EntityA2Wrapper(DataClassMessagePackMixin): entity: EntityA2 @dataclass class EntityB1(DataClassDictMixin): x: int @dataclass class EntityB2(EntityB1): y: int @dataclass class EntityB1WrapperDict(DataClassDictMixin): entity: EntityB1 @dataclass class EntityB2WrapperMessagePack(DataClassMessagePackMixin): entity: EntityB2 @dataclass class EntityBWrapperMessagePack(DataClassMessagePackMixin): entity1wrapper: EntityB1WrapperDict entity2wrapper: EntityB2WrapperMessagePack if PY_310_MIN: @dataclass(kw_only=True) class DataClassKwOnly1(DataClassDictMixin): x: int y: int @dataclass class DataClassKwOnly2(DataClassDictMixin): x: int = field(kw_only=True) y: int @dataclass(kw_only=True) class DataClassKwOnly3(DataClassDictMixin): x: int y: int = field(kw_only=False) @dataclass class DataClassKwOnly4(DataClassDictMixin): x: int _: dataclasses.KW_ONLY y: int @dataclass(kw_only=True) class LazyDataClassKwOnly1(DataClassDictMixin): x: int y: int class Config(BaseConfig): lazy_compilation = True @dataclass class LazyDataClassKwOnly2(DataClassDictMixin): x: int = field(kw_only=True) y: int class Config(BaseConfig): lazy_compilation = True @dataclass(kw_only=True) class LazyDataClassKwOnly3(DataClassDictMixin): x: int y: int = field(kw_only=False) class Config(BaseConfig): lazy_compilation = True @dataclass class LazyDataClassKwOnly4(DataClassDictMixin): x: int _: dataclasses.KW_ONLY y: int class Config(BaseConfig): lazy_compilation = True @dataclass class BaseClassWithPosArgs(DataClassDictMixin): pos1: int pos2: int pos3: int @dataclass class DerivedClassWithOverriddenMiddlePosArg(BaseClassWithPosArgs): kw1: int = 0 pos2: int @dataclass class DerivedClassWithOverriddenMiddlePosArgWithField(BaseClassWithPosArgs): kw1: int = 0 pos2: int = field(metadata={}) def test_slots(): @dataclass class RegularDataClass: __slots__ = ("number",) number: int @dataclass class DictDataClass(DataClassDictMixin): __slots__ = ("number",) number: int @dataclass class JSONDataClass(DataClassJSONMixin): __slots__ = ("number",) number: int @dataclass class MessagePackDataClass(DataClassMessagePackMixin): __slots__ = ("number",) number: int @dataclass class YAMLDataClass(DataClassYAMLMixin): __slots__ = ("number",) number: int class MySerializableType(SerializableType): __slots__ = ("number",) def __init__(self, number): self.number = number class MyGenericSerializableType(GenericSerializableType): __slots__ = 
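# A sketch of the MessagePack mixin round-trip tested below; requires the
# msgpack dependency. The Packet class is illustrative.
from dataclasses import dataclass

import msgpack

from mashumaro.mixins.msgpack import DataClassMessagePackMixin


@dataclass
class Packet(DataClassMessagePackMixin):
    x: int


data = msgpack.packb({"x": 1}, use_bin_type=True)
assert Packet(1).to_msgpack() == data
assert Packet.from_msgpack(data) == Packet(1)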
("number",) def __init__(self, number): self.number = number for cls in ( RegularDataClass, DictDataClass, JSONDataClass, MessagePackDataClass, YAMLDataClass, MySerializableType, MyGenericSerializableType, ): instance = cls(1) with pytest.raises(AttributeError) as e: instance.new_attribute = 2 assert str(e.value).startswith( f"'{cls.__name__}' object has no attribute 'new_attribute'" ) def test_data_class_dict_mixin_from_dict(): assert DataClassDictMixin.from_dict({}) is None def test_data_class_dict_mixin_to_dict(): assert DataClassDictMixin().to_dict() is None def test_compiled_mixin_with_inheritance_1(): entity = EntityA2(x=1, y=2) wrapper = EntityA2Wrapper(entity) data = msgpack.packb({"entity": {"x": 1, "y": 2}}, use_bin_type=True) assert wrapper.to_msgpack() == data assert EntityA2Wrapper.from_msgpack(data) == wrapper def test_compiled_mixin_with_inheritance_2(): entity1w = EntityB1WrapperDict(EntityB1(x=1)) entity2w = EntityB2WrapperMessagePack(EntityB2(x=1, y=2)) wrapper = EntityBWrapperMessagePack(entity1w, entity2w) data = msgpack.packb( { "entity1wrapper": {"entity": {"x": 1}}, "entity2wrapper": {"entity": {"x": 1, "y": 2}}, }, use_bin_type=True, ) assert wrapper.to_msgpack() == data assert EntityBWrapperMessagePack.from_msgpack(data) == wrapper @pytest.mark.skipif(not PY_310_MIN, reason="requires python 3.10+") def test_kw_only_dataclasses(): data = {"x": "1", "y": "2"} for cls in ( DataClassKwOnly1, DataClassKwOnly2, DataClassKwOnly3, DataClassKwOnly4, LazyDataClassKwOnly1, LazyDataClassKwOnly2, LazyDataClassKwOnly3, LazyDataClassKwOnly4, ): obj = cls.from_dict(data) assert obj.x == 1 assert obj.y == 2 def test_kw_args_when_pos_arg_is_overridden_without_field(): obj = DerivedClassWithOverriddenMiddlePosArg(1, 2, 3, 4) loaded = DerivedClassWithOverriddenMiddlePosArg.from_dict( {"pos1": "1", "pos2": "2", "pos3": "3", "kw1": "4"} ) assert loaded == obj assert loaded.pos1 == 1 assert loaded.pos2 == 2 assert loaded.pos3 == 3 assert loaded.kw1 == 4 def test_kw_args_when_pos_arg_is_overridden_with_field(): obj = DerivedClassWithOverriddenMiddlePosArgWithField(1, 2, 3, 4) loaded = DerivedClassWithOverriddenMiddlePosArgWithField.from_dict( {"pos1": "1", "pos2": "2", "pos3": "3", "kw1": "4"} ) assert loaded == obj assert loaded.pos1 == 1 assert loaded.pos2 == 2 assert loaded.pos3 == 3 assert loaded.kw1 == 4 def test_local_types(): @dataclass class LocalDataclassType: foo: int class LocalNamedTupleType(NamedTuple): foo: int class LocalPathLike(PurePosixPath): pass class LocalEnumType(Enum): FOO = "foo" class LocalSerializableType(SerializableType): @classmethod def _deserialize(cls, value): return LocalSerializableType() def _serialize(self) -> Any: return {} def __eq__(self, __value: object) -> bool: return isinstance(__value, LocalSerializableType) class LocalGenericSerializableType(GenericSerializableType): @classmethod def _deserialize(cls, value, types): return LocalGenericSerializableType() def _serialize(self, types) -> Any: return {} def __eq__(self, __value: object) -> bool: return isinstance(__value, LocalGenericSerializableType) class LocalSelfSerializableAnnotatedType( SerializableType, use_annotations=True ): @classmethod def _deserialize(cls, value: Self) -> Self: return value def _serialize(self) -> Self: return self def __eq__(self, __value: object) -> bool: return isinstance(__value, LocalSelfSerializableAnnotatedType) @dataclass class DataClassWithLocalType(DataClassDictMixin): x1: LocalDataclassType x2: LocalNamedTupleType x3: LocalPathLike x4: LocalEnumType x4_1: 
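# A sketch of the keyword-only handling tested above (requires Python 3.10+):
# the generated `from_dict` passes kw_only fields by name, whether declared
# via @dataclass(kw_only=True), field(kw_only=True), or the KW_ONLY sentinel
# shown here. The Point class is illustrative.
import dataclasses
from dataclasses import dataclass

from mashumaro import DataClassDictMixin


@dataclass
class Point(DataClassDictMixin):
    x: int
    _: dataclasses.KW_ONLY
    y: int  # keyword-only from here on


# string inputs are coerced to the annotated int type on load
assert Point.from_dict({"x": "1", "y": "2"}) == Point(1, y=2)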
Literal[LocalEnumType.FOO] x5: LocalSerializableType x6: LocalGenericSerializableType x7: Optional[Self] x8: LocalSelfSerializableAnnotatedType obj = DataClassWithLocalType( x1=LocalDataclassType(foo=0), x2=LocalNamedTupleType(foo=0), x3=LocalPathLike("path/to/file"), x4=LocalEnumType.FOO, x4_1=LocalEnumType.FOO, x5=LocalSerializableType(), x6=LocalGenericSerializableType(), x7=None, x8=LocalSelfSerializableAnnotatedType(), ) assert obj.to_dict() == { "x1": {"foo": 0}, "x2": [0], "x3": "path/to/file", "x4": "foo", "x4_1": "foo", "x5": {}, "x6": {}, "x7": None, "x8": LocalSelfSerializableAnnotatedType(), } assert ( DataClassWithLocalType.from_dict( { "x1": {"foo": 0}, "x2": [0], "x3": "path/to/file", "x4": "foo", "x4_1": "foo", "x5": {}, "x6": {}, "x7": None, "x8": LocalSelfSerializableAnnotatedType(), } ) == obj ) def test_clean_id(): assert clean_id("") == "_" assert clean_id("foo") == "foo" assert clean_id("foo..bar") == "foo__locals__bar" mashumaro-3.13.1/tests/test_config.py000066400000000000000000000311301463331001200176020ustar00rootroot00000000000000from dataclasses import dataclass, field from typing import Optional, Union import pytest from typing_extensions import Literal from mashumaro import DataClassDictMixin from mashumaro.config import TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig from mashumaro.exceptions import ExtraKeysError, InvalidFieldValue from mashumaro.types import Discriminator, SerializationStrategy from .entities import ( MyDataClassWithOptional, MyDataClassWithOptionalAndOmitNoneFlag, MyNamedTuple, MyNamedTupleWithDefaults, MyUntypedNamedTuple, MyUntypedNamedTupleWithDefaults, TypedDictRequiredKeys, ) @dataclass class LazyCompilationDataClass(DataClassDictMixin): x: int class Config(BaseConfig): lazy_compilation = True def test_debug_true_option(mocker): mocked_print = mocker.patch("builtins.print") @dataclass class _(DataClassDictMixin): union: Union[int, str, MyNamedTuple] typed_dict: TypedDictRequiredKeys named_tuple: MyNamedTupleWithDefaults literal: Literal[1, 2, 3] class Config(BaseConfig): debug = True mocked_print.assert_called() def test_config_without_base_config_base(mocker): mocked_print = mocker.patch("builtins.print") @dataclass class _(DataClassDictMixin): x: Union[int, str] class Config: debug = True mocked_print.assert_called() def test_debug_false_option(mocker): mocked_print = mocker.patch("builtins.print") @dataclass class _(DataClassDictMixin): x: Union[int, str] class Config(BaseConfig): debug = False mocked_print.assert_not_called() def test_omit_none_code_generation_flag(): @dataclass class DataClass(DataClassDictMixin): x: Optional[int] = None class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG] assert DataClass().to_dict() == {"x": None} assert DataClass().to_dict(omit_none=True) == {} def test_no_omit_none_code_generation_flag(): @dataclass class DataClass(DataClassDictMixin): x: Optional[int] = None assert DataClass().to_dict() == {"x": None} with pytest.raises(TypeError): DataClass().to_dict(omit_none=True) def test_omit_none_flag_for_inner_class_without_it(): @dataclass class DataClass(DataClassDictMixin): x: Optional[MyDataClassWithOptional] = None class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG] assert DataClass().to_dict() == {"x": None} assert DataClass().to_dict(omit_none=True) == {} empty_x = MyDataClassWithOptional() assert DataClass(empty_x).to_dict() == {"x": {"a": None, "b": None}} assert DataClass(empty_x).to_dict(omit_none=True) == { "x": {"a": None, "b": None} } def 
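# A sketch of the omit_none machinery tested above: the code-generation flag
# adds an opt-in `omit_none` keyword to `to_dict` (and `omit_none = True` in
# the config flips the default). The Patch class is illustrative.
from dataclasses import dataclass
from typing import Optional

from mashumaro import DataClassDictMixin
from mashumaro.config import TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig


@dataclass
class Patch(DataClassDictMixin):
    note: Optional[str] = None

    class Config(BaseConfig):
        code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG]


assert Patch().to_dict() == {"note": None}
assert Patch().to_dict(omit_none=True) == {}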
test_omit_none_flag_for_inner_class_with_it(): @dataclass class DataClass(DataClassDictMixin): x: Optional[MyDataClassWithOptionalAndOmitNoneFlag] = None class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG] assert DataClass().to_dict() == {"x": None} assert DataClass().to_dict(omit_none=True) == {} empty_x = MyDataClassWithOptionalAndOmitNoneFlag() assert DataClass(empty_x).to_dict() == {"x": {"a": None, "b": None}} assert DataClass(empty_x).to_dict(omit_none=True) == {"x": {}} def test_passing_omit_none_into_union(): @dataclass class DataClass(DataClassDictMixin): a: Optional[int] = None b: Optional[Union[int, MyDataClassWithOptionalAndOmitNoneFlag]] = None class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG] instance = DataClass(b=MyDataClassWithOptionalAndOmitNoneFlag(a=1)) assert instance.to_dict(omit_none=True) == {"b": {"a": 1}} def test_serialization_strategy(): class TestSerializationStrategy(SerializationStrategy): def serialize(self, value): return [value] def deserialize(self, value): return value[0] @dataclass class DataClass(DataClassDictMixin): a: int b: str c: int class Config(BaseConfig): serialization_strategy = { int: TestSerializationStrategy(), str: { "serialize": lambda v: [v], "deserialize": lambda v: v[0], }, } obj = DataClass(a=123, b="abc", c=123) assert DataClass.from_dict({"a": [123], "b": ["abc"], "c": [123]}) == obj assert obj.to_dict() == {"a": [123], "b": ["abc"], "c": [123]} def test_named_tuple_as_dict(): @dataclass class DataClass(DataClassDictMixin): mnp: MyNamedTuple mnpwd: MyNamedTupleWithDefaults munp: MyUntypedNamedTuple class Config(BaseConfig): namedtuple_as_dict = True obj = DataClass( mnp=MyNamedTuple(i=1, f=2.0), mnpwd=MyNamedTupleWithDefaults(i=1, f=2.0), munp=MyUntypedNamedTuple(i=1, f=2.0), ) obj_dict = { "mnp": {"i": 1, "f": 2.0}, "mnpwd": {"i": 1, "f": 2.0}, "munp": {"i": 1, "f": 2.0}, } assert obj.to_dict() == obj_dict assert DataClass.from_dict(obj_dict) == obj def test_untyped_named_tuple_with_defaults_as_dict(): @dataclass class DataClass(DataClassDictMixin): munpwd: MyUntypedNamedTupleWithDefaults class Config(BaseConfig): namedtuple_as_dict = True obj = DataClass(munpwd=MyUntypedNamedTupleWithDefaults(i=1, f=2.0)) assert obj.to_dict() == {"munpwd": {"i": 1, "f": 2.0}} assert DataClass.from_dict({"munpwd": {"i": 1, "f": 2.0}}) == obj def test_named_tuple_as_dict_and_as_list_engine(): @dataclass class DataClass(DataClassDictMixin): as_dict: MyNamedTuple as_list: MyNamedTuple = field( metadata={"serialize": "as_list", "deserialize": "as_list"} ) class Config(BaseConfig): namedtuple_as_dict = True obj = DataClass( as_dict=MyNamedTuple(i=1, f=2.0), as_list=MyNamedTuple(i=1, f=2.0), ) obj_dict = { "as_dict": {"i": 1, "f": 2.0}, "as_list": [1, 2.0], } assert obj.to_dict() == obj_dict assert DataClass.from_dict(obj_dict) == obj def test_omit_none_code_generation_flag_with_omit_none_by_default(): @dataclass class DataClass(DataClassDictMixin): x: Optional[int] = None class Config(BaseConfig): code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG] omit_none = True assert DataClass().to_dict() == {} assert DataClass().to_dict(omit_none=True) == {} assert DataClass().to_dict(omit_none=False) == {"x": None} def test_lazy_compilation(): obj = LazyCompilationDataClass(42) assert LazyCompilationDataClass.from_dict({"x": "42"}) == obj assert obj.to_dict() == {"x": 42} def test_sort_keys_with_mixin(): @dataclass class SortedDataClass(DataClassDictMixin): foo: int bar: int class Config(BaseConfig): 
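# A sketch of the class-based SerializationStrategy registered per type in
# Config, as tested below; the wrap-in-a-list behavior mirrors the test and
# is illustrative only.
from dataclasses import dataclass

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.types import SerializationStrategy


class Boxed(SerializationStrategy):
    def serialize(self, value):
        return [value]

    def deserialize(self, value):
        return value[0]


@dataclass
class Holder(DataClassDictMixin):
    a: int

    class Config(BaseConfig):
        # every int field in this class goes through Boxed
        serialization_strategy = {int: Boxed()}


assert Holder(1).to_dict() == {"a": [1]}
assert Holder.from_dict({"a": [1]}) == Holder(1)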
sort_keys = True @dataclass class UnSortedDataClass(DataClassDictMixin): foo: int bar: int class Config(BaseConfig): sort_keys = False t = SortedDataClass(1, 2) assert str(t.to_dict()) == "{'bar': 2, 'foo': 1}" t = UnSortedDataClass(1, 2) assert str(t.to_dict()) == "{'foo': 1, 'bar': 2}" @dataclass class SortedDataClass: foo: int bar: int class Config(BaseConfig): sort_keys = True @dataclass class UnSortedDataClass: foo: int bar: int class Config(BaseConfig): sort_keys = False def test_sort_keys_plain_dataclass(): @dataclass class RootSortedModel(DataClassDictMixin): unsorted_sub: UnSortedDataClass sorted_sub: SortedDataClass class Config(BaseConfig): sort_keys = True @dataclass class RootUnSortedModel(DataClassDictMixin): unsorted_sub: UnSortedDataClass sorted_sub: SortedDataClass class Config(BaseConfig): sort_keys = False t = RootSortedModel( unsorted_sub=UnSortedDataClass(1, 2), sorted_sub=SortedDataClass(1, 2), ) assert str(t.to_dict()) == ( "{'sorted_sub': {'bar': 2, 'foo': 1}, " "'unsorted_sub': {'foo': 1, 'bar': 2}}" ) t = RootUnSortedModel( unsorted_sub=UnSortedDataClass(1, 2), sorted_sub=SortedDataClass(1, 2), ) assert str(t.to_dict()) == ( "{'unsorted_sub': {'foo': 1, 'bar': 2}, " "'sorted_sub': {'bar': 2, 'foo': 1}}" ) def test_forbid_extra_keys(): @dataclass class ForbidKeysModel(DataClassDictMixin): foo: int class Config(BaseConfig): forbid_extra_keys = True # Test unpacking works assert ForbidKeysModel.from_dict({"foo": 1}) == ForbidKeysModel(1) # Test extra keys are forbidden with pytest.raises(ExtraKeysError) as exc_info: ForbidKeysModel.from_dict({"foo": 1, "bar": 2, "baz": 3}) assert exc_info.value.extra_keys == {"bar", "baz"} assert exc_info.value.target_type == ForbidKeysModel # Now with alias, but not allow_deserialization_not_by_alias @dataclass class ForbidKeysModel(DataClassDictMixin): foo: int = field(metadata={"alias": "f"}) bar: int class Config(BaseConfig): forbid_extra_keys = True aliases = {"bar": "b"} # Test unpacking works assert ForbidKeysModel.from_dict({"f": 1, "b": 2}) == ForbidKeysModel(1, 2) # Test extra keys are forbidden with pytest.raises(ExtraKeysError) as exc_info: ForbidKeysModel.from_dict({"foo": 1, "bar": 2}) assert exc_info.value.extra_keys == {"foo", "bar"} assert exc_info.value.target_type == ForbidKeysModel # Now with alias, but allow_deserialization_not_by_alias @dataclass class ForbidKeysModel(DataClassDictMixin): foo: int = field(metadata={"alias": "f"}) bar: int class Config(BaseConfig): forbid_extra_keys = True aliases = {"bar": "b"} allow_deserialization_not_by_alias = True # Test unpacking works assert ForbidKeysModel.from_dict({"f": 1, "bar": 2}) == ForbidKeysModel( 1, 2 ) # Test extra keys are forbidden with pytest.raises(ExtraKeysError) as exc_info: ForbidKeysModel.from_dict({"foo": 1, "b": 2, "baz": 3}) assert exc_info.value.extra_keys == {"baz"} assert exc_info.value.target_type == ForbidKeysModel @dataclass class _VariantByBase(DataClassDictMixin): class Config(BaseConfig): discriminator = Discriminator( field="__type", include_subtypes=True, variant_tagger_fn=lambda clz: clz.__name__, ) forbid_extra_keys = True @dataclass class _VariantByField1(_VariantByBase): x: Optional[str] = None @dataclass class _VariantByField2(_VariantByBase): x: Optional[str] = None @dataclass class _VariantByField3(_VariantByBase): x: Optional[str] = None class Config(BaseConfig): # add intermediate config to check if we're looking for the # discriminator in the base class _VariantByBase to add the # discriminator field to the list of allowed keys 
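# A sketch of the `forbid_extra_keys` option tested above: unknown input keys
# raise ExtraKeysError, which records the offending keys and the target type.
# The Strict class is illustrative.
from dataclasses import dataclass

import pytest

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.exceptions import ExtraKeysError


@dataclass
class Strict(DataClassDictMixin):
    foo: int

    class Config(BaseConfig):
        forbid_extra_keys = True


with pytest.raises(ExtraKeysError) as exc_info:
    Strict.from_dict({"foo": 1, "bar": 2})
assert exc_info.value.extra_keys == {"bar"}
assert exc_info.value.target_type == Strict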
for _VariantByField4 # deserializable via _VariantByBase.from_dict omit_none = True @dataclass class _VariantByField4(_VariantByField3): class Config(BaseConfig): forbid_extra_keys = True @dataclass class ForbidKeysModelWithDiscriminator(DataClassDictMixin): inner: _VariantByBase class Config(BaseConfig): forbid_extra_keys = True def test_forbid_extra_keys_with_discriminator(): # Test unpacking works assert ForbidKeysModelWithDiscriminator.from_dict( {"inner": {"x": "foo", "__type": "_VariantByField2"}} ) == ForbidKeysModelWithDiscriminator(_VariantByField2(x="foo")) # Test extra keys are forbidden with pytest.raises(InvalidFieldValue) as exc_info: ForbidKeysModelWithDiscriminator.from_dict( {"inner": {"x": "foo", "__type": "_VariantByField2", "bar": "baz"}} ) root_exc = exc_info.value.__context__ assert isinstance(root_exc, ExtraKeysError) assert root_exc.extra_keys == {"bar"} assert root_exc.target_type == _VariantByField2 def test_forbid_extra_keys_with_discriminator_for_subclass(): assert _VariantByBase.from_dict( {"x": "foo", "__type": "_VariantByField4"} ) == _VariantByField4("foo") with pytest.raises(ExtraKeysError) as exc_info: _VariantByBase.from_dict( {"x": "foo", "__type": "_VariantByField4", "y": "bar"} ) assert exc_info.value.extra_keys == {"y"} mashumaro-3.13.1/tests/test_data_types.py000066400000000000000000001342121463331001200204770ustar00rootroot00000000000000import collections import collections.abc import decimal import fractions import ipaddress import os import uuid from dataclasses import InitVar, dataclass, field from datetime import date, datetime, time, timedelta, timezone from pathlib import ( Path, PosixPath, PurePath, PurePosixPath, PureWindowsPath, WindowsPath, ) from queue import Queue from types import MappingProxyType from typing import ( Any, AnyStr, ChainMap, ClassVar, Counter, DefaultDict, Deque, Dict, FrozenSet, Hashable, List, Mapping, MutableMapping, MutableSet, NewType, Optional, OrderedDict, Sequence, Set, Tuple, ) import pytest from typing_extensions import Final, LiteralString from mashumaro import DataClassDictMixin from mashumaro.codecs import BasicDecoder, BasicEncoder from mashumaro.codecs.basic import decode, encode from mashumaro.config import BaseConfig from mashumaro.core.const import PEP_585_COMPATIBLE, PY_39_MIN from mashumaro.exceptions import ( InvalidFieldValue, MissingField, UnserializableDataError, UnserializableField, ) from mashumaro.types import ( GenericSerializableType, RoundedDecimal, SerializableType, SerializationStrategy, ) from tests.entities import MyUntypedNamedTupleWithDefaults, TDefaultInt from .conftest import add_unpack_method from .entities import ( CustomPath, DataClassWithoutMixin, GenericNamedTuple, GenericSerializableList, GenericSerializableTypeDataClass, GenericTypedDict, MutableString, MyDataClass, MyDataClassWithUnion, MyDatetimeNewType, MyEnum, MyFlag, MyIntEnum, MyIntFlag, MyNamedTuple, MyNamedTupleWithDefaults, MyNamedTupleWithOptional, MyNativeStrEnum, MyStrEnum, MyUntypedNamedTuple, SerializableTypeDataClass, T, T_Optional_int, TAny, TInt, TIntStr, TMyDataClass, TypedDictOptionalKeys, TypedDictOptionalKeysWithOptional, TypedDictRequiredAndOptionalKeys, TypedDictRequiredKeys, TypedDictRequiredKeysWithOptional, ) from .utils import same_types if PY_39_MIN: from zoneinfo import ZoneInfo NoneType = type(None) class Fixture: T = 123 T_INT = 123 T_INT_STR = 123 ANY = 123 INT = 123 FLOAT = 1.23 BOOL = True LIST = [1, 2, 3] TUPLE = (1,) TUPLE_DUMPED = [1] DEQUE = collections.deque([1, 2, 3]) SET = {1, 2, 3} 
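# A sketch of the subtype discriminator used in the tests above: the base
# class declares Discriminator(field=..., include_subtypes=True,
# variant_tagger_fn=...) and `from_dict` on the base dispatches to the tagged
# subclass. Shape and Circle are illustrative names.
from dataclasses import dataclass

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.types import Discriminator


@dataclass
class Shape(DataClassDictMixin):
    class Config(BaseConfig):
        discriminator = Discriminator(
            field="__type",
            include_subtypes=True,
            variant_tagger_fn=lambda cls: cls.__name__,
        )


@dataclass
class Circle(Shape):
    radius: float = 1.0


# dispatch happens on the base class, keyed by the "__type" tag
assert Shape.from_dict({"__type": "Circle", "radius": 2.0}) == Circle(2.0)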
FROZEN_SET = frozenset([1, 2, 3]) CHAIN_MAP = collections.ChainMap({"a": 1, "b": 2}, {"c": 3, "d": 4}) MAPS_LIST = [{"a": 1, "b": 2}, {"c": 3, "d": 4}] DICT = {"a": 1, "b": 2} ORDERED_DICT = collections.OrderedDict(a=1, b=2) DEFAULT_DICT = collections.defaultdict(int, a=1, b=2) MAPPING_PROXY = MappingProxyType(DICT) DEFAULT_NONE_DICT = collections.defaultdict(None, a=1, b=2) COUNTER: Counter[str] = collections.Counter(a=1, b=2) BYTES = b"123" BYTES_BASE64 = "MTIz\n" BYTE_ARRAY = bytearray(b"123") STR = "123" ENUM = MyEnum.a INT_ENUM = MyIntEnum.a STR_ENUM = MyStrEnum.a STR_ENUM_NATIVE = MyNativeStrEnum.a FLAG = MyFlag.a INT_FLAG = MyIntFlag.a DATA_CLASS = MyDataClass(a=1, b=2) T_DATA_CLASS = MyDataClass(a=1, b=2) DATA_CLASS_WITH_UNION = MyDataClassWithUnion(a=1, b=2) NONE = None DATETIME = datetime(2018, 10, 29, 12, 46, 55, 308495) DATETIME_STR = "2018-10-29T12:46:55.308495" DATE = DATETIME.date() DATE_STR = "2018-10-29" TIME = DATETIME.time() TIME_STR = "12:46:55.308495" TIMEDELTA = timedelta(3.14159265358979323846) TIMEZONE = timezone(timedelta(hours=3)) UUID = uuid.UUID("3c25dd74-f208-46a2-9606-dd3919e975b7") UUID_STR = "3c25dd74-f208-46a2-9606-dd3919e975b7" IP4ADDRESS_STR = "127.0.0.1" IP4ADDRESS = ipaddress.IPv4Address(IP4ADDRESS_STR) IP6ADDRESS_STR = "::1" IP6ADDRESS = ipaddress.IPv6Address(IP6ADDRESS_STR) IP4NETWORK_STR = "127.0.0.0/8" IP4NETWORK = ipaddress.IPv4Network(IP4NETWORK_STR) IP6NETWORK_STR = "::/128" IP6NETWORK = ipaddress.IPv6Network(IP6NETWORK_STR) IP4INTERFACE_STR = "192.168.1.1/24" IP4INTERFACE = ipaddress.IPv4Interface(IP4INTERFACE_STR) IP6INTERFACE_STR = "::1/128" IP6INTERFACE = ipaddress.IPv6Interface(IP6INTERFACE_STR) DECIMAL = decimal.Decimal("1.33") DECIMAL_STR = "1.33" FRACTION = fractions.Fraction("1/3") FRACTION_STR = "1/3" MUTABLE_STRING = MutableString(STR) MUTABLE_STRING_STR = STR CUSTOM_PATH = CustomPath("/a/b/c") CUSTOM_PATH_STR = "/a/b/c" CUSTOM_SERIALIZE = "_FOOBAR_" GENERIC_SERIALIZABLE_LIST_INT = GenericSerializableList([1, 2, 3]) GENERIC_SERIALIZABLE_LIST_STR = GenericSerializableList(["a", "b", "c"]) LITERAL_STRING = "foo" inner_values = [ (T, Fixture.T, Fixture.T), (TInt, Fixture.T_INT, Fixture.T_INT), (TIntStr, Fixture.T_INT_STR, Fixture.T_INT_STR), (TDefaultInt, Fixture.T_INT, Fixture.T_INT), (Any, Fixture.ANY, Fixture.ANY), (TAny, Fixture.ANY, Fixture.ANY), (int, Fixture.INT, Fixture.INT), (float, Fixture.FLOAT, Fixture.FLOAT), (bool, Fixture.BOOL, Fixture.BOOL), (List[int], Fixture.LIST, Fixture.LIST), (List, Fixture.LIST, Fixture.LIST), (Deque[int], Fixture.DEQUE, Fixture.LIST), (Deque, Fixture.DEQUE, Fixture.LIST), (Tuple[int], Fixture.TUPLE, Fixture.TUPLE_DUMPED), (Tuple, Fixture.TUPLE, Fixture.TUPLE_DUMPED), (Set[int], Fixture.SET, Fixture.LIST), (Set, Fixture.SET, Fixture.LIST), (FrozenSet[int], Fixture.FROZEN_SET, Fixture.LIST), (FrozenSet, Fixture.FROZEN_SET, Fixture.LIST), (ChainMap[str, int], Fixture.CHAIN_MAP, Fixture.MAPS_LIST), (ChainMap, Fixture.CHAIN_MAP, Fixture.MAPS_LIST), (Dict[str, int], Fixture.DICT, Fixture.DICT), (Dict, Fixture.DICT, Fixture.DICT), (Mapping[str, int], Fixture.DICT, Fixture.DICT), (Mapping, Fixture.DICT, Fixture.DICT), (OrderedDict[str, int], Fixture.ORDERED_DICT, Fixture.DICT), (OrderedDict, Fixture.ORDERED_DICT, Fixture.DICT), (DefaultDict[str, int], Fixture.DEFAULT_DICT, Fixture.DICT), (DefaultDict, Fixture.DEFAULT_NONE_DICT, Fixture.DICT), (Counter[str], Fixture.COUNTER, Fixture.DICT), (Counter, Fixture.COUNTER, Fixture.DICT), (MutableMapping[str, int], Fixture.DICT, Fixture.DICT), (MutableMapping, 
Fixture.DICT, Fixture.DICT), (Sequence[int], Fixture.LIST, Fixture.LIST), (Sequence, Fixture.LIST, Fixture.LIST), (bytes, Fixture.BYTES, Fixture.BYTES_BASE64), (bytearray, Fixture.BYTE_ARRAY, Fixture.BYTES_BASE64), (str, Fixture.STR, Fixture.STR), (MyEnum, Fixture.ENUM, Fixture.ENUM.value), (MyStrEnum, Fixture.STR_ENUM, Fixture.STR_ENUM.value), (MyNativeStrEnum, Fixture.STR_ENUM_NATIVE, Fixture.STR_ENUM_NATIVE.value), (MyIntEnum, Fixture.INT_ENUM, Fixture.INT_ENUM.value), (MyFlag, Fixture.FLAG, Fixture.FLAG.value), (MyIntFlag, Fixture.INT_FLAG, Fixture.INT_FLAG.value), (MyDataClass, Fixture.DATA_CLASS, Fixture.DICT), (TMyDataClass, Fixture.T_DATA_CLASS, Fixture.DICT), (MyDataClassWithUnion, Fixture.DATA_CLASS_WITH_UNION, Fixture.DICT), (NoneType, Fixture.NONE, Fixture.NONE), (datetime, Fixture.DATETIME, Fixture.DATETIME_STR), (date, Fixture.DATE, Fixture.DATE_STR), (time, Fixture.TIME, Fixture.TIME_STR), (timedelta, Fixture.TIMEDELTA, Fixture.TIMEDELTA.total_seconds()), (timezone, Fixture.TIMEZONE, "UTC+03:00"), (uuid.UUID, Fixture.UUID, Fixture.UUID_STR), (ipaddress.IPv4Address, Fixture.IP4ADDRESS, Fixture.IP4ADDRESS_STR), (ipaddress.IPv6Address, Fixture.IP6ADDRESS, Fixture.IP6ADDRESS_STR), (ipaddress.IPv4Network, Fixture.IP4NETWORK, Fixture.IP4NETWORK_STR), (ipaddress.IPv6Network, Fixture.IP6NETWORK, Fixture.IP6NETWORK_STR), (ipaddress.IPv4Interface, Fixture.IP4INTERFACE, Fixture.IP4INTERFACE_STR), (ipaddress.IPv6Interface, Fixture.IP6INTERFACE, Fixture.IP6INTERFACE_STR), (decimal.Decimal, Fixture.DECIMAL, Fixture.DECIMAL_STR), (fractions.Fraction, Fixture.FRACTION, Fixture.FRACTION_STR), (MutableString, Fixture.MUTABLE_STRING, Fixture.MUTABLE_STRING_STR), (CustomPath, Fixture.CUSTOM_PATH, Fixture.CUSTOM_PATH_STR), ( GenericSerializableList[int], Fixture.GENERIC_SERIALIZABLE_LIST_INT, [3, 4, 5], ), ( GenericSerializableList[str], Fixture.GENERIC_SERIALIZABLE_LIST_STR, ["_a", "_b", "_c"], ), (MyDatetimeNewType, Fixture.DATETIME, Fixture.DATETIME_STR), (LiteralString, Fixture.LITERAL_STRING, Fixture.LITERAL_STRING), ] if os.name == "posix": inner_values.extend( [ (Path, Path("/a/b/c"), "/a/b/c"), (PurePath, PurePath("/a/b/c"), "/a/b/c"), (PosixPath, PosixPath("/a/b/c"), "/a/b/c"), (PurePosixPath, PurePosixPath("/a/b/c"), "/a/b/c"), (os.PathLike, PurePosixPath("/a/b/c"), "/a/b/c"), ] ) else: inner_values.extend( [ (Path, Path("/a/b/c"), "\\a\\b\\c"), (PurePath, PurePath("/a/b/c"), "\\a\\b\\c"), (WindowsPath, WindowsPath("C:/Windows"), "C:\\Windows"), ( PureWindowsPath, PureWindowsPath("C:/Program Files"), "C:\\Program Files", ), (os.PathLike, PureWindowsPath("/a/b/c"), "\\a\\b\\c"), ] ) if PEP_585_COMPATIBLE: inner_values.extend( [ (list[int], Fixture.LIST, Fixture.LIST), (list, Fixture.LIST, Fixture.LIST), (collections.deque[int], Fixture.DEQUE, Fixture.LIST), (collections.deque, Fixture.DEQUE, Fixture.LIST), (tuple[int], Fixture.TUPLE, Fixture.TUPLE_DUMPED), (tuple, Fixture.TUPLE, Fixture.TUPLE_DUMPED), (set[int], Fixture.SET, Fixture.LIST), (set, Fixture.SET, Fixture.LIST), (frozenset[int], Fixture.FROZEN_SET, Fixture.LIST), (frozenset, Fixture.FROZEN_SET, Fixture.LIST), (collections.abc.Set[int], Fixture.SET, Fixture.LIST), (collections.abc.Set, Fixture.SET, Fixture.LIST), (collections.abc.MutableSet[int], Fixture.SET, Fixture.LIST), (collections.abc.MutableSet, Fixture.SET, Fixture.LIST), ( collections.ChainMap[str, int], Fixture.CHAIN_MAP, Fixture.MAPS_LIST, ), (collections.ChainMap, Fixture.CHAIN_MAP, Fixture.MAPS_LIST), (dict[str, int], Fixture.DICT, Fixture.DICT), (dict, 
Fixture.DICT, Fixture.DICT), (collections.abc.Mapping[str, int], Fixture.DICT, Fixture.DICT), (collections.abc.Mapping, Fixture.DICT, Fixture.DICT), ( collections.OrderedDict[str, int], Fixture.ORDERED_DICT, Fixture.DICT, ), (collections.OrderedDict, Fixture.ORDERED_DICT, Fixture.DICT), ( collections.defaultdict[str, int], Fixture.DEFAULT_DICT, Fixture.DICT, ), (collections.defaultdict, Fixture.DEFAULT_NONE_DICT, Fixture.DICT), (collections.Counter[str], Fixture.COUNTER, Fixture.DICT), (collections.Counter, Fixture.COUNTER, Fixture.DICT), ( collections.abc.MutableMapping[str, int], Fixture.DICT, Fixture.DICT, ), (collections.abc.MutableMapping, Fixture.DICT, Fixture.DICT), (collections.abc.Sequence[int], Fixture.LIST, Fixture.LIST), (collections.abc.Sequence, Fixture.LIST, Fixture.LIST), (collections.abc.MutableSequence[int], Fixture.LIST, Fixture.LIST), (collections.abc.MutableSequence, Fixture.LIST, Fixture.LIST), ] ) if PY_39_MIN: inner_values.extend( ( (ZoneInfo, ZoneInfo("Europe/Moscow"), "Europe/Moscow"), (MappingProxyType[str, int], Fixture.MAPPING_PROXY, Fixture.DICT), (MappingProxyType, Fixture.MAPPING_PROXY, Fixture.DICT), ) ) hashable_inner_values = [ (type_, value, value_dumped) for type_, value, value_dumped in inner_values if isinstance(value, Hashable) and isinstance(value_dumped, Hashable) ] generic_sequence_types = [List, Deque, Tuple, Set, FrozenSet] generic_mapping_types = [ Dict, Mapping, OrderedDict, DefaultDict, MutableMapping, ] if PEP_585_COMPATIBLE: generic_sequence_types.extend( [ list, collections.deque, tuple, set, frozenset, collections.abc.Set, collections.abc.MutableSet, collections.Counter, collections.abc.Sequence, collections.abc.MutableSequence, ] ) generic_mapping_types.extend( [ collections.ChainMap, dict, collections.abc.Mapping, collections.OrderedDict, collections.defaultdict, collections.abc.MutableMapping, ] ) unsupported_field_types = [Queue] if not PEP_585_COMPATIBLE: unsupported_field_types.extend( [ list, collections.deque, tuple, set, frozenset, collections.ChainMap, dict, collections.OrderedDict, collections.defaultdict, collections.Counter, ] ) unsupported_typing_primitives = [AnyStr] x_factory_mapping = { List: list, Deque: collections.deque, Tuple: tuple, Set: set, FrozenSet: frozenset, MutableSet: set, Dict: lambda items: {k: v for k, v in items}, Mapping: lambda items: {k: v for k, v in items}, MutableMapping: lambda items: {k: v for k, v in items}, OrderedDict: lambda items: {k: v for k, v in items}, DefaultDict: lambda items: {k: v for k, v in items}, Counter: lambda items: collections.Counter({k: v for k, v in items}), ChainMap: lambda items: collections.ChainMap(*({k: v} for k, v in items)), } if PEP_585_COMPATIBLE: x_factory_mapping.update( { list: list, collections.deque: collections.deque, tuple: tuple, set: set, frozenset: frozenset, collections.abc.MutableSet: set, dict: lambda items: {k: v for k, v in items}, collections.abc.Mapping: lambda items: {k: v for k, v in items}, collections.abc.MutableMapping: lambda items: { k: v for k, v in items }, collections.OrderedDict: lambda items: {k: v for k, v in items}, collections.defaultdict: lambda items: {k: v for k, v in items}, collections.Counter: lambda items: collections.Counter( {k: v for k, v in items} ), collections.ChainMap: lambda items: collections.ChainMap( *({k: v} for k, v in items) ), } ) # noinspection PyCallingNonCallable def check_collection_generic(type_, value_info, x_values_number=3): x_type, x_value, x_value_dumped = value_info @dataclass class 
DataClass(DataClassDictMixin): x: type_[x_type] x_factory = x_factory_mapping[type_] x = x_factory([x_value for _ in range(x_values_number)]) instance = DataClass(x) dumped = { "x": list(x_factory([x_value_dumped for _ in range(x_values_number)])) } instance_dumped = instance.to_dict() instance_loaded = DataClass.from_dict(dumped) assert instance_dumped == dumped assert instance_loaded == instance assert same_types(instance_dumped, dumped) assert same_types(instance_loaded.x, x) # noinspection PyCallingNonCallable def check_mapping_generic(type_, key_info, value_info): k_type, k_value, k_value_dumped = key_info v_type, v_value, v_value_dumped = value_info if type_ is Counter: x_type = type_[k_type] else: x_type = type_[k_type, v_type] @dataclass class DataClass(DataClassDictMixin): x: x_type x_factory = x_factory_mapping[type_] if type_ is Counter: x = x_factory([(k_value, 1) for _ in range(3)]) else: x = x_factory([(k_value, v_value) for _ in range(3)]) instance = DataClass(x) k_dumped = k_value_dumped v_dumped = v_value_dumped if type_ is ChainMap: dumped = { "x": x_factory([(k_dumped, v_dumped) for _ in range(3)]).maps } elif type_ is Counter: dumped = {"x": x_factory([(k_dumped, 1) for _ in range(3)])} else: dumped = {"x": x_factory([(k_dumped, v_dumped) for _ in range(3)])} instance_dumped = instance.to_dict() instance_loaded = DataClass.from_dict(dumped) assert instance_dumped == dumped assert instance_loaded == instance assert same_types(instance_dumped, dumped) assert same_types(instance_loaded.x, x) @pytest.mark.parametrize("value_info", inner_values) def test_one_level(value_info): x_type, x_value, x_value_dumped = value_info @dataclass class DataClass(DataClassDictMixin): x: x_type instance = DataClass(x_value) dumped = {"x": x_value_dumped} instance_dumped = instance.to_dict() instance_loaded = DataClass.from_dict(dumped) assert instance_dumped == dumped assert instance_loaded == instance assert same_types(instance_dumped, dumped) assert same_types(instance_loaded.x, x_value) @pytest.mark.parametrize("value_info", inner_values) def test_with_generic_list(value_info): check_collection_generic(List, value_info) @pytest.mark.parametrize("value_info", inner_values) def test_with_generic_deque(value_info): check_collection_generic(Deque, value_info) @pytest.mark.parametrize("value_info", inner_values) def test_with_generic_tuple(value_info): check_collection_generic(Tuple, value_info, 1) @pytest.mark.parametrize("value_info", hashable_inner_values) def test_with_generic_set(value_info): check_collection_generic(Set, value_info) @pytest.mark.parametrize("value_info", hashable_inner_values) def test_with_generic_frozenset(value_info): check_collection_generic(FrozenSet, value_info) @pytest.mark.parametrize("value_info", hashable_inner_values) def test_with_generic_mutable_set(value_info): check_collection_generic(MutableSet, value_info) @pytest.mark.parametrize("value_info", inner_values) @pytest.mark.parametrize("key_info", hashable_inner_values) def test_with_generic_dict(key_info, value_info): check_mapping_generic(Dict, key_info, value_info) @pytest.mark.parametrize("value_info", inner_values) @pytest.mark.parametrize("key_info", hashable_inner_values) def test_with_generic_mapping(key_info, value_info): check_mapping_generic(Mapping, key_info, value_info) @pytest.mark.parametrize("value_info", inner_values) @pytest.mark.parametrize("key_info", hashable_inner_values) def test_with_generic_ordered_dict(key_info, value_info): check_mapping_generic(OrderedDict, key_info, value_info) 
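# A compact sketch of the round-trip property the parametrized tests above
# assert for generic containers: values serialize to plain builtins and load
# back with the declared container type. The Bag class is illustrative.
import collections
from dataclasses import dataclass
from typing import Counter, Deque

from mashumaro import DataClassDictMixin


@dataclass
class Bag(DataClassDictMixin):
    items: Deque[int]  # dumped as a list, loaded back as a deque
    tally: Counter[str]  # dumped as a dict, loaded back as a Counter


obj = Bag(collections.deque([1, 2]), collections.Counter(a=2))
assert obj.to_dict() == {"items": [1, 2], "tally": {"a": 2}}
assert Bag.from_dict({"items": [1, 2], "tally": {"a": 2}}) == obj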
@pytest.mark.parametrize("value_info", inner_values)
@pytest.mark.parametrize("key_info", hashable_inner_values)
def test_with_generic_counter(key_info, value_info):
    check_mapping_generic(Counter, key_info, value_info)

@pytest.mark.parametrize("value_info", inner_values)
@pytest.mark.parametrize("key_info", hashable_inner_values)
def test_with_generic_mutable_mapping(key_info, value_info):
    check_mapping_generic(MutableMapping, key_info, value_info)

@pytest.mark.parametrize("value_info", inner_values)
@pytest.mark.parametrize("key_info", hashable_inner_values)
def test_with_generic_chain_map(key_info, value_info):
    check_mapping_generic(ChainMap, key_info, value_info)

@pytest.mark.parametrize("x_type", unsupported_field_types)
@pytest.mark.parametrize("generic_type", generic_sequence_types)
def test_unsupported_generic_field_types(x_type, generic_type):
    with pytest.raises(UnserializableField):

        @dataclass
        class _(DataClassDictMixin):
            # noinspection PyTypeChecker
            x: generic_type[x_type]

    with add_unpack_method:
        with pytest.raises(UnserializableField):

            @dataclass
            class _(DataClassDictMixin):
                # noinspection PyTypeChecker
                x: generic_type[x_type]

@pytest.mark.parametrize("x_type", unsupported_typing_primitives)
@pytest.mark.parametrize("generic_type", generic_sequence_types)
def test_unsupported_generic_typing_primitives(x_type, generic_type):
    with pytest.raises(UnserializableDataError):

        @dataclass
        class _(DataClassDictMixin):
            # noinspection PyTypeChecker
            x: generic_type[x_type]

    with add_unpack_method:
        with pytest.raises(UnserializableDataError):

            @dataclass
            class _(DataClassDictMixin):
                # noinspection PyTypeChecker
                x: generic_type[x_type]

@pytest.mark.parametrize("x_type", unsupported_field_types)
def test_unsupported_field_types(x_type):
    with pytest.raises(UnserializableField):

        @dataclass
        class _(DataClassDictMixin):
            x: x_type

    with add_unpack_method:
        with pytest.raises(UnserializableField):

            @dataclass
            class _(DataClassDictMixin):
                x: x_type

@pytest.mark.parametrize("x_type", unsupported_typing_primitives)
def test_unsupported_typing_primitives(x_type):
    with pytest.raises(UnserializableDataError):

        @dataclass
        class _(DataClassDictMixin):
            x: x_type

    with add_unpack_method:
        with pytest.raises(UnserializableDataError):

            @dataclass
            class _(DataClassDictMixin):
                x: x_type

@pytest.mark.parametrize("generic_type", generic_mapping_types)
def test_data_class_as_mapping_key(generic_type):
    @dataclass
    class Key(DataClassDictMixin):
        pass

    with pytest.raises(UnserializableDataError):

        @dataclass
        class _(DataClassDictMixin):
            x: generic_type[Key, int]

    with add_unpack_method:
        with pytest.raises(UnserializableDataError):

            @dataclass
            class _(DataClassDictMixin):
                x: generic_type[Key, int]

def test_data_class_as_mapping_key_for_counter():
    @dataclass
    class Key(DataClassDictMixin):
        pass

    with pytest.raises(UnserializableDataError):

        @dataclass
        class _(DataClassDictMixin):
            x: Counter[Key]

    with add_unpack_method:
        with pytest.raises(UnserializableDataError):

            @dataclass
            class _(DataClassDictMixin):
                x: Counter[Key]

def test_data_class_as_chain_map_key():
    @dataclass
    class Key(DataClassDictMixin):
        pass

    with pytest.raises(UnserializableDataError):

        @dataclass
        class _(DataClassDictMixin):
            x: ChainMap[Key, int]

    with add_unpack_method:
        with pytest.raises(UnserializableDataError):

            @dataclass
            class _(DataClassDictMixin):
                x: ChainMap[Key, int]

@pytest.mark.parametrize("value_info", inner_values)
def test_with_any(value_info):
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Any

    x = value_info[1]
    dumped = {"x": x}
    instance = DataClass(x)
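    # Editorial note: with an Any-typed field, the value is expected to pass
    # through unchanged in both directions; the same_types checks below
    # verify that the value's (nested) types are preserved exactly.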
    instance_dumped = instance.to_dict()
    instance_loaded = DataClass.from_dict(dumped)
    assert instance_dumped == dumped
    assert instance_loaded == instance
    assert same_types(instance_dumped, dumped)
    assert same_types(instance_loaded.x, x)

@pytest.mark.parametrize("value_info", inner_values)
def test_with_optional(value_info):
    x_type, x_value, x_value_dumped = value_info

    @dataclass
    class DataClass(DataClassDictMixin):
        x: Optional[x_type] = None

    for instance in [DataClass(x_value), DataClass()]:
        if instance.x is None:
            v_dumped = None
        else:
            v_dumped = x_value_dumped
        dumped = {"x": v_dumped}
        instance_dumped = instance.to_dict()
        instance_loaded = DataClass.from_dict(dumped)
        assert instance_dumped == dumped
        assert instance_loaded == instance
        assert same_types(instance_dumped, dumped)
        assert same_types(instance_loaded.x, instance.x)

def test_raises_missing_field():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int

    with pytest.raises(MissingField):
        DataClass.from_dict({})

def test_empty_dataclass():
    @dataclass
    class DataClass(DataClassDictMixin):
        pass

    assert DataClass().to_dict() == {}
    assert type(DataClass.from_dict({})) is DataClass
    assert DataClass.from_dict({}).__dict__ == {}

def test_weird_field_type():
    with pytest.raises(UnserializableDataError):

        @dataclass
        class _(DataClassDictMixin):
            x: 123

    with add_unpack_method:
        with pytest.raises(UnserializableDataError):

            @dataclass
            class _(DataClassDictMixin):
                x: 123

@pytest.mark.parametrize(
    "rounding", [None, decimal.ROUND_UP, decimal.ROUND_DOWN]
)
@pytest.mark.parametrize("places", [None, 1, 2])
def test_rounded_decimal(places, rounding):
    @dataclass
    class DataClass(DataClassDictMixin):
        x: decimal.Decimal

        class Config(BaseConfig):
            serialization_strategy = {
                decimal.Decimal: RoundedDecimal(places, rounding)
            }

    digit = decimal.Decimal(0.35)
    if places is not None:
        exp = decimal.Decimal((0, (1,), -places))
        quantized = digit.quantize(exp, rounding)
    else:
        quantized = digit
    assert DataClass(digit).to_dict() == {"x": str(quantized)}
    assert DataClass.from_dict({"x": str(quantized)}) == DataClass(x=quantized)

def test_serializable_type():
    with pytest.raises(NotImplementedError):
        # noinspection PyTypeChecker
        SerializableType._serialize(None)
    with pytest.raises(NotImplementedError):
        SerializableType._deserialize(None)

def test_serialization_strategy():
    with pytest.raises(NotImplementedError):
        # noinspection PyTypeChecker
        SerializationStrategy.serialize(None, None)
    with pytest.raises(NotImplementedError):
        # noinspection PyTypeChecker
        SerializationStrategy.deserialize(None, None)

def test_class_vars():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: ClassVar[int] = None

    assert DataClass().to_dict() == {}
    assert DataClass.from_dict({}) == DataClass()

def test_final():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Final[int] = 42

    assert DataClass().to_dict() == {"x": 42}
    assert DataClass(42).to_dict() == {"x": 42}
    assert DataClass(33).to_dict() == {"x": 33}
    assert DataClass.from_dict({}) == DataClass()
    assert DataClass.from_dict({"x": 42}) == DataClass()
    assert DataClass.from_dict({"x": 33}) == DataClass(33)

def test_init_vars():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: InitVar[int] = None
        y: int = None

        def __post_init__(self, x: int):
            if self.y is None and x is not None:
                self.y = x

    assert DataClass().to_dict() == {"y": None}
    assert DataClass(x=1).to_dict() == {"y": 1}
    assert DataClass.from_dict({}) == DataClass()
    assert DataClass.from_dict({"x": 1}) == DataClass()

def test_dataclass_with_defaults():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int
        y: int = 1

    assert DataClass.from_dict({"x": 0}) == DataClass(x=0, y=1)

def test_dataclass_with_field_and_default():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int = field(default=1)

    assert DataClass.from_dict({}) == DataClass(x=1)
    assert DataClass().to_dict() == {"x": 1}

def test_dataclass_with_field_and_no_default():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int = field(metadata={})

    with pytest.raises(MissingField):
        assert DataClass.from_dict({})

def test_dataclass_with_field_and_default_factory():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: List[str] = field(default_factory=list)

    assert DataClass.from_dict({}) == DataClass(x=[])
    assert DataClass().to_dict() == {"x": []}

def test_derived_dataclass_with_ancestors_defaults():
    @dataclass
    class A:
        x: int
        y: int = 1

    @dataclass
    class B(A, DataClassDictMixin):
        z: int = 3

    @dataclass
    class C(B, DataClassDictMixin):
        y: int = 4

    @dataclass
    class D(C):
        pass

    assert B.from_dict({"x": 0}) == B(x=0, y=1, z=3)
    assert C.from_dict({"x": 0}) == C(x=0, y=4, z=3)
    assert D.from_dict({"x": 0}) == D(x=0, y=4, z=3)

def test_derived_dataclass_with_ancestors_and_field_with_default():
    @dataclass
    class A:
        x: int = field(default=1)

    @dataclass
    class B(A, DataClassDictMixin):
        y: int = field(default=2)

    @dataclass
    class C(B, DataClassDictMixin):
        z: int = field(default=3)

    @dataclass
    class D(C):
        pass

    assert B.from_dict({}) == B(x=1, y=2)
    assert C.from_dict({}) == C(x=1, y=2, z=3)
    assert D.from_dict({}) == D(x=1, y=2, z=3)

def test_derived_dataclass_with_ancestors_and_field_with_default_factory():
    @dataclass
    class A:
        x: List[int] = field(default_factory=lambda: [1])

    @dataclass
    class B(A, DataClassDictMixin):
        y: List[int] = field(default_factory=lambda: [2])

    @dataclass
    class C(B, DataClassDictMixin):
        z: List[int] = field(default_factory=lambda: [3])

    @dataclass
    class D(C):
        pass

    assert B.from_dict({}) == B(x=[1], y=[2])
    assert C.from_dict({}) == C(x=[1], y=[2], z=[3])
    assert D.from_dict({}) == D(x=[1], y=[2], z=[3])

def test_invalid_field_value_deserialization():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int

    with pytest.raises(InvalidFieldValue):
        DataClass.from_dict({"x": "bad_value"})

def test_invalid_field_value_deserialization_with_default():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int = 1

    with pytest.raises(InvalidFieldValue):
        DataClass.from_dict({"x": "bad_value"})

def test_invalid_field_value_deserialization_with_rounded_decimal():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: decimal.Decimal

        class Config(BaseConfig):
            serialization_strategy = {decimal.Decimal: RoundedDecimal()}

    with pytest.raises(InvalidFieldValue):
        DataClass.from_dict({"x": "bad_value"})

def test_invalid_field_value_deserialization_with_rounded_decimal_with_default():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: decimal.Decimal = Fixture.DECIMAL

        class Config(BaseConfig):
            serialization_strategy = {decimal.Decimal: RoundedDecimal()}

    with pytest.raises(InvalidFieldValue):
        DataClass.from_dict({"x": "bad_value"})

@pytest.mark.parametrize(
    "value_info",
    [
        v
        for v in inner_values
        if v[0] not in [MyDataClass, NoneType, MutableString]
    ],
)
def test_serialize_deserialize_options(value_info):
    x_type, x_value, x_value_dumped = value_info

    @dataclass
    class DataClass(DataClassDictMixin):
        x: x_type = field(
            metadata={
                "serialize": lambda v: Fixture.CUSTOM_SERIALIZE,
                "deserialize": lambda v: (
                    x_value
                    if v == Fixture.CUSTOM_SERIALIZE
                    else f"!{Fixture.CUSTOM_SERIALIZE}"
                ),
            }
        )

    instance = DataClass(x_value)
    v_dumped = Fixture.CUSTOM_SERIALIZE
    dumped = {"x": v_dumped}
    instance_dumped = instance.to_dict()
    instance_loaded = DataClass.from_dict(dumped)
    assert instance_dumped == dumped
    assert instance_loaded == instance
    assert same_types(instance_dumped, dumped)
    assert same_types(instance_loaded.x, x_value)

def test_dataclass_field_without_mixin():
    @dataclass
    class DataClass(DataClassDictMixin):
        p: DataClassWithoutMixin

    obj = DataClass(DataClassWithoutMixin(42))
    assert DataClass.from_dict({"p": {"i": "42"}}) == obj
    assert obj.to_dict() == {"p": {"i": 42}}

def test_serializable_type_dataclass():
    @dataclass
    class DataClass(DataClassDictMixin):
        s: SerializableTypeDataClass

    s_value = SerializableTypeDataClass(a=9, b=9)
    assert DataClass.from_dict({"s": {"a": 10, "b": 10}}) == DataClass(s_value)
    assert DataClass(s_value).to_dict() == {"s": {"a": 10, "b": 10}}

def test_optional_inside_collection():
    @dataclass
    class DataClass(DataClassDictMixin):
        l: List[Optional[int]]
        d: Dict[Optional[int], Optional[int]]

    d = {"l": [1, None, 2], "d": {1: 1, 2: None, None: 3}}
    o = DataClass.from_dict(d)
    assert o == DataClass(l=[1, None, 2], d={1: 1, 2: None, None: 3})
    assert o.to_dict() == d

def test_bound_type_var_inside_collection():
    @dataclass
    class DataClass(DataClassDictMixin):
        l: List[T_Optional_int]
        d: Dict[T_Optional_int, T_Optional_int]

    d = {"l": [1, None, 2], "d": {1: 1, 2: None, None: 3}}
    o = DataClass.from_dict(d)
    assert o == DataClass(l=[1, None, 2], d={1: 1, 2: None, None: 3})
    assert o.to_dict() == d

def test_generic_serializable_type():
    with pytest.raises(NotImplementedError):
        # noinspection PyTypeChecker
        GenericSerializableType._serialize(None, None)
    with pytest.raises(NotImplementedError):
        GenericSerializableType._deserialize(None, None)

def test_generic_serializable_type_dataclass():
    @dataclass
    class DataClass(DataClassDictMixin):
        s: GenericSerializableTypeDataClass

    s_value = GenericSerializableTypeDataClass(a=9, b=9)
    assert DataClass.from_dict({"s": {"a": 10, "b": 10}}) == DataClass(s_value)
    assert DataClass(s_value).to_dict() == {"s": {"a": 10, "b": 10}}

def test_dataclass_with_different_tuples():
    @dataclass
    class DataClass(DataClassDictMixin):
        a: Tuple
        c: Tuple[int]
        d: Tuple[int, float, int]
        e: Tuple[int, ...]
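    # Editorial note (a hedged reading of the assertions below): a bare Tuple
    # accepts any value as-is, Tuple[int] is a fixed one-element shape (extra
    # input items are dropped on deserialization), Tuple[int, float, int]
    # casts each element to its positional type, and Tuple[int, ...] is a
    # variable-length homogeneous tuple.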
    obj = DataClass(a=(1, "2", 3.0), c=(1,), d=(1, 2.0, 3), e=(1, 2, 3))
    assert (
        DataClass.from_dict(
            {
                "a": [1, "2", 3.0],
                "c": [1, 2, 3],
                "d": ["1", "2.0", "3"],
                "e": [1, 2, 3],
            }
        )
        == obj
    )
    assert obj.to_dict() == {
        "a": [1, "2", 3.0],
        "c": [1],
        "d": [1, 2.0, 3],
        "e": [1, 2, 3],
    }

def test_dataclass_with_empty_tuple():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Tuple[()]

    obj = DataClass(x=())
    assert (
        DataClass.from_dict(
            {
                "x": [1, 2, 3],
            }
        )
        == obj
    )
    assert obj.to_dict() == {
        "x": [],
    }

def test_dataclass_with_typed_dict_required_keys():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: TypedDictRequiredKeys

    for data in ({}, {"int": 1}, {"float": 1.0}):
        with pytest.raises(InvalidFieldValue):
            DataClass.from_dict({"x": data})

    obj = DataClass(x={"int": 1, "float": 2.0})
    assert (
        DataClass.from_dict({"x": {"int": "1", "float": "2.0", "str": "str"}})
        == obj
    )
    assert obj.to_dict() == {"x": {"int": 1, "float": 2.0}}

def test_dataclass_with_typed_dict_optional_keys():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: TypedDictOptionalKeys

    for data in ({}, {"int": 1}, {"float": 1.0}):
        assert DataClass.from_dict({"x": data}) == DataClass(x=data)

    obj = DataClass(x={"int": 1, "float": 2.0})
    assert (
        DataClass.from_dict({"x": {"int": "1", "float": "2.0", "str": "str"}})
        == obj
    )
    assert obj.to_dict() == {"x": {"int": 1, "float": 2.0}}

def test_dataclass_with_typed_dict_required_and_optional_keys():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: TypedDictRequiredAndOptionalKeys

    for data in (
        {},
        {"str": "str"},
        {"int": 1},
        {"float": 1.0},
        {"int": 1, "str": "str"},
        {"float": 1.0, "str": "str"},
    ):
        with pytest.raises(InvalidFieldValue):
            DataClass.from_dict({"x": data})

    assert DataClass.from_dict(
        {"x": {"int": "1", "float": "2.0", "unknown": "unknown"}}
    ) == DataClass(x={"int": 1, "float": 2.0})
    assert DataClass.from_dict(
        {"x": {"int": "1", "float": "2.0", "str": "str"}}
    ) == DataClass(x={"int": 1, "float": 2.0, "str": "str"})
    assert DataClass(x={"int": 1, "float": 2.0}).to_dict() == {
        "x": {"int": 1, "float": 2.0}
    }
    assert DataClass(x={"int": 1, "float": 2.0, "str": "str"}).to_dict() == {
        "x": {"int": 1, "float": 2.0, "str": "str"}
    }

def test_dataclass_with_named_tuple():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyNamedTuple

    obj = DataClass(x=MyNamedTuple(1, 2.0))
    assert DataClass.from_dict({"x": ["1", "2.0"]}) == obj
    assert obj.to_dict() == {"x": [1, 2.0]}
    decoder = BasicDecoder(DataClass)
    encoder = BasicEncoder(DataClass)
    assert decoder.decode({"x": ["1", "2.0"]}) == obj
    assert encoder.encode(obj) == {"x": [1, 2.0]}

def test_dataclass_with_named_tuple_with_defaults():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyNamedTupleWithDefaults

    obj = DataClass(x=MyNamedTupleWithDefaults())
    assert DataClass.from_dict({"x": ["1"]}) == obj
    assert obj.to_dict() == {"x": [1, 2.0]}
    decoder = BasicDecoder(DataClass)
    encoder = BasicEncoder(DataClass)
    assert decoder.decode({"x": ["1"]}) == obj
    assert encoder.encode(obj) == {"x": [1, 2.0]}

def test_dataclass_with_untyped_named_tuple():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyUntypedNamedTuple

    obj = DataClass(x=MyUntypedNamedTuple("1", "2.0"))
    assert DataClass.from_dict({"x": ["1", "2.0"]}) == obj
    assert obj.to_dict() == {"x": ["1", "2.0"]}
    decoder = BasicDecoder(DataClass)
    encoder = BasicEncoder(DataClass)
    assert decoder.decode({"x": ["1", "2.0"]}) == obj
    assert encoder.encode(obj) == {"x": ["1", "2.0"]}

def test_dataclass_with_untyped_named_tuple_with_defaults():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyUntypedNamedTupleWithDefaults

    obj = DataClass(x=MyUntypedNamedTupleWithDefaults(i="1"))
    assert DataClass.from_dict({"x": ["1"]}) == obj
    assert obj.to_dict() == {"x": ["1", 2.0]}

def test_data_class_with_none():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: None
        y: NoneType
        z: List[None]

    obj = DataClass(x=None, y=None, z=[None])
    assert DataClass.from_dict({"x": None, "y": None, "z": [None]}) == obj
    assert obj.to_dict() == {"x": None, "y": None, "z": [None]}

def test_data_class_with_new_type_overridden():
    MyStr = NewType("MyStr", str)

    @dataclass
    class DataClass(DataClassDictMixin):
        x: str
        y: MyStr

        class Config(BaseConfig):
            serialization_strategy = {
                str: {
                    "serialize": lambda x: f"str_{x}",
                    "deserialize": lambda x: x[4:],
                },
                MyStr: {
                    "serialize": lambda x: f"MyStr_{x}",
                    "deserialize": lambda x: x[6:],
                },
            }

    instance = DataClass("a", MyStr("b"))
    assert DataClass.from_dict({"x": "str_a", "y": "MyStr_b"}) == instance
    assert instance.to_dict() == {"x": "str_a", "y": "MyStr_b"}

def test_tuple_with_optional():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Tuple[Optional[int], int] = field(default_factory=lambda: (None, 7))

    assert DataClass.from_dict({"x": [None, 42]}) == DataClass((None, 42))
    assert DataClass((None, 42)).to_dict() == {"x": [None, 42]}
    assert DataClass.from_dict({}) == DataClass((None, 7))
    assert DataClass().to_dict() == {"x": [None, 7]}

def test_tuple_with_optional_and_ellipsis():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Tuple[Optional[int], ...] = field(default_factory=lambda: (None, 7))

    assert DataClass.from_dict({"x": [None, 42]}) == DataClass((None, 42))
    assert DataClass((None, 42)).to_dict() == {"x": [None, 42]}
    assert DataClass.from_dict({}) == DataClass((None, 7))
    assert DataClass().to_dict() == {"x": [None, 7]}

def test_named_tuple_with_optional():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyNamedTupleWithOptional = field(
            default_factory=lambda: MyNamedTupleWithOptional(None, 7)
        )

    assert DataClass.from_dict({"x": [None, 42]}) == DataClass(
        MyNamedTupleWithOptional(None, 42)
    )
    assert DataClass(MyNamedTupleWithOptional(None, 42)).to_dict() == {
        "x": [None, 42]
    }
    assert DataClass.from_dict({}) == DataClass(
        MyNamedTupleWithOptional(None, 7)
    )
    assert DataClass().to_dict() == {"x": [None, 7]}

def test_unbound_generic_named_tuple():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: GenericNamedTuple

    obj = DataClass(GenericNamedTuple("2023-01-22", 42))
    assert DataClass.from_dict({"x": ["2023-01-22", "42"]}) == obj
    assert obj.to_dict() == {"x": ["2023-01-22", 42]}

def test_bound_generic_named_tuple():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: GenericNamedTuple[date]

    obj = DataClass(GenericNamedTuple(date(2023, 1, 22), 42))
    assert DataClass.from_dict({"x": ["2023-01-22", "42"]}) == obj
    assert obj.to_dict() == {"x": ["2023-01-22", 42]}

def test_typed_dict_required_keys_with_optional():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: TypedDictRequiredKeysWithOptional

    obj = DataClass({"x": None, "y": 42})
    assert DataClass.from_dict({"x": {"x": None, "y": 42}}) == obj
    assert obj.to_dict() == {"x": {"x": None, "y": 42}}
    obj = DataClass({"x": 33, "y": 42})
    assert DataClass.from_dict({"x": {"x": 33, "y": 42}}) == obj
    assert obj.to_dict() == {"x": {"x": 33, "y": 42}}
    decoder = BasicDecoder(DataClass)
    encoder = BasicEncoder(DataClass)
    assert decoder.decode({"x": {"x": 33, "y": 42}}) == obj
    assert encoder.encode(obj) == {"x": {"x": 33, "y": 42}}

def test_typed_dict_optional_keys_with_optional():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: TypedDictOptionalKeysWithOptional

    obj = DataClass({"x": None, "y": 42})
    assert DataClass.from_dict({"x": {"x": None, "y": 42}}) == obj
    assert obj.to_dict() == {"x": {"x": None, "y": 42}}
    obj = DataClass({"x": 33, "y": 42})
    assert DataClass.from_dict({"x": {"x": 33, "y": 42}}) == obj
    assert obj.to_dict() == {"x": {"x": 33, "y": 42}}
    decoder = BasicDecoder(DataClass)
    encoder = BasicEncoder(DataClass)
    assert decoder.decode({"x": {"x": 33, "y": 42}}) == obj
    assert encoder.encode(obj) == {"x": {"x": 33, "y": 42}}

def test_unbound_generic_typed_dict():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: GenericTypedDict

    obj = DataClass({"x": "2023-01-22", "y": 42})
    assert DataClass.from_dict({"x": {"x": "2023-01-22", "y": "42"}}) == obj
    assert obj.to_dict() == {"x": {"x": "2023-01-22", "y": 42}}
    decoder = BasicDecoder(DataClass)
    encoder = BasicEncoder(DataClass)
    assert decoder.decode({"x": {"x": "2023-01-22", "y": "42"}}) == obj
    assert encoder.encode(obj) == {"x": {"x": "2023-01-22", "y": 42}}

def test_bound_generic_typed_dict():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: GenericTypedDict[date]

    obj = DataClass({"x": date(2023, 1, 22), "y": 42})
    assert DataClass.from_dict({"x": {"x": "2023-01-22", "y": "42"}}) == obj
    assert obj.to_dict() == {"x": {"x": "2023-01-22", "y": 42}}
    decoder = BasicDecoder(DataClass)
    encoder = BasicEncoder(DataClass)
    assert decoder.decode({"x": {"x": "2023-01-22", "y": "42"}}) == obj
    assert encoder.encode(obj) == {"x": {"x": "2023-01-22", "y": 42}}

def test_dataclass_with_init_false_field():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int = field(init=False)

        def __post_init__(self):
            self.x = 42

    obj = DataClass()
    assert obj.to_dict() == {"x": 42}
    assert DataClass.from_dict({"x": 42}) == obj
    assert DataClass.from_dict({}) == obj

def test_dataclass_with_non_optional_none_value():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: int
        y: int = None
        z: int = 42

    with pytest.raises(InvalidFieldValue) as e:
        DataClass.from_dict({"x": None})
    assert e.value.field_name == "x"
    obj = DataClass(x=42)
    assert DataClass.from_dict({"x": 42}) == obj
    assert obj.to_dict() == {"x": 42, "y": None, "z": 42}

def test_dataclass_with_optional_list_with_optional_ints():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: Optional[List[Optional[int]]]

    obj = DataClass(x=[42, None])
    assert DataClass.from_dict({"x": [42, None]}) == obj
    assert obj.to_dict() == {"x": [42, None]}

def test_dataclass_with_default_nan_and_inf_with_omit_default():
    @dataclass
    class DataClass(DataClassDictMixin):
        a: float = float("nan")
        b: float = float("inf")
        c: float = float("-inf")

        class Config(BaseConfig):
            omit_default = True

    assert DataClass().to_dict() == {}
    assert DataClass(float("nan"), float("inf"), float("-inf")).to_dict() == {}
    assert (
        DataClass(float("nan"), float("+inf"), float("-inf")).to_dict() == {}
    )
    assert DataClass(float("inf"), float("-inf"), float("+inf")).to_dict() == {
        "a": float("inf"),
        "b": float("-inf"),
        "c": float("+inf"),
    }

def test_dataclass_with_default_int_flag_omit_default():
    @dataclass
    class DataClass(DataClassDictMixin):
        a: MyIntFlag = MyIntFlag.a
        b: MyIntFlag = MyIntFlag.b

        class Config(BaseConfig):
            omit_default = True

    assert DataClass().to_dict() == {}
    assert DataClass(MyIntFlag.a, MyIntFlag.b).to_dict() == {}

@pytest.mark.parametrize("value_info", inner_values)
def test_decoder(value_info):
    x_type, x_value, x_value_dumped = value_info
    decoder = BasicDecoder(x_type)
    assert decoder.decode(x_value_dumped) == x_value
    assert decode(x_value_dumped, x_type) == x_value
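# Editor's sketch (an editorial addition, not part of the original suite):
# BasicDecoder/BasicEncoder and the decode/encode shortcuts used in
# test_decoder above and test_encoder below also accept bare non-dataclass
# types, so a round trip for an arbitrary supported type looks roughly like:
#
#     decoder = BasicDecoder(List[date])
#     decoder.decode(["2023-01-22"])       # -> [date(2023, 1, 22)]
#     encoder = BasicEncoder(List[date])
#     encoder.encode([date(2023, 1, 22)])  # -> ["2023-01-22"]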
@pytest.mark.parametrize("value_info", inner_values)
def test_encoder(value_info):
    x_type, x_value, x_value_dumped = value_info
    encoder = BasicEncoder(x_type)
    assert encoder.encode(x_value) == x_value_dumped
    assert encode(x_value, x_type) == x_value_dumped

mashumaro-3.13.1/tests/test_dialect.py

import collections
import enum
import typing
from dataclasses import dataclass, field
from datetime import date, datetime
from typing import (
    FrozenSet,
    Generic,
    List,
    NamedTuple,
    Optional,
    Set,
    Tuple,
    TypeVar,
    Union,
)

import pytest
from typing_extensions import TypedDict

from mashumaro import DataClassDictMixin, pass_through
from mashumaro.config import ADD_DIALECT_SUPPORT, BaseConfig
from mashumaro.dialect import Dialect
from mashumaro.exceptions import BadDialect
from mashumaro.mixins.msgpack import DataClassMessagePackMixin
from mashumaro.mixins.msgpack import default_encoder as msgpack_encoder
from mashumaro.types import SerializationStrategy

from .conftest import add_unpack_method

T_Date = TypeVar("T_Date", bound=date)

class HexSerializationStrategy(SerializationStrategy):
    def serialize(self, value: int) -> str:
        return hex(value)

    def deserialize(self, value: str) -> int:
        return int(value, 16)

class OrdinalDialect(Dialect):
    serialization_strategy = {
        date: {
            "serialize": date.toordinal,
            "deserialize": date.fromordinal,
        },
        int: HexSerializationStrategy(),
    }

class FormattedDialect(Dialect):
    serialization_strategy = {
        date: {
            "serialize": lambda dt: dt.strftime("%Y/%m/%d"),
            "deserialize": lambda s: datetime.strptime(s, "%Y/%m/%d").date(),
        },
        int: HexSerializationStrategy(),
    }

class ISODialect(Dialect):
    serialization_strategy = {
        date: {
            "serialize": date.isoformat,
            "deserialize": date.fromisoformat,
        },
        int: HexSerializationStrategy(),
    }

class EmptyDialect(Dialect):
    pass

class OmitNoneDialect(Dialect):
    omit_none = True

class NotOmitNoneDialect(Dialect):
    omit_none = False

class OmitDefaultDialect(Dialect):
    omit_default = True

class NotOmitDefaultDialect(Dialect):
    omit_default = False

@dataclass
class DataClassWithoutDialects(DataClassDictMixin):
    dt: date
    i: int

@dataclass
class DataClassWithDefaultDialect(DataClassDictMixin):
    dt: date
    i: int

    class Config(BaseConfig):
        dialect = OrdinalDialect

@dataclass
class DataClassWithDialectSupport(DataClassDictMixin):
    dt: date
    i: int

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]

@dataclass
class BaseEntityWithDialect(DataClassDictMixin):
    class Config:
        code_generation_options = [ADD_DIALECT_SUPPORT]

@dataclass
class Entity1(BaseEntityWithDialect):
    dt1: date

@dataclass
class Entity2(BaseEntityWithDialect):
    dt2: date

@dataclass
class DataClassWithDialectSupportAndDefaultDialect(DataClassDictMixin):
    dt: date
    i: int

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]
        dialect = FormattedDialect

@dataclass
class GenericDataClassWithoutDialects(Generic[T_Date], DataClassDictMixin):
    dt: T_Date
    i: int

@dataclass
class GenericDataClassWithDefaultDialect(Generic[T_Date], DataClassDictMixin):
    dt: T_Date
    i: int

    class Config(BaseConfig):
        dialect = OrdinalDialect

@dataclass
class GenericDataClassWithDialectSupport(Generic[T_Date], DataClassDictMixin):
    dt: T_Date
    i: int

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]

@dataclass
class GenericDataClassWithDialectSupportAndDefaultDialect(
    Generic[T_Date], DataClassDictMixin
):
    dt: T_Date
    i: int

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]
        dialect = FormattedDialect

class MyNamedTuple(NamedTuple):
    x: DataClassWithDialectSupport = DataClassWithDialectSupport(
        dt=date(2022, 1, 1),
        i=999,
    )
    y: DataClassWithoutDialects = DataClassWithoutDialects(
        dt=date(2022, 1, 1),
        i=999,
    )

class MyTypedDict(TypedDict):
    x: DataClassWithDialectSupport
    y: DataClassWithoutDialects

@dataclass
class DataClassWithNamedTupleWithDialectSupport(DataClassDictMixin):
    x: MyNamedTuple

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]

@dataclass
class DataClassWithNamedTupleWithoutDialectSupport(DataClassDictMixin):
    x: MyNamedTuple

@dataclass
class DataClassWithTypedDictWithDialectSupport(DataClassDictMixin):
    x: MyTypedDict

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]

@dataclass
class DataClassWithTypedDictWithoutDialectSupport(DataClassDictMixin):
    x: MyTypedDict

@dataclass
class DataClassWithUnionWithDialectSupport(DataClassDictMixin):
    x: List[Union[DataClassWithDialectSupport, DataClassWithoutDialects]]

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]

@dataclass
class MessagePackDataClass(DataClassMessagePackMixin):
    b_1: bytes
    b_2: bytearray
    dep_1: DataClassWithoutDialects
    dep_2: GenericDataClassWithoutDialects[date]

@dataclass
class DataClassWithOptionalAndOmitNoneDialect(DataClassDictMixin):
    x: Optional[int] = None

    class Config(BaseConfig):
        dialect = OmitNoneDialect

@dataclass
class DataClassWithOptionalAndOmitNoneDialectAndOmitNoneFalse(
    DataClassDictMixin
):
    x: Optional[int] = None

    class Config(BaseConfig):
        dialect = OmitNoneDialect
        omit_none = False

@dataclass
class DataClassWithOptionalAndNotOmitNoneDialectAndOmitNoneTrue(
    DataClassDictMixin
):
    x: Optional[int] = None

    class Config(BaseConfig):
        dialect = NotOmitNoneDialect
        omit_none = True

@dataclass
class DataClassWithOptionalAndEmptyDialect(DataClassDictMixin):
    x: Optional[int] = None

    class Config(BaseConfig):
        dialect = EmptyDialect
        omit_none = True

@dataclass
class DataClassWithOptionalAndDialectSupport(DataClassDictMixin):
    x: Optional[int] = None

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]

class MyStrEnum(str, enum.Enum):
    VAL1 = "val1"
    VAL2 = "val2"

@dataclass
class DataClassWithDefaultAndOmitDefaultDialect(DataClassDictMixin):
    a: int = 42
    b: float = 3.14
    c: str = "foo"
    d: bool = False
    e: None = None
    f: type(None) = None
    g: Tuple[int] = (1, 2, 3)
    h: MyNamedTuple = field(default_factory=lambda: MyNamedTuple())
    i: List[int] = field(default_factory=list)
    j: List[int] = field(default_factory=lambda: [1, 2, 3])
    k: Set[int] = field(default_factory=set)
    l: Set[int] = field(default_factory=lambda: {1, 2, 3})
    m: FrozenSet[int] = field(default_factory=frozenset)
    n: FrozenSet[int] = field(default_factory=lambda: frozenset({1, 2, 3}))
    o: str = MyStrEnum.VAL1

    class Config(BaseConfig):
        dialect = OmitDefaultDialect

@dataclass
class DataClassWithDefaultAndOmitDefaultDialectAndOmitDefaultFalse(
    DataClassDictMixin
):
    a: int = 42
    b: float = 3.14
    c: str = "foo"
    d: bool = False
    e: None = None
    f: type(None) = None
    g: Tuple[int] = (1, 2, 3)
    h: MyNamedTuple = field(default_factory=lambda: MyNamedTuple())
    i: List[int] = field(default_factory=list)
    j: List[int] = field(default_factory=lambda: [1, 2, 3])
    k: Set[int] = field(default_factory=set)
    l: Set[int] = field(default_factory=lambda: {1, 2, 3})
    m: FrozenSet[int] = field(default_factory=frozenset)
    n: FrozenSet[int] = field(default_factory=lambda: frozenset({1, 2, 3}))
    o: str = MyStrEnum.VAL1

    class Config(BaseConfig):
        dialect = OmitDefaultDialect
        omit_default = False
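# Editor's note (an editorial addition, hedged): the paired classes above and
# below encode the precedence rule that test_dataclass_omit_none_dialects and
# test_dataclass_omit_default_dialects assert later in this module — a
# dialect's omit_none/omit_default setting wins over the opposite value in
# the class's own Config, e.g.:
#
#     DataClassWithOptionalAndOmitNoneDialectAndOmitNoneFalse().to_dict()
#     # -> {}  (dialect's omit_none=True overrides Config's omit_none=False)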
@dataclass
class DataClassWithDefaultAndNotOmitDefaultDialectAndOmitDefaultTrue(
    DataClassDictMixin
):
    a: int = 42
    b: float = 3.14
    c: str = "foo"
    d: bool = False
    e: None = None
    f: type(None) = None
    g: Tuple[int] = (1, 2, 3)
    h: MyNamedTuple = field(default_factory=lambda: MyNamedTuple())
    i: List[int] = field(default_factory=list)
    j: List[int] = field(default_factory=lambda: [1, 2, 3])
    k: Set[int] = field(default_factory=set)
    l: Set[int] = field(default_factory=lambda: {1, 2, 3})
    m: FrozenSet[int] = field(default_factory=frozenset)
    n: FrozenSet[int] = field(default_factory=lambda: frozenset({1, 2, 3}))
    o: str = MyStrEnum.VAL1

    class Config(BaseConfig):
        dialect = NotOmitDefaultDialect
        omit_default = True

@dataclass
class DataClassWithDefaultAndEmptyDialect(DataClassDictMixin):
    a: int = 42
    b: float = 3.14
    c: str = "foo"
    d: bool = False
    e: None = None
    f: type(None) = None
    g: Tuple[int] = (1, 2, 3)
    h: MyNamedTuple = field(default_factory=lambda: MyNamedTuple())
    i: List[int] = field(default_factory=list)
    j: List[int] = field(default_factory=lambda: [1, 2, 3])
    k: Set[int] = field(default_factory=set)
    l: Set[int] = field(default_factory=lambda: {1, 2, 3})
    m: FrozenSet[int] = field(default_factory=frozenset)
    n: FrozenSet[int] = field(default_factory=lambda: frozenset({1, 2, 3}))
    o: str = MyStrEnum.VAL1

    class Config(BaseConfig):
        dialect = EmptyDialect
        omit_default = True

@dataclass
class DataClassWithDefaultAndDialectSupport(DataClassDictMixin):
    a: int = 42
    b: float = 3.14
    c: str = "foo"
    d: bool = False
    e: None = None
    f: type(None) = None
    g: Tuple[int] = (1, 2, 3)
    h: MyNamedTuple = field(default_factory=lambda: MyNamedTuple())
    i: List[int] = field(default_factory=list)
    j: List[int] = field(default_factory=lambda: [1, 2, 3])
    k: Set[int] = field(default_factory=set)
    l: Set[int] = field(default_factory=lambda: {1, 2, 3})
    m: FrozenSet[int] = field(default_factory=frozenset)
    n: FrozenSet[int] = field(default_factory=lambda: frozenset({1, 2, 3}))
    o: str = MyStrEnum.VAL1

    class Config(BaseConfig):
        code_generation_options = [ADD_DIALECT_SUPPORT]

def test_default_dialect():
    dt = date.today()
    ordinal = dt.toordinal()
    obj = DataClassWithDefaultDialect(dt, 255)
    assert obj.to_dict() == {"dt": ordinal, "i": "0xff"}
    assert (
        DataClassWithDefaultDialect.from_dict({"dt": ordinal, "i": "0xff"})
        == obj
    )
    with pytest.raises(TypeError):
        obj.to_dict(dialect=OrdinalDialect)

def test_dialect():
    dt = date.today()
    ordinal = dt.toordinal()
    obj = DataClassWithDialectSupport(dt, 255)
    assert obj.to_dict(dialect=OrdinalDialect) == {"dt": ordinal, "i": "0xff"}
    assert (
        DataClassWithDialectSupport.from_dict(
            {"dt": ordinal, "i": "0xff"}, dialect=OrdinalDialect
        )
        == obj
    )

def test_dialect_with_default():
    dt = date.today()
    ordinal = dt.toordinal()
    formatted = dt.strftime("%Y/%m/%d")
    obj = DataClassWithDialectSupportAndDefaultDialect(dt, 255)
    assert obj.to_dict() == {"dt": formatted, "i": "0xff"}
    assert (
        DataClassWithDialectSupportAndDefaultDialect.from_dict(
            {"dt": formatted, "i": "0xff"}
        )
        == obj
    )
    assert obj.to_dict(dialect=None) == {"dt": formatted, "i": "0xff"}
    assert (
        DataClassWithDialectSupportAndDefaultDialect.from_dict(
            {"dt": formatted, "i": "0xff"}, dialect=None
        )
        == obj
    )
    assert obj.to_dict(dialect=OrdinalDialect) == {"dt": ordinal, "i": "0xff"}
    assert (
        DataClassWithDialectSupportAndDefaultDialect.from_dict(
            {"dt": ordinal, "i": "0xff"}, dialect=OrdinalDialect
        )
        == obj
    )

def test_bad_default_dialect():
    with pytest.raises(BadDialect):

        @dataclass
        class _(DataClassDictMixin):
            dt: date

            class Config(BaseConfig):
                dialect = int
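    # Editorial note: the same invalid Config (dialect = int, which is not a
    # Dialect subclass) is re-declared below under add_unpack_method — a
    # conftest helper used throughout these tests — presumably so that both
    # the pack and unpack code-generation paths are verified to raise
    # BadDialect.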
with add_unpack_method: with pytest.raises(BadDialect): @dataclass class _(DataClassDictMixin): dt: date class Config(BaseConfig): dialect = int def test_bad_dialect(): dt = date.today() ordinal = dt.toordinal() obj = DataClassWithDialectSupport(dt, 255) with pytest.raises(BadDialect): DataClassWithDialectSupport.from_dict( {"dt": ordinal, "i": "0xff"}, dialect=int ) with pytest.raises(BadDialect): obj.to_dict(dialect=int) def test_inner_without_dialects(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: DataClassWithoutDialects inners: List[DataClassWithoutDialects] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() formatted = dt.strftime("%Y/%m/%d") ordinal = dt.toordinal() iso = dt.isoformat() obj = DataClass( dt=dt, inner=DataClassWithoutDialects(dt, 255), inners=[DataClassWithoutDialects(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } assert obj.to_dict(dialect=OrdinalDialect) == { "dt": ordinal, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } ) == obj ) assert ( DataClass.from_dict( { "dt": ordinal, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", }, dialect=OrdinalDialect, ) == obj ) def test_inner_with_default_dialect(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: DataClassWithDefaultDialect inners: List[DataClassWithDefaultDialect] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() formatted = dt.strftime("%Y/%m/%d") ordinal = dt.toordinal() iso = dt.isoformat() obj = DataClass( dt=dt, inner=DataClassWithDefaultDialect(dt, 255), inners=[DataClassWithDefaultDialect(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } assert obj.to_dict(dialect=ISODialect) == { "dt": iso, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } ) == obj ) assert ( DataClass.from_dict( { "dt": iso, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", }, dialect=ISODialect, ) == obj ) def test_inner_with_dialect_support(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: DataClassWithDialectSupport inners: List[DataClassWithDialectSupport] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() ordinal = dt.toordinal() formatted = dt.strftime("%Y/%m/%d") iso = dt.isoformat() obj = DataClass( dt=dt, inner=DataClassWithDialectSupport(dt, 255), inners=[DataClassWithDialectSupport(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } assert obj.to_dict(dialect=OrdinalDialect) == { "dt": ordinal, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } ) == obj ) assert ( 
DataClass.from_dict( { "dt": ordinal, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", }, dialect=OrdinalDialect, ) == obj ) def test_inner_with_dialect_support_and_default(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: DataClassWithDialectSupportAndDefaultDialect inners: List[DataClassWithDialectSupportAndDefaultDialect] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() formatted = dt.strftime("%Y/%m/%d") iso = dt.isoformat() obj = DataClass( dt=dt, inner=DataClassWithDialectSupportAndDefaultDialect(dt, 255), inners=[DataClassWithDialectSupportAndDefaultDialect(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": formatted, "i": "0xff"}, "inners": [{"dt": formatted, "i": "0xff"}], "i": "0xff", } assert obj.to_dict(dialect=ISODialect) == { "dt": iso, "inner": {"dt": iso, "i": "0xff"}, "inners": [{"dt": iso, "i": "0xff"}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": formatted, "i": "0xff"}, "inners": [{"dt": formatted, "i": "0xff"}], "i": "0xff", } ) == obj ) assert ( DataClass.from_dict( { "dt": iso, "inner": {"dt": iso, "i": "0xff"}, "inners": [{"dt": iso, "i": "0xff"}], "i": "0xff", }, dialect=ISODialect, ) == obj ) def test_generic_without_dialects(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: GenericDataClassWithoutDialects[date] inners: List[GenericDataClassWithoutDialects[date]] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() formatted = dt.strftime("%Y/%m/%d") ordinal = dt.toordinal() iso = dt.isoformat() obj = DataClass( dt=dt, inner=GenericDataClassWithoutDialects(dt, 255), inners=[GenericDataClassWithoutDialects(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } assert obj.to_dict(dialect=OrdinalDialect) == { "dt": ordinal, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } ) == obj ) assert ( DataClass.from_dict( { "dt": ordinal, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", }, dialect=OrdinalDialect, ) == obj ) def test_generic_with_default_dialect(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: GenericDataClassWithDefaultDialect[date] inners: List[GenericDataClassWithDefaultDialect[date]] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() formatted = dt.strftime("%Y/%m/%d") ordinal = dt.toordinal() iso = dt.isoformat() obj = DataClass( dt=dt, inner=GenericDataClassWithDefaultDialect(dt, 255), inners=[GenericDataClassWithDefaultDialect(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } assert obj.to_dict(dialect=ISODialect) == { "dt": iso, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } ) == obj ) assert ( DataClass.from_dict( { "dt": iso, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": 
"0xff"}], "i": "0xff", }, dialect=ISODialect, ) == obj ) def test_generic_with_dialect_support(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: GenericDataClassWithDialectSupport[date] inners: List[GenericDataClassWithDialectSupport[date]] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() ordinal = dt.toordinal() formatted = dt.strftime("%Y/%m/%d") iso = dt.isoformat() obj = DataClass( dt=dt, inner=GenericDataClassWithDialectSupport(dt, 255), inners=[GenericDataClassWithDialectSupport(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } assert obj.to_dict(dialect=OrdinalDialect) == { "dt": ordinal, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": iso, "i": 255}, "inners": [{"dt": iso, "i": 255}], "i": "0xff", } ) == obj ) assert ( DataClass.from_dict( { "dt": ordinal, "inner": {"dt": ordinal, "i": "0xff"}, "inners": [{"dt": ordinal, "i": "0xff"}], "i": "0xff", }, dialect=OrdinalDialect, ) == obj ) def test_generic_with_dialect_support_and_default(): @dataclass class DataClass(DataClassDictMixin): dt: date inner: GenericDataClassWithDialectSupportAndDefaultDialect[date] inners: List[GenericDataClassWithDialectSupportAndDefaultDialect[date]] i: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = FormattedDialect dt = date.today() formatted = dt.strftime("%Y/%m/%d") iso = dt.isoformat() obj = DataClass( dt=dt, inner=GenericDataClassWithDialectSupportAndDefaultDialect(dt, 255), inners=[GenericDataClassWithDialectSupportAndDefaultDialect(dt, 255)], i=255, ) assert obj.to_dict() == { "dt": formatted, "inner": {"dt": formatted, "i": "0xff"}, "inners": [{"dt": formatted, "i": "0xff"}], "i": "0xff", } assert obj.to_dict(dialect=ISODialect) == { "dt": iso, "inner": {"dt": iso, "i": "0xff"}, "inners": [{"dt": iso, "i": "0xff"}], "i": "0xff", } assert ( DataClass.from_dict( { "dt": formatted, "inner": {"dt": formatted, "i": "0xff"}, "inners": [{"dt": formatted, "i": "0xff"}], "i": "0xff", } ) == obj ) assert ( DataClass.from_dict( { "dt": iso, "inner": {"dt": iso, "i": "0xff"}, "inners": [{"dt": iso, "i": "0xff"}], "i": "0xff", }, dialect=ISODialect, ) == obj ) def test_debug_true_option_with_dialect(mocker): mocked_print = mocker.patch("builtins.print") @dataclass class DataClass(DataClassDictMixin): dt: date class Config(BaseConfig): debug = True code_generation_options = [ADD_DIALECT_SUPPORT] DataClass(date.today()).to_dict(dialect=FormattedDialect) mocked_print.assert_called() assert mocked_print.call_count == 6 def test_dialect_with_named_tuple_with_dialect_support(): dt = date.today() ordinal = dt.toordinal() iso = dt.isoformat() obj = DataClassWithNamedTupleWithDialectSupport( x=MyNamedTuple( x=DataClassWithDialectSupport(dt, 255), y=DataClassWithoutDialects(dt, 255), ) ) dumped = {"x": [{"dt": ordinal, "i": "0xff"}, {"dt": iso, "i": 255}]} assert obj.to_dict(dialect=OrdinalDialect) == dumped assert ( DataClassWithNamedTupleWithDialectSupport.from_dict( dumped, dialect=OrdinalDialect ) == obj ) def test_dialect_with_named_tuple_without_dialect_support(): dt = date.today() iso = dt.isoformat() obj = DataClassWithNamedTupleWithoutDialectSupport( x=MyNamedTuple( x=DataClassWithDialectSupport(dt, 255), y=DataClassWithoutDialects(dt, 255), ) ) dumped = {"x": [{"dt": iso, 
"i": 255}, {"dt": iso, "i": 255}]} assert obj.to_dict() == dumped assert ( DataClassWithNamedTupleWithoutDialectSupport.from_dict(dumped) == obj ) def test_dialect_with_typed_dict_with_dialect_support(): dt = date.today() ordinal = dt.toordinal() iso = dt.isoformat() obj = DataClassWithTypedDictWithDialectSupport( x=MyTypedDict( x=DataClassWithDialectSupport(dt, 255), y=DataClassWithoutDialects(dt, 255), ) ) dumped = { "x": {"x": {"dt": ordinal, "i": "0xff"}, "y": {"dt": iso, "i": 255}} } assert obj.to_dict(dialect=OrdinalDialect) == dumped assert ( DataClassWithTypedDictWithDialectSupport.from_dict( dumped, dialect=OrdinalDialect ) == obj ) def test_dialect_with_typed_dict_without_dialect_support(): dt = date.today() iso = dt.isoformat() obj = DataClassWithTypedDictWithoutDialectSupport( x=MyTypedDict( x=DataClassWithDialectSupport(dt, 255), y=DataClassWithoutDialects(dt, 255), ) ) dumped = {"x": {"x": {"dt": iso, "i": 255}, "y": {"dt": iso, "i": 255}}} assert obj.to_dict() == dumped assert DataClassWithTypedDictWithoutDialectSupport.from_dict(dumped) == obj def test_dialect_with_union_with_dialect_support(): dt = date.today() ordinal = dt.toordinal() iso = dt.isoformat() obj = DataClassWithUnionWithDialectSupport( x=[ DataClassWithDialectSupport(dt, 255), DataClassWithoutDialects(dt, 255), ] ) dumped = {"x": [{"dt": ordinal, "i": "0xff"}, {"dt": iso, "i": 255}]} assert obj.to_dict(dialect=OrdinalDialect) == dumped assert ( DataClassWithUnionWithDialectSupport.from_dict( dumped, dialect=OrdinalDialect ) == obj ) def test_dialect_with_inheritance(): dt = date.today() formatted = dt.strftime("%Y/%m/%d") entity1 = Entity1(dt) entity2 = Entity2(dt) assert ( Entity1.from_dict({"dt1": formatted}, dialect=FormattedDialect) == entity1 ) assert ( Entity2.from_dict({"dt2": formatted}, dialect=FormattedDialect) == entity2 ) assert entity1.to_dict(dialect=FormattedDialect) == {"dt1": formatted} assert entity2.to_dict(dialect=FormattedDialect) == {"dt2": formatted} def test_msgpack_dialect_class_with_dependency_without_dialect(): dt = date(2022, 6, 8) obj = MessagePackDataClass( b_1=b"123", b_2=bytearray([4, 5, 6]), dep_1=DataClassWithoutDialects(dt, 123), dep_2=GenericDataClassWithoutDialects(dt, 123), ) d = { "b_1": b"123", "b_2": bytearray([4, 5, 6]), "dep_1": {"dt": "2022-06-08", "i": 123}, "dep_2": {"dt": "2022-06-08", "i": 123}, } encoded = msgpack_encoder(d) assert obj.to_msgpack() == encoded assert MessagePackDataClass.from_msgpack(encoded) == obj def test_dataclass_omit_none_dialects(): assert DataClassWithOptionalAndOmitNoneDialect().to_dict() == {} assert ( DataClassWithOptionalAndOmitNoneDialectAndOmitNoneFalse().to_dict() == {} ) assert ( DataClassWithOptionalAndNotOmitNoneDialectAndOmitNoneTrue().to_dict() == {"x": None} ) assert DataClassWithOptionalAndEmptyDialect().to_dict() == {} assert DataClassWithOptionalAndDialectSupport().to_dict() == {"x": None} assert ( DataClassWithOptionalAndDialectSupport().to_dict( dialect=OmitNoneDialect ) == {} ) assert DataClassWithOptionalAndDialectSupport().to_dict( dialect=NotOmitNoneDialect ) == {"x": None} assert DataClassWithOptionalAndDialectSupport().to_dict( dialect=EmptyDialect ) == {"x": None} def test_dataclass_omit_default_dialects(): complete_dict = { "a": 42, "b": 3.14, "c": "foo", "d": False, "e": None, "f": None, "g": [1], "h": [{"dt": "2022-01-01", "i": 999}, {"dt": "2022-01-01", "i": 999}], "i": [], "j": [1, 2, 3], "k": [], "l": [1, 2, 3], "m": [], "n": [1, 2, 3], "o": MyStrEnum.VAL1, } assert 
DataClassWithDefaultAndOmitDefaultDialect().to_dict() == {} assert ( DataClassWithDefaultAndOmitDefaultDialectAndOmitDefaultFalse().to_dict() == {} ) assert ( DataClassWithDefaultAndNotOmitDefaultDialectAndOmitDefaultTrue().to_dict() == complete_dict ) assert DataClassWithDefaultAndEmptyDialect().to_dict() == {} assert DataClassWithDefaultAndDialectSupport().to_dict() == complete_dict assert ( DataClassWithDefaultAndDialectSupport().to_dict( dialect=OmitDefaultDialect ) == {} ) assert ( DataClassWithDefaultAndDialectSupport().to_dict( dialect=NotOmitDefaultDialect ) == complete_dict ) assert ( DataClassWithDefaultAndDialectSupport().to_dict(dialect=EmptyDialect) == complete_dict ) def test_dialect_no_copy(): class NoCopyDialect(Dialect): no_copy_collections = (list, dict, set) serialization_strategy = {int: {"serialize": pass_through}} @dataclass class DataClass(DataClassDictMixin): a: List[str] b: Set[str] c: typing.ChainMap[str, str] d: typing.OrderedDict[str, str] e: typing.Counter[str] f: typing.Dict[str, str] g: typing.Sequence[str] class Config(BaseConfig): dialect = NoCopyDialect obj = DataClass( a=["foo"], b={"foo"}, c=collections.ChainMap({"foo": "bar"}), d=collections.OrderedDict({"foo": "bar"}), e=collections.Counter({"foo": 1}), f={"foo": "bar"}, g=["foo"], ) data = obj.to_dict() assert data["a"] is obj.a assert data["b"] is obj.b assert data["c"] is not obj.c assert data["d"] is not obj.d assert data["e"] is not obj.e assert data["f"] is obj.f assert data["g"] is not obj.g def test_dialect_merge(): class DialectA(Dialect): omit_none = True omit_default = True no_copy_collections = [set, dict] serialization_strategy = { date: { "serialize": date.toordinal, "deserialize": date.fromordinal, }, int: HexSerializationStrategy(), float: { "serialize": pass_through, "deserialize": float, }, } class DialectB(Dialect): omit_none = False omit_default = False no_copy_collections = [list] serialization_strategy = { date: pass_through, int: { "serialize": int, "deserialize": int, }, float: { "serialize": float, }, } DialectC = DialectA.merge(DialectB) assert DialectC.omit_none is False assert DialectC.omit_default is False assert DialectC.no_copy_collections == [list] assert DialectC.serialization_strategy == { date: pass_through, int: { "serialize": int, "deserialize": int, }, float: { "serialize": float, "deserialize": float, }, } mashumaro-3.13.1/tests/test_discriminated_unions/000077500000000000000000000000001463331001200221775ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_discriminated_unions/__init__.py000066400000000000000000000000001463331001200242760ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_discriminated_unions/test_common.py000066400000000000000000000007671463331001200251120ustar00rootroot00000000000000import pytest from mashumaro.types import Discriminator def test_discriminator_without_necessary_params(): with pytest.raises(ValueError) as exc_info: Discriminator() assert str(exc_info.value) == ( "Either 'include_supertypes' or 'include_subtypes' must be enabled" ) with pytest.raises(ValueError) as exc_info: Discriminator(field="type") assert str(exc_info.value) == ( "Either 'include_supertypes' or 'include_subtypes' must be enabled" ) mashumaro-3.13.1/tests/test_discriminated_unions/test_dialects.py000066400000000000000000000373731463331001200254150ustar00rootroot00000000000000from dataclasses import dataclass from datetime import date from typing import Dict, Union from typing_extensions import Annotated, Literal from mashumaro.codecs import 
BasicDecoder from mashumaro.codecs.orjson import ORJSONDecoder from mashumaro.config import ADD_DIALECT_SUPPORT, BaseConfig from mashumaro.dialect import Dialect from mashumaro.mixins.orjson import DataClassORJSONMixin from mashumaro.types import Discriminator class DefaultDialect(Dialect): serialization_strategy = { date: { "deserialize": date.fromisoformat, "serialize": date.isoformat, }, dict: { "deserialize": lambda x: {k: v for k, v in x}, "serialize": lambda x: [(k, v) for k, v in x.items()], }, } class MyDialect(Dialect): serialization_strategy = { date: { "deserialize": date.fromordinal, "serialize": date.toordinal, }, dict: { "deserialize": dict, "serialize": dict, }, } @dataclass class Variant1(DataClassORJSONMixin): x: date y: Dict[str, int] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = DefaultDialect discriminator = Discriminator(field="type", include_subtypes=True) @dataclass class _Variant1: x: date y: Dict[str, int] class Config(BaseConfig): discriminator = Discriminator(field="type", include_subtypes=True) @dataclass class Variant1Subtype1(Variant1): type: Literal[1] = 1 @dataclass class _Variant1Subtype1(_Variant1): type: Literal[1] = 1 @dataclass class Variant2(DataClassORJSONMixin): x: date y: Dict[str, int] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = DefaultDialect @dataclass class _Variant2: x: date y: Dict[str, int] class Config(BaseConfig): dialect = DefaultDialect @dataclass class Variant2Subtype1(Variant2): type: Literal[1] = 1 @dataclass class _Variant2Subtype1(_Variant2): type: Literal[1] = 1 @dataclass class Variant2Wrapper(DataClassORJSONMixin): x: Annotated[Variant2, Discriminator(field="type", include_subtypes=True)] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] @dataclass class _Variant2Wrapper: x: Annotated[_Variant2, Discriminator(field="type", include_subtypes=True)] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] @dataclass class Variant3(DataClassORJSONMixin): x: date y: Dict[str, int] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = DefaultDialect @dataclass class _Variant3: x: date y: Dict[str, int] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = DefaultDialect @dataclass class Variant3Subtype(Variant3): type: Literal[3] = 3 @dataclass class _Variant3Subtype(_Variant3): type: Literal[3] = 3 @dataclass class Variant4(DataClassORJSONMixin): x: date y: Dict[str, int] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = DefaultDialect @dataclass class _Variant4: x: date y: Dict[str, int] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] dialect = DefaultDialect @dataclass class Variant4Subtype(Variant4): type: Literal[4] = 4 @dataclass class _Variant4Subtype(_Variant4): type: Literal[4] = 4 @dataclass class Variant34Wrapper(DataClassORJSONMixin): x: Annotated[ Union[Variant3, Variant4], Discriminator(field="type", include_subtypes=True), ] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] @dataclass class _Variant34Wrapper: x: Annotated[ Union[_Variant3, _Variant4], Discriminator(field="type", include_subtypes=True), ] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] def test_passing_dialect_to_config_based_variant_subtypes(): assert Variant1.from_dict( {"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]} ) == Variant1Subtype1(date(2023, 6, 3), 
{"1": 2, "3": 4}) decoder1 = BasicDecoder(_Variant1, default_dialect=DefaultDialect) assert decoder1.decode( {"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]} ) == _Variant1Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) assert Variant1.from_json( '{"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}' ) == Variant1Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) decoder2 = ORJSONDecoder(_Variant1, default_dialect=DefaultDialect) assert decoder2.decode( '{"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}' ) == _Variant1Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) assert Variant1.from_dict( {"type": 1, "x": 738674, "y": {"1": 2, "3": 4}}, dialect=MyDialect ) == Variant1Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) decoder3 = BasicDecoder(_Variant1, default_dialect=MyDialect) assert decoder3.decode( {"type": 1, "x": 738674, "y": {"1": 2, "3": 4}} ) == _Variant1Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) assert Variant1.from_json( '{"type": 1, "x": 738674, "y": {"1": 2, "3": 4}}', dialect=MyDialect ) == Variant1Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) decoder4 = ORJSONDecoder(_Variant1, default_dialect=MyDialect) assert decoder4.decode( '{"type": 1, "x": 738674, "y": {"1": 2, "3": 4}}' ) == _Variant1Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) @dataclass class Variant1Subtype2(Variant1): type: Literal[2] = 2 @dataclass class _Variant1Subtype2(_Variant1): type: Literal[2] = 2 assert Variant1.from_dict( {"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]} ) == Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) assert decoder1.decode( {"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]} ) == _Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) assert Variant1.from_json( '{"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}' ) == Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) assert decoder2.decode( '{"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}' ) == _Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) assert Variant1.from_dict( {"type": 2, "x": 738674, "y": {"1": 2, "3": 4}}, dialect=MyDialect ) == Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) assert decoder3.decode( {"type": 2, "x": 738674, "y": {"1": 2, "3": 4}} ) == _Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) assert Variant1.from_json( '{"type": 2, "x": 738674, "y": {"1": 2, "3": 4}}', dialect=MyDialect ) == Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) assert decoder4.decode( '{"type": 2, "x": 738674, "y": {"1": 2, "3": 4}}' ) == _Variant1Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) def test_passing_dialect_to_annotation_based_variant_subtypes(): assert Variant2Wrapper.from_dict( {"x": {"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == Variant2Wrapper(Variant2Subtype1(date(2023, 6, 3), {"1": 2, "3": 4})) decoder1 = BasicDecoder(_Variant2Wrapper) assert decoder1.decode( {"x": {"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == _Variant2Wrapper( _Variant2Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant2Wrapper.from_json( '{"x": {"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == Variant2Wrapper(Variant2Subtype1(date(2023, 6, 3), {"1": 2, "3": 4})) decoder2 = ORJSONDecoder(_Variant2Wrapper) assert decoder2.decode( '{"x": {"type": 1, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == _Variant2Wrapper( _Variant2Subtype1(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant2Wrapper.from_dict( {"x": {"type": 1, "x": 738674, "y": {"1": 2, "3": 4}}}, dialect=MyDialect, ) == Variant2Wrapper(Variant2Subtype1(date(2023, 6, 
3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant2Wrapper.from_json( '{"x": {"type": 1, "x": 738674, "y": {"1": 2, "3": 4}}}', dialect=MyDialect, ) == Variant2Wrapper(Variant2Subtype1(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it @dataclass class Variant2Subtype2(Variant2): type: Literal[2] = 2 @dataclass class _Variant2Subtype2(_Variant2): type: Literal[2] = 2 assert Variant2Wrapper.from_dict( {"x": {"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == Variant2Wrapper(Variant2Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder1.decode( {"x": {"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == _Variant2Wrapper( _Variant2Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant2Wrapper.from_json( '{"x": {"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == Variant2Wrapper(Variant2Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder2.decode( '{"x": {"type": 2, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == _Variant2Wrapper( _Variant2Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant2Wrapper.from_dict( {"x": {"type": 2, "x": 738674, "y": {"1": 2, "3": 4}}}, dialect=MyDialect, ) == Variant2Wrapper(Variant2Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant2Wrapper.from_json( '{"x": {"type": 2, "x": 738674, "y": {"1": 2, "3": 4}}}', dialect=MyDialect, ) == Variant2Wrapper(Variant2Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it def test_passing_dialect_to_annotation_based_union_subtypes(): assert Variant34Wrapper.from_dict( {"x": {"type": 3, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == Variant34Wrapper(Variant3Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) decoder1 = BasicDecoder(_Variant34Wrapper) assert decoder1.decode( {"x": {"type": 3, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == _Variant34Wrapper( _Variant3Subtype(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_dict( {"x": {"type": 4, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == Variant34Wrapper(Variant4Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder1.decode( {"x": {"type": 4, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == _Variant34Wrapper( _Variant4Subtype(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_json( '{"x": {"type": 3, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == Variant34Wrapper(Variant3Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) decoder2 = ORJSONDecoder(_Variant34Wrapper) assert decoder2.decode( '{"x": {"type": 3, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == _Variant34Wrapper( _Variant3Subtype(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_json( '{"x": {"type": 4, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == Variant34Wrapper(Variant4Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder2.decode( '{"x": {"type": 4, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == _Variant34Wrapper( _Variant4Subtype(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_dict( {"x": {"type": 3, "x": 738674, "y": {"1": 2, "3": 4}}}, dialect=MyDialect, ) == Variant34Wrapper(Variant3Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant34Wrapper.from_dict( {"x": {"type": 4, "x": 738674, "y": {"1": 
2, "3": 4}}}, dialect=MyDialect, ) == Variant34Wrapper(Variant4Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant34Wrapper.from_json( '{"x": {"type": 3, "x": 738674, "y": {"1": 2, "3": 4}}}', dialect=MyDialect, ) == Variant34Wrapper(Variant3Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant34Wrapper.from_json( '{"x": {"type": 4, "x": 738674, "y": {"1": 2, "3": 4}}}', dialect=MyDialect, ) == Variant34Wrapper(Variant4Subtype(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it @dataclass class Variant3Subtype2(Variant3): type: Literal[5] = 5 @dataclass class _Variant3Subtype2(_Variant3): type: Literal[5] = 5 @dataclass class Variant4Subtype2(Variant4): type: Literal[6] = 6 @dataclass class _Variant4Subtype2(_Variant4): type: Literal[6] = 6 assert Variant34Wrapper.from_dict( {"x": {"type": 5, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == Variant34Wrapper(Variant3Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder1.decode( {"x": {"type": 5, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == _Variant34Wrapper( _Variant3Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_dict( {"x": {"type": 6, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == Variant34Wrapper(Variant4Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder1.decode( {"x": {"type": 6, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}} ) == _Variant34Wrapper( _Variant4Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_json( '{"x": {"type": 5, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == Variant34Wrapper(Variant3Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder2.decode( '{"x": {"type": 5, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == _Variant34Wrapper( _Variant3Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_json( '{"x": {"type": 6, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == Variant34Wrapper(Variant4Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) assert decoder2.decode( '{"x": {"type": 6, "x": "2023-06-03", "y": [["1", 2], ["3", 4]]}}' ) == _Variant34Wrapper( _Variant4Subtype2(date(2023, 6, 3), {"1": 2, "3": 4}) ) assert Variant34Wrapper.from_dict( {"x": {"type": 5, "x": 738674, "y": {"1": 2, "3": 4}}}, dialect=MyDialect, ) == Variant34Wrapper(Variant3Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant34Wrapper.from_dict( {"x": {"type": 6, "x": 738674, "y": {"1": 2, "3": 4}}}, dialect=MyDialect, ) == Variant34Wrapper(Variant4Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant34Wrapper.from_json( '{"x": {"type": 5, "x": 738674, "y": {"1": 2, "3": 4}}}', dialect=MyDialect, ) == Variant34Wrapper(Variant3Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it assert Variant34Wrapper.from_json( '{"x": {"type": 6, "x": 738674, "y": {"1": 2, "3": 4}}}', dialect=MyDialect, ) == Variant34Wrapper(Variant4Subtype2(date(2023, 6, 3), {"1": 2, "3": 4})) # "decode" doesn't accept a dialect, so we can't pass MyDialect into it mashumaro-3.13.1/tests/test_discriminated_unions/test_parent_by_field.py000066400000000000000000000350771463331001200267520ustar00rootroot00000000000000from 
from dataclasses import dataclass
from datetime import date

import pytest
from typing_extensions import Annotated, Literal

from mashumaro import DataClassDictMixin
from mashumaro.codecs.basic import decode
from mashumaro.exceptions import InvalidFieldValue
from mashumaro.types import Discriminator

DT_STR = "2023-05-30"
DT_DATE = date(2023, 5, 30)

X_STR_1 = {"x": DT_STR, "type": "str_1"}
X_STR_12 = {"x": DT_STR, "type": "str_12"}
X_DATE_12 = {"x": DT_STR, "type": "date_12"}
X_DATE_1 = {"x": DT_STR, "type": "date_1"}
X_STR_21 = {"x": DT_STR, "type": "str_21"}
X_DATE_22 = {"x": DT_STR, "type": "date_22"}


@dataclass
class BaseVariant(DataClassDictMixin):
    pass


@dataclass
class _BaseVariant:
    pass


@dataclass
class VariantStr1(BaseVariant):
    x: str
    type: Literal["str_1"] = "str_1"


@dataclass
class _VariantStr1(_BaseVariant):
    x: str
    type: Literal["str_1"] = "str_1"


@dataclass
class VariantDate1(BaseVariant):
    x: date
    type: Literal["date_1"] = "date_1"


@dataclass
class _VariantDate1(_BaseVariant):
    x: date
    type: Literal["date_1"] = "date_1"


@dataclass
class VariantStr12(VariantStr1):
    x: str
    type: Literal["str_12"] = "str_12"


@dataclass
class _VariantStr12(_VariantStr1):
    x: str
    type: Literal["str_12"] = "str_12"


@dataclass
class VariantDate12(VariantStr1):
    x: date
    type: Literal["date_12"] = "date_12"


@dataclass
class _VariantDate12(_VariantStr1):
    x: date
    type: Literal["date_12"] = "date_12"


@dataclass
class VariantStr21(VariantDate1):
    x: str
    type: Literal["str_21"] = "str_21"


@dataclass
class _VariantStr21(_VariantDate1):
    x: str
    type: Literal["str_21"] = "str_21"


@dataclass
class VariantDate22(VariantDate1):
    x: date
    type: Literal["date_22"] = "date_22"


@dataclass
class _VariantDate22(_VariantDate1):
    x: date
    type: Literal["date_22"] = "date_22"


@dataclass
class BySubtypes(DataClassDictMixin):
    x: Annotated[BaseVariant, Discriminator(include_subtypes=True)]


@dataclass
class _BySubtypes:
    x: Annotated[_BaseVariant, Discriminator(include_subtypes=True)]


@dataclass
class BySupertypes(DataClassDictMixin):
    x: Annotated[BaseVariant, Discriminator(include_supertypes=True)]
    x1: Annotated[VariantStr1, Discriminator(include_supertypes=True)]
    x2: Annotated[VariantDate1, Discriminator(include_supertypes=True)]


@dataclass
class _BySupertypes:
    x: Annotated[_BaseVariant, Discriminator(include_supertypes=True)]
    x1: Annotated[_VariantStr1, Discriminator(include_supertypes=True)]
    x2: Annotated[_VariantDate1, Discriminator(include_supertypes=True)]


@dataclass
class ByFieldWithSubtypes(DataClassDictMixin):
    x: Annotated[
        BaseVariant, Discriminator(field="type", include_subtypes=True)
    ]


@dataclass
class _ByFieldWithSubtypes:
    x: Annotated[
        _BaseVariant, Discriminator(field="type", include_subtypes=True)
    ]


@dataclass
class ByFieldWithSupertypes(DataClassDictMixin):
    x1: Annotated[
        VariantStr1, Discriminator(field="type", include_supertypes=True)
    ]
    x2: Annotated[
        VariantDate1, Discriminator(field="type", include_supertypes=True)
    ]


@dataclass
class _ByFieldWithSupertypes:
    x1: Annotated[
        _VariantStr1, Discriminator(field="type", include_supertypes=True)
    ]
    x2: Annotated[
        _VariantDate1, Discriminator(field="type", include_supertypes=True)
    ]


@dataclass
class ByFieldWithSupertypesAndSubtypes(DataClassDictMixin):
    x1: Annotated[
        VariantStr1,
        Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        ),
    ]
    x2: Annotated[
        VariantDate1,
        Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        ),
    ]


@dataclass
class _ByFieldWithSupertypesAndSubtypes:
    x1: Annotated[
        _VariantStr1,
        Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        ),
    ]
    x2: Annotated[
        _VariantDate1,
        Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        ),
    ]


@dataclass
class BySupertypesAndSubtypes(DataClassDictMixin):
    x1: Annotated[
        VariantStr1,
        Discriminator(include_supertypes=True, include_subtypes=True),
    ]
    x2: Annotated[
        VariantDate1,
        Discriminator(include_supertypes=True, include_subtypes=True),
    ]


@dataclass
class _BySupertypesAndSubtypes:
    x1: Annotated[
        _VariantStr1,
        Discriminator(include_supertypes=True, include_subtypes=True),
    ]
    x2: Annotated[
        _VariantDate1,
        Discriminator(include_supertypes=True, include_subtypes=True),
    ]


@dataclass
class Foo1(DataClassDictMixin):
    x1: int


@dataclass
class _Foo1:
    x1: int


@dataclass
class Foo2(Foo1):
    x2: int


@dataclass
class _Foo2(_Foo1):
    x2: int


@dataclass
class Foo3(Foo2):
    x: int
    type = 3


@dataclass
class _Foo3(_Foo2):
    x: int
    type = 3


@dataclass
class Foo4(Foo2):
    x: int
    type = 4


@dataclass
class _Foo4(_Foo2):
    x: int
    type = 4


@dataclass
class Bar(DataClassDictMixin):
    baz1: Annotated[Foo1, Discriminator(field="type", include_subtypes=True)]
    baz2: Annotated[Foo1, Discriminator(include_subtypes=True)]


@dataclass
class _Bar:
    baz1: Annotated[_Foo1, Discriminator(field="type", include_subtypes=True)]
    baz2: Annotated[_Foo1, Discriminator(include_subtypes=True)]


@dataclass
class BaseVariantWitCustomTagger(DataClassDictMixin):
    pass


@dataclass
class _BaseVariantWitCustomTagger:
    pass


@dataclass
class VariantWitCustomTaggerSub1(BaseVariantWitCustomTagger):
    pass


@dataclass
class _VariantWitCustomTaggerSub1(_BaseVariantWitCustomTagger):
    pass


@dataclass
class VariantWitCustomTaggerSub2(BaseVariantWitCustomTagger):
    pass


@dataclass
class _VariantWitCustomTaggerSub2(_BaseVariantWitCustomTagger):
    pass


@dataclass
class VariantWitCustomTaggerOwner(DataClassDictMixin):
    x: Annotated[
        BaseVariantWitCustomTagger,
        Discriminator(
            field="type",
            include_subtypes=True,
            variant_tagger_fn=lambda cls: cls.__name__.lower(),
        ),
    ]


@dataclass
class _VariantWitCustomTaggerOwner:
    x: Annotated[
        _BaseVariantWitCustomTagger,
        Discriminator(
            field="type",
            include_subtypes=True,
            variant_tagger_fn=lambda cls: cls.__name__.lower(),
        ),
    ]


@pytest.mark.parametrize(
    ["variant_data", "variant"],
    [
        (X_STR_1, VariantStr1(DT_STR)),
        (X_STR_12, VariantStr12(DT_STR)),
        (X_DATE_12, VariantDate12(DT_DATE)),
        (X_DATE_1, VariantDate1(DT_DATE)),
        (X_STR_21, VariantStr21(DT_STR)),
        (X_DATE_22, VariantDate22(DT_DATE)),
    ],
)
def test_by_subtypes(variant_data, variant):
    assert BySubtypes.from_dict({"x": variant_data}) == BySubtypes(variant)


@pytest.mark.parametrize(
    ["variant_data", "variant"],
    [
        (X_STR_1, _VariantStr1(DT_STR)),
        (X_STR_12, _VariantStr12(DT_STR)),
        (X_DATE_12, _VariantDate12(DT_DATE)),
        (X_DATE_1, _VariantDate1(DT_DATE)),
        (X_STR_21, _VariantStr21(DT_STR)),
        (X_DATE_22, _VariantDate22(DT_DATE)),
    ],
)
def test_by_subtypes_with_decoder(variant_data, variant):
    assert decode({"x": variant_data}, _BySubtypes) == _BySubtypes(variant)


def test_by_subtypes_exceptions():
    with pytest.raises(InvalidFieldValue):
        BySubtypes.from_dict({"x": {"type": "unknown"}})
    with pytest.raises(InvalidFieldValue):
        decode({"x": {"type": "unknown"}}, _BySubtypes)
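
# An input whose "type" tag matches no registered variant surfaces as
# InvalidFieldValue on the annotated field, which is what the two
# exception tests above assert for both the mixin and the codec path.
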
X_STR_1, "x1": X_STR_1, "x2": X_DATE_1} ) == BySupertypes( BaseVariant(), VariantStr1(DT_STR), VariantDate1(DT_DATE) ) assert decode( {"x": X_STR_1, "x1": X_STR_1, "x2": X_DATE_1}, _BySupertypes ) == _BySupertypes( _BaseVariant(), _VariantStr1(DT_STR), _VariantDate1(DT_DATE) ) with pytest.raises(InvalidFieldValue) as exc_info: BySupertypes.from_dict({"x": {}, "x1": X_STR_12, "x2": X_DATE_1}) assert exc_info.value.field_name == "x1" with pytest.raises(InvalidFieldValue) as exc_info: decode({"x": {}, "x1": X_STR_12, "x2": X_DATE_1}, _BySupertypes) assert exc_info.value.field_name == "x1" with pytest.raises(InvalidFieldValue) as exc_info: BySupertypes.from_dict({"x": {}, "x1": X_STR_1, "x2": X_DATE_22}) assert exc_info.value.field_name == "x2" with pytest.raises(InvalidFieldValue) as exc_info: decode({"x": {}, "x1": X_STR_1, "x2": X_DATE_22}, _BySupertypes) assert exc_info.value.field_name == "x2" @pytest.mark.parametrize( ["variant_data", "variant"], [ (X_STR_1, VariantStr1(DT_STR)), (X_STR_12, VariantStr12(DT_STR)), (X_DATE_12, VariantDate12(DT_DATE)), (X_DATE_1, VariantDate1(DT_DATE)), (X_STR_21, VariantStr21(DT_STR)), (X_DATE_22, VariantDate22(DT_DATE)), ], ) def test_by_field_with_subtypes(variant_data, variant): assert ByFieldWithSubtypes.from_dict( {"x": variant_data} ) == ByFieldWithSubtypes(variant) @pytest.mark.parametrize( ["variant_data", "variant"], [ (X_STR_1, _VariantStr1(DT_STR)), (X_STR_12, _VariantStr12(DT_STR)), (X_DATE_12, _VariantDate12(DT_DATE)), (X_DATE_1, _VariantDate1(DT_DATE)), (X_STR_21, _VariantStr21(DT_STR)), (X_DATE_22, _VariantDate22(DT_DATE)), ], ) def test_by_field_with_subtypes_with_decoder(variant_data, variant): assert decode( {"x": variant_data}, _ByFieldWithSubtypes ) == _ByFieldWithSubtypes(variant) def test_by_field_with_subtypes_exceptions(): with pytest.raises(InvalidFieldValue): ByFieldWithSubtypes.from_dict({"x": {"type": "unknown"}}) with pytest.raises(InvalidFieldValue): decode({"x": {"type": "unknown"}}, _ByFieldWithSubtypes) def test_by_field_with_supertypes(): assert ByFieldWithSupertypes.from_dict( {"x1": X_STR_1, "x2": X_DATE_1} ) == ByFieldWithSupertypes(VariantStr1(DT_STR), VariantDate1(DT_DATE)) assert decode( {"x1": X_STR_1, "x2": X_DATE_1}, _ByFieldWithSupertypes ) == _ByFieldWithSupertypes(_VariantStr1(DT_STR), _VariantDate1(DT_DATE)) with pytest.raises(InvalidFieldValue) as exc_info: ByFieldWithSupertypes.from_dict({"x1": X_STR_12, "x2": X_DATE_1}) assert exc_info.value.field_name == "x1" with pytest.raises(InvalidFieldValue) as exc_info: decode({"x1": X_STR_12, "x2": X_DATE_1}, _ByFieldWithSupertypes) assert exc_info.value.field_name == "x1" with pytest.raises(InvalidFieldValue) as exc_info: ByFieldWithSupertypes.from_dict({"x1": X_STR_1, "x2": X_DATE_22}) assert exc_info.value.field_name == "x2" with pytest.raises(InvalidFieldValue) as exc_info: decode({"x1": X_STR_1, "x2": X_DATE_22}, _ByFieldWithSupertypes) assert exc_info.value.field_name == "x2" @pytest.mark.parametrize( ["dataclass_cls"], [[ByFieldWithSupertypesAndSubtypes], [BySupertypesAndSubtypes]], ) def test_by_field_with_supertypes_and_subtypes(dataclass_cls): assert dataclass_cls.from_dict( {"x1": X_STR_1, "x2": X_DATE_1} ) == dataclass_cls(VariantStr1(DT_STR), VariantDate1(DT_DATE)) assert dataclass_cls.from_dict( {"x1": X_STR_12, "x2": X_STR_21} ) == dataclass_cls(VariantStr12(DT_STR), VariantStr21(DT_STR)) assert dataclass_cls.from_dict( {"x1": X_DATE_12, "x2": X_DATE_22} ) == dataclass_cls(VariantDate12(DT_DATE), VariantDate22(DT_DATE)) with 
pytest.raises(InvalidFieldValue): dataclass_cls.from_dict({"x1": X_DATE_1, "x2": X_STR_1}) with pytest.raises(InvalidFieldValue): dataclass_cls.from_dict({"x1": X_STR_21, "x2": X_STR_12}) with pytest.raises(InvalidFieldValue): dataclass_cls.from_dict({"x1": X_DATE_22, "x2": X_DATE_12}) @pytest.mark.parametrize( ["dataclass_cls"], [[_ByFieldWithSupertypesAndSubtypes], [_BySupertypesAndSubtypes]], ) def test_by_field_with_supertypes_and_subtypes_with_decoder(dataclass_cls): assert decode( {"x1": X_STR_1, "x2": X_DATE_1}, dataclass_cls ) == dataclass_cls(_VariantStr1(DT_STR), _VariantDate1(DT_DATE)) assert decode( {"x1": X_STR_12, "x2": X_STR_21}, dataclass_cls ) == dataclass_cls(_VariantStr12(DT_STR), _VariantStr21(DT_STR)) assert decode( {"x1": X_DATE_12, "x2": X_DATE_22}, dataclass_cls ) == dataclass_cls(_VariantDate12(DT_DATE), _VariantDate22(DT_DATE)) with pytest.raises(InvalidFieldValue): decode({"x1": X_DATE_1, "x2": X_STR_1}, dataclass_cls) with pytest.raises(InvalidFieldValue): decode({"x1": X_STR_21, "x2": X_STR_12}, dataclass_cls) with pytest.raises(InvalidFieldValue): decode({"x1": X_DATE_22, "x2": X_DATE_12}, dataclass_cls) def test_subclass_tree_with_class_without_field(): assert Bar.from_dict( { "baz1": {"type": 4, "x1": 1, "x2": 2, "x": 42}, "baz2": {"type": 4, "x1": 1, "x2": 2, "x": 42}, } ) == Bar(baz1=Foo4(1, 2, 42), baz2=Foo2(1, 2)) assert decode( { "baz1": {"type": 4, "x1": 1, "x2": 2, "x": 42}, "baz2": {"type": 4, "x1": 1, "x2": 2, "x": 42}, }, _Bar, ) == _Bar(baz1=_Foo4(1, 2, 42), baz2=_Foo2(1, 2)) def test_by_field_with_custom_variant_tagger(): assert VariantWitCustomTaggerOwner.from_dict( {"x": {"type": "variantwitcustomtaggersub1"}} ) == VariantWitCustomTaggerOwner(VariantWitCustomTaggerSub1()) assert decode( {"x": {"type": "_variantwitcustomtaggersub1"}}, _VariantWitCustomTaggerOwner, ) == _VariantWitCustomTaggerOwner(_VariantWitCustomTaggerSub1()) assert VariantWitCustomTaggerOwner.from_dict( {"x": {"type": "variantwitcustomtaggersub2"}} ) == VariantWitCustomTaggerOwner(VariantWitCustomTaggerSub2()) assert decode( {"x": {"type": "_variantwitcustomtaggersub2"}}, _VariantWitCustomTaggerOwner, ) == _VariantWitCustomTaggerOwner(_VariantWitCustomTaggerSub2()) with pytest.raises(InvalidFieldValue): VariantWitCustomTaggerOwner.from_dict({"x": {"type": "unknown"}}) with pytest.raises(InvalidFieldValue): decode({"x": {"type": "unknown"}}, _VariantWitCustomTaggerOwner) mashumaro-3.13.1/tests/test_discriminated_unions/test_parent_via_config.py000066400000000000000000000506571463331001200273020ustar00rootroot00000000000000from dataclasses import dataclass from datetime import date from typing import Optional import pytest from typing_extensions import Literal from mashumaro import DataClassDictMixin from mashumaro.codecs.basic import decode from mashumaro.config import BaseConfig from mashumaro.exceptions import ( InvalidFieldValue, SuitableVariantNotFoundError, ) from mashumaro.types import Discriminator DT_STR = "2023-05-30" DT_DATE = date(2023, 5, 30) X_1 = {"x": DT_STR, "type": 1} X_2 = {"x": DT_STR, "type": 2} X_3 = {"x": DT_STR, "type": 3} @dataclass class VariantBySubtypes(DataClassDictMixin): class Config(BaseConfig): discriminator = Discriminator(include_subtypes=True) @dataclass class _VariantBySubtypes: class Config(BaseConfig): discriminator = Discriminator(include_subtypes=True) @dataclass class VariantBySubtypesSub1(VariantBySubtypes): x: str type: Literal[1] = 1 @dataclass class _VariantBySubtypesSub1(_VariantBySubtypes): x: str type: Literal[1] = 1 @dataclass 
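
# As elsewhere in this test package, every mixin-based class is mirrored by
# an underscore-prefixed plain dataclass so that the same scenario can be
# driven through mashumaro.codecs.basic.decode instead of the
# DataClassDictMixin methods.
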
@dataclass
class VariantBySubtypesSub2(VariantBySubtypesSub1):
    x: date
    type: Literal[2] = 2


@dataclass
class _VariantBySubtypesSub2(_VariantBySubtypesSub1):
    x: date
    type: Literal[2] = 2


@dataclass
class VariantBySubtypesSub3(VariantBySubtypes):
    x: date
    type: Literal[3] = 3

    class Config(BaseConfig):
        discriminator = Discriminator(include_subtypes=True)


@dataclass
class _VariantBySubtypesSub3(_VariantBySubtypes):
    x: date
    type: Literal[3] = 3

    class Config(BaseConfig):
        discriminator = Discriminator(include_subtypes=True)


@dataclass
class VariantBySubtypesSub4(VariantBySubtypesSub3):
    pass


@dataclass
class _VariantBySubtypesSub4(_VariantBySubtypesSub3):
    pass


@dataclass
class VariantByFieldWithSubtypes(DataClassDictMixin):
    class Config(BaseConfig):
        discriminator = Discriminator(field="type", include_subtypes=True)


@dataclass
class _VariantByFieldWithSubtypes:
    class Config(BaseConfig):
        discriminator = Discriminator(field="type", include_subtypes=True)


@dataclass
class VariantByFieldWithSubtypesSub1(VariantByFieldWithSubtypes):
    x: Optional[str] = None
    type: Literal[1] = 1


@dataclass
class _VariantByFieldWithSubtypesSub1(_VariantByFieldWithSubtypes):
    x: Optional[str] = None
    type: Literal[1] = 1


@dataclass
class VariantByFieldWithSubtypesSub2(VariantByFieldWithSubtypesSub1):
    x: Optional[date] = None
    type: Literal[2] = 2


@dataclass
class _VariantByFieldWithSubtypesSub2(_VariantByFieldWithSubtypesSub1):
    x: Optional[date] = None
    type: Literal[2] = 2


@dataclass
class VariantByFieldWithSubtypesSub3(VariantByFieldWithSubtypes):
    x: Optional[date] = None
    type: Literal[3] = 3

    class Config(BaseConfig):
        discriminator = Discriminator(field="type", include_subtypes=True)


@dataclass
class _VariantByFieldWithSubtypesSub3(_VariantByFieldWithSubtypes):
    x: Optional[date] = None
    type: Literal[3] = 3

    class Config(BaseConfig):
        discriminator = Discriminator(field="type", include_subtypes=True)


@dataclass
class VariantByFieldWithSubtypesSub4(VariantByFieldWithSubtypesSub3):
    pass


@dataclass
class _VariantByFieldWithSubtypesSub4(_VariantByFieldWithSubtypesSub3):
    pass


@dataclass
class VariantByFieldWithSupertypesAndSubtypes(DataClassDictMixin):
    type: Literal["unknown"] = "unknown"

    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        )


@dataclass
class _VariantByFieldWithSupertypesAndSubtypes:
    type: Literal["unknown"] = "unknown"

    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        )


@dataclass
class VariantByFieldWithSupertypesAndSubtypesSub1(
    VariantByFieldWithSupertypesAndSubtypes
):
    x: Optional[str] = None
    type: Literal[1] = 1


@dataclass
class _VariantByFieldWithSupertypesAndSubtypesSub1(
    _VariantByFieldWithSupertypesAndSubtypes
):
    x: Optional[str] = None
    type: Literal[1] = 1


@dataclass
class VariantByFieldWithSupertypesAndSubtypesSub2(
    VariantByFieldWithSupertypesAndSubtypesSub1
):
    x: Optional[date] = None
    type: Literal[2] = 2


@dataclass
class _VariantByFieldWithSupertypesAndSubtypesSub2(
    _VariantByFieldWithSupertypesAndSubtypesSub1
):
    x: Optional[date] = None
    type: Literal[2] = 2


@dataclass
class VariantByFieldWithSupertypesAndSubtypesSub3(
    VariantByFieldWithSupertypesAndSubtypes
):
    x: Optional[date] = None
    type: Literal[3] = 3

    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        )


@dataclass
class _VariantByFieldWithSupertypesAndSubtypesSub3(
    _VariantByFieldWithSupertypesAndSubtypes
):
    x: Optional[date] = None
    type: Literal[3] = 3

    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type", include_supertypes=True, include_subtypes=True
        )


@dataclass
class VariantByFieldWithSupertypesAndSubtypesSub4(
    VariantByFieldWithSupertypesAndSubtypesSub3
):
    pass


@dataclass
class _VariantByFieldWithSupertypesAndSubtypesSub4(
    _VariantByFieldWithSupertypesAndSubtypesSub3
):
    pass


@dataclass
class VariantBySupertypesAndSubtypes(DataClassDictMixin):
    type: Literal["unknown"] = "unknown"

    class Config(BaseConfig):
        discriminator = Discriminator(
            include_supertypes=True, include_subtypes=True
        )


@dataclass
class _VariantBySupertypesAndSubtypes:
    type: Literal["unknown"] = "unknown"

    class Config(BaseConfig):
        discriminator = Discriminator(
            include_supertypes=True, include_subtypes=True
        )


@dataclass
class VariantBySupertypesAndSubtypesSub1(VariantBySupertypesAndSubtypes):
    x: Optional[str] = None
    type: Literal[1] = 1


@dataclass
class _VariantBySupertypesAndSubtypesSub1(_VariantBySupertypesAndSubtypes):
    x: Optional[str] = None
    type: Literal[1] = 1


@dataclass
class VariantBySupertypesAndSubtypesSub2(VariantBySupertypesAndSubtypesSub1):
    x: Optional[date] = None
    type: Literal[2] = 2


@dataclass
class _VariantBySupertypesAndSubtypesSub2(_VariantBySupertypesAndSubtypesSub1):
    x: Optional[date] = None
    type: Literal[2] = 2


@dataclass
class Foo1(DataClassDictMixin):
    x1: int

    class Config(BaseConfig):
        discriminator = Discriminator(field="type", include_subtypes=True)


@dataclass
class _Foo1:
    x1: int

    class Config(BaseConfig):
        discriminator = Discriminator(field="type", include_subtypes=True)


@dataclass
class Foo2(Foo1):
    x2: int


@dataclass
class _Foo2(_Foo1):
    x2: int


@dataclass
class Foo3(Foo2):
    x: int
    type = 3


@dataclass
class _Foo3(_Foo2):
    x: int
    type = 3


@dataclass
class Foo4(Foo2):
    x: int
    type = 4


@dataclass
class _Foo4(_Foo2):
    x: int
    type = 4


@dataclass
class Bar1(DataClassDictMixin):
    x1: int

    class Config(BaseConfig):
        discriminator = Discriminator(include_subtypes=True)


@dataclass
class _Bar1:
    x1: int

    class Config(BaseConfig):
        discriminator = Discriminator(include_subtypes=True)


@dataclass
class Bar2(Bar1):
    x2: int


@dataclass
class _Bar2(_Bar1):
    x2: int


@dataclass
class Bar3(Bar2):
    x: int
    type = 3


@dataclass
class _Bar3(_Bar2):
    x: int
    type = 3


@dataclass
class Bar4(Bar2):
    x: int
    type = 4


@dataclass
class _Bar4(_Bar2):
    x: int
    type = 4


@dataclass
class VariantWitCustomTagger(DataClassDictMixin):
    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type",
            include_subtypes=True,
            variant_tagger_fn=lambda cls: cls.__name__.lower(),
        )


@dataclass
class _VariantWitCustomTagger:
    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type",
            include_subtypes=True,
            variant_tagger_fn=lambda cls: cls.__name__.lower(),
        )


@dataclass
class VariantWitCustomTaggerSub1(VariantWitCustomTagger):
    pass


@dataclass
class _VariantWitCustomTaggerSub1(_VariantWitCustomTagger):
    pass


@dataclass
class VariantWitCustomTaggerSub2(VariantWitCustomTagger):
    pass


@dataclass
class _VariantWitCustomTaggerSub2(_VariantWitCustomTagger):
    pass


@dataclass
class VariantWithMultipleTags(DataClassDictMixin):
    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type",
            include_subtypes=True,
            variant_tagger_fn=lambda cls: [
                cls.__name__.lower(),
                cls.__name__.upper(),
            ],
        )


@dataclass
class VariantWithMultipleTagsOne(VariantWithMultipleTags):
    pass


@dataclass
class VariantWithMultipleTagsTwo(VariantWithMultipleTags):
    pass
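
# variant_tagger_fn may return a list, in which case every returned value is
# registered as a tag for the same variant; the classes above and below map
# each subtype to both its lowercased and its uppercased class name.
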
@dataclass
class _VariantWithMultipleTags:
    class Config(BaseConfig):
        discriminator = Discriminator(
            field="type",
            include_subtypes=True,
            variant_tagger_fn=lambda cls: [
                cls.__name__.lower(),
                cls.__name__.upper(),
            ],
        )


@dataclass
class _VariantWithMultipleTagsOne(_VariantWithMultipleTags):
    pass


@dataclass
class _VariantWithMultipleTagsTwo(_VariantWithMultipleTags):
    pass


def test_by_subtypes():
    assert VariantBySubtypes.from_dict(X_1) == VariantBySubtypesSub1(x=DT_STR)
    assert decode(X_1, _VariantBySubtypes) == _VariantBySubtypesSub1(x=DT_STR)
    assert VariantBySubtypes.from_dict(X_2) == VariantBySubtypesSub2(x=DT_DATE)
    assert decode(X_2, _VariantBySubtypes) == _VariantBySubtypesSub2(x=DT_DATE)
    assert VariantBySubtypes.from_dict(X_3) == VariantBySubtypesSub4(DT_DATE)
    assert decode(X_3, _VariantBySubtypes) == _VariantBySubtypesSub4(DT_DATE)
    assert VariantBySubtypesSub3.from_dict(X_3) == VariantBySubtypesSub4(
        DT_DATE
    )
    assert decode(X_3, _VariantBySubtypesSub3) == _VariantBySubtypesSub4(
        DT_DATE
    )
    with pytest.raises(SuitableVariantNotFoundError):
        VariantBySubtypesSub3.from_dict(X_1)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_1, _VariantBySubtypesSub3)
    with pytest.raises(SuitableVariantNotFoundError):
        VariantBySubtypesSub3.from_dict(X_2)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_2, _VariantBySubtypesSub3)

    @dataclass
    class MyClass(DataClassDictMixin):
        x: VariantBySubtypes

    @dataclass
    class _MyClass:
        x: _VariantBySubtypes

    assert MyClass.from_dict({"x": X_1}) == MyClass(
        VariantBySubtypesSub1(DT_STR)
    )
    assert decode({"x": X_1}, _MyClass) == _MyClass(
        _VariantBySubtypesSub1(DT_STR)
    )
    assert MyClass.from_dict({"x": X_2}) == MyClass(
        VariantBySubtypesSub2(DT_DATE)
    )
    assert decode({"x": X_2}, _MyClass) == _MyClass(
        _VariantBySubtypesSub2(DT_DATE)
    )
    assert MyClass.from_dict({"x": X_3}) == MyClass(
        VariantBySubtypesSub4(DT_DATE)
    )
    assert decode({"x": X_3}, _MyClass) == _MyClass(
        _VariantBySubtypesSub4(DT_DATE)
    )
    with pytest.raises(InvalidFieldValue):
        MyClass.from_dict({"x": {}})
    with pytest.raises(InvalidFieldValue):
        decode({"x": {}}, _MyClass)


def test_by_supertypes():
    with pytest.raises(ValueError) as exc_info:

        @dataclass
        class VariantBySupertypes(DataClassDictMixin):
            class Config(BaseConfig):
                discriminator = Discriminator(include_supertypes=True)

    assert str(exc_info.value) == (
        "Config based discriminator must have 'include_subtypes' enabled"
    )


def test_by_field_with_subtypes():
    assert VariantByFieldWithSubtypes.from_dict(
        X_1
    ) == VariantByFieldWithSubtypesSub1(x=DT_STR)
    assert decode(
        X_1, _VariantByFieldWithSubtypes
    ) == _VariantByFieldWithSubtypesSub1(x=DT_STR)
    assert VariantByFieldWithSubtypes.from_dict(
        X_2
    ) == VariantByFieldWithSubtypesSub2(x=DT_DATE)
    assert decode(
        X_2, _VariantByFieldWithSubtypes
    ) == _VariantByFieldWithSubtypesSub2(x=DT_DATE)
    with pytest.raises(SuitableVariantNotFoundError):
        VariantByFieldWithSubtypes.from_dict(X_3)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_3, _VariantByFieldWithSubtypes)
    with pytest.raises(SuitableVariantNotFoundError):
        VariantByFieldWithSubtypesSub3.from_dict(X_3)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_3, _VariantByFieldWithSubtypesSub3)
    with pytest.raises(SuitableVariantNotFoundError):
        VariantByFieldWithSubtypesSub3.from_dict(X_1)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_1, _VariantByFieldWithSubtypesSub3)

    @dataclass
    class MyClass(DataClassDictMixin):
        x: VariantByFieldWithSubtypes

    @dataclass
    class _MyClass:
        x: _VariantByFieldWithSubtypes

    assert MyClass.from_dict({"x": X_1}) == MyClass(
        VariantByFieldWithSubtypesSub1(DT_STR)
    )
    assert decode({"x": X_1}, _MyClass) == _MyClass(
        _VariantByFieldWithSubtypesSub1(DT_STR)
    )
    assert MyClass.from_dict({"x": X_2}) == MyClass(
        VariantByFieldWithSubtypesSub2(DT_DATE)
    )
    assert decode({"x": X_2}, _MyClass) == _MyClass(
        _VariantByFieldWithSubtypesSub2(DT_DATE)
    )
    with pytest.raises(InvalidFieldValue):
        MyClass.from_dict({"x": X_3})
    with pytest.raises(InvalidFieldValue):
        decode({"x": X_3}, _MyClass)
    with pytest.raises(InvalidFieldValue):
        MyClass.from_dict({"x": {}})
    with pytest.raises(InvalidFieldValue):
        decode({"x": {}}, _MyClass)


def test_by_field_with_supertypes():
    with pytest.raises(ValueError) as exc_info:

        @dataclass
        class VariantByFieldWithSupertypes(DataClassDictMixin):
            class Config(BaseConfig):
                discriminator = Discriminator(
                    field="type", include_supertypes=True
                )

    assert str(exc_info.value) == (
        "Config based discriminator must have 'include_subtypes' enabled"
    )


def test_by_field_with_supertypes_and_subtypes():
    assert VariantByFieldWithSupertypesAndSubtypes.from_dict(
        X_1
    ) == VariantByFieldWithSupertypesAndSubtypesSub1(x=DT_STR)
    assert decode(
        X_1, _VariantByFieldWithSupertypesAndSubtypes
    ) == _VariantByFieldWithSupertypesAndSubtypesSub1(x=DT_STR)
    assert VariantByFieldWithSupertypesAndSubtypes.from_dict(
        X_2
    ) == VariantByFieldWithSupertypesAndSubtypesSub2(x=DT_DATE)
    assert decode(
        X_2, _VariantByFieldWithSupertypesAndSubtypes
    ) == _VariantByFieldWithSupertypesAndSubtypesSub2(x=DT_DATE)
    with pytest.raises(SuitableVariantNotFoundError):
        VariantByFieldWithSupertypesAndSubtypes.from_dict(X_3)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_3, _VariantByFieldWithSupertypesAndSubtypes)
    with pytest.raises(SuitableVariantNotFoundError):
        VariantByFieldWithSupertypesAndSubtypesSub3.from_dict(X_3)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_3, _VariantByFieldWithSupertypesAndSubtypesSub3)
    with pytest.raises(SuitableVariantNotFoundError):
        VariantByFieldWithSupertypesAndSubtypesSub3.from_dict(X_1)
    with pytest.raises(SuitableVariantNotFoundError):
        decode(X_1, _VariantByFieldWithSupertypesAndSubtypesSub3)

    @dataclass
    class MyClass(DataClassDictMixin):
        x: VariantByFieldWithSupertypesAndSubtypes

    @dataclass
    class _MyClass:
        x: _VariantByFieldWithSupertypesAndSubtypes

    assert MyClass.from_dict({"x": X_1}) == MyClass(
        VariantByFieldWithSupertypesAndSubtypesSub1(x=DT_STR)
    )
    assert decode({"x": X_1}, _MyClass) == _MyClass(
        _VariantByFieldWithSupertypesAndSubtypesSub1(x=DT_STR)
    )
    assert MyClass.from_dict({"x": X_2}) == MyClass(
        VariantByFieldWithSupertypesAndSubtypesSub2(x=DT_DATE)
    )
    assert decode({"x": X_2}, _MyClass) == _MyClass(
        _VariantByFieldWithSupertypesAndSubtypesSub2(x=DT_DATE)
    )
    with pytest.raises(InvalidFieldValue):
        MyClass.from_dict({"x": X_3})
    with pytest.raises(InvalidFieldValue):
        decode({"x": X_3}, _MyClass)
    with pytest.raises(InvalidFieldValue):
        MyClass.from_dict({"x": {"type": "unknown"}})
    with pytest.raises(InvalidFieldValue):
        decode({"x": {"type": "unknown"}}, _MyClass)
    with pytest.raises(InvalidFieldValue):
        MyClass.from_dict({"x": {}})
    with pytest.raises(InvalidFieldValue):
        decode({"x": {}}, _MyClass)
    with pytest.raises(InvalidFieldValue):
        MyClass.from_dict({"x": {"type": "invalid"}})
    with pytest.raises(InvalidFieldValue):
        decode({"x": {"type": "invalid"}}, _MyClass)


def test_by_supertypes_and_subtypes():
    assert VariantBySupertypesAndSubtypes.from_dict(
        X_1
    ) == VariantBySupertypesAndSubtypesSub1(x=DT_STR)
    assert decode(
        X_1, _VariantBySupertypesAndSubtypes
    ) == _VariantBySupertypesAndSubtypesSub1(x=DT_STR)
    assert VariantBySupertypesAndSubtypes.from_dict(
        X_2
    ) == VariantBySupertypesAndSubtypesSub2(x=DT_DATE)
    assert decode(
        X_2, _VariantBySupertypesAndSubtypes
    ) == _VariantBySupertypesAndSubtypesSub2(x=DT_DATE)

    @dataclass
    class MyClass(DataClassDictMixin):
        x: VariantBySupertypesAndSubtypes

    @dataclass
    class _MyClass:
        x: _VariantBySupertypesAndSubtypes

    assert MyClass.from_dict({"x": X_1}) == MyClass(
        VariantBySupertypesAndSubtypesSub1(x=DT_STR)
    )
    assert decode({"x": X_1}, _MyClass) == _MyClass(
        _VariantBySupertypesAndSubtypesSub1(x=DT_STR)
    )
    assert MyClass.from_dict({"x": X_2}) == MyClass(
        VariantBySupertypesAndSubtypesSub2(x=DT_DATE)
    )
    assert decode({"x": X_2}, _MyClass) == _MyClass(
        _VariantBySupertypesAndSubtypesSub2(x=DT_DATE)
    )
    assert MyClass.from_dict({"x": {}}) == MyClass(
        VariantBySupertypesAndSubtypesSub1()
    )
    assert decode({"x": {}}, _MyClass) == _MyClass(
        _VariantBySupertypesAndSubtypesSub1()
    )


def test_subclass_tree_with_class_without_field():
    assert Foo1.from_dict({"type": 3, "x1": 1, "x2": 2, "x": 42}) == Foo3(
        1, 2, 42
    )
    assert decode({"type": 3, "x1": 1, "x2": 2, "x": 42}, _Foo1) == _Foo3(
        1, 2, 42
    )
    assert Foo1.from_dict({"type": 4, "x1": 1, "x2": 2, "x": 42}) == Foo4(
        1, 2, 42
    )
    assert decode({"type": 4, "x1": 1, "x2": 2, "x": 42}, _Foo1) == _Foo4(
        1, 2, 42
    )
    assert Bar1.from_dict({"type": 3, "x1": 1, "x2": 2, "x": 42}) == Bar2(1, 2)
    assert decode({"type": 3, "x1": 1, "x2": 2, "x": 42}, _Bar1) == _Bar2(1, 2)
    assert Bar1.from_dict({"type": 4, "x1": 1, "x2": 2, "x": 42}) == Bar2(1, 2)
    assert decode({"type": 4, "x1": 1, "x2": 2, "x": 42}, _Bar1) == _Bar2(1, 2)


def test_by_subtypes_with_custom_variant_tagger():
    assert (
        VariantWitCustomTagger.from_dict(
            {"type": "variantwitcustomtaggersub1"}
        )
        == VariantWitCustomTaggerSub1()
    )
    assert (
        decode(
            {"type": "_variantwitcustomtaggersub1"}, _VariantWitCustomTagger
        )
        == _VariantWitCustomTaggerSub1()
    )
    assert (
        VariantWitCustomTagger.from_dict(
            {"type": "variantwitcustomtaggersub2"}
        )
        == VariantWitCustomTaggerSub2()
    )
    assert (
        decode(
            {"type": "_variantwitcustomtaggersub2"}, _VariantWitCustomTagger
        )
        == _VariantWitCustomTaggerSub2()
    )
    with pytest.raises(SuitableVariantNotFoundError):
        VariantWitCustomTagger.from_dict({"type": "unknown"})
    with pytest.raises(SuitableVariantNotFoundError):
        decode({"type": "unknown"}, _VariantWitCustomTagger)


def test_by_subtypes_with_custom_variant_tagger_and_multiple_tags():
    for variant in (VariantWithMultipleTagsOne, VariantWithMultipleTagsTwo):
        for tag in (variant.__name__.lower(), variant.__name__.upper()):
            assert (
                VariantWithMultipleTags.from_dict({"type": tag}) == variant()
            )
    for variant in (_VariantWithMultipleTagsOne, _VariantWithMultipleTagsTwo):
        for tag in (variant.__name__.lower(), variant.__name__.upper()):
            assert decode({"type": tag}, _VariantWithMultipleTags) == variant()
    with pytest.raises(SuitableVariantNotFoundError):
        VariantWithMultipleTags.from_dict({"type": "unknown"})
    with pytest.raises(SuitableVariantNotFoundError):
        decode({"type": "unknown"}, _VariantWithMultipleTags)
mashumaro-3.13.1/tests/test_discriminated_unions/test_union_by_field.py000066400000000000000000000560171463331001200266040ustar00rootroot00000000000000from dataclasses import dataclass
from datetime import date
from enum import Enum
from typing import ClassVar, Tuple, Union

import pytest
from typing_extensions import Annotated, Final, Literal

from mashumaro import DataClassDictMixin
from mashumaro.codecs import BasicDecoder
from mashumaro.exceptions import InvalidFieldValue
from mashumaro.types import Discriminator

DT_STR = "2023-05-30"
DT_DATE = date(2023, 5, 30)
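
# The variant classes in this module declare the "type" tag in five
# different ways (a plain unannotated class attribute, ClassVar, Final,
# Literal and a str-based Enum) to check that each spelling can serve as a
# discriminator source.
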
"2023-05-30", "type": "str"} X_DATE = {"x": "2023-05-30", "type": "date"} X_DATE_SUBTYPE = {"x": "2023-05-30", "type": "date_subtype"} class VariantType(str, Enum): STR = "str" DATE = "date" DATE_SUBTYPE = "date_subtype" @dataclass class UnannotatedVariantStr: x: str type = "str" @dataclass class ClassVarVariantStr: x: str type: ClassVar[str] = "str" @dataclass class FinalVariantStr: x: str type: Final[str] = "str" @dataclass class LiteralVariantStr: x: str type: Literal["str"] = "str" @dataclass class EnumVariantStr: x: str type: VariantType = VariantType.STR @dataclass class UnannotatedVariantDate: x: date type = "date" @dataclass class ClassVarVariantDate: x: date type: ClassVar[str] = "date" @dataclass class FinalVariantDate: x: date type: Final[str] = "date" @dataclass class LiteralVariantDate: x: date type: Literal["date"] = "date" @dataclass class EnumVariantDate: x: date type: VariantType = VariantType.DATE @dataclass class UnannotatedVariantDateSubtype(UnannotatedVariantDate): x: date type = "date_subtype" @dataclass class ClassVarVariantDateSubtype(ClassVarVariantDate): x: date type: ClassVar[str] = "date_subtype" @dataclass class FinalVariantDateSubtype(FinalVariantDate): x: date type: Final[str] = "date_subtype" @dataclass class LiteralVariantDateSubtype(LiteralVariantDate): x: date type: Literal["date_subtype"] = "date_subtype" @dataclass class EnumVariantDateSubtype(EnumVariantDate): x: date type: VariantType = VariantType.DATE_SUBTYPE @dataclass class _ByFieldWithSupertypes: unannotated: Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator("type", include_supertypes=True), ] class_var: Annotated[ Union[ClassVarVariantStr, ClassVarVariantDate], Discriminator("type", include_supertypes=True), ] final: Annotated[ Union[FinalVariantStr, FinalVariantDate], Discriminator("type", include_supertypes=True), ] literal: Annotated[ Union[LiteralVariantStr, LiteralVariantDate], Discriminator("type", include_supertypes=True), ] enum: Annotated[ Union[EnumVariantStr, EnumVariantDate], Discriminator("type", include_supertypes=True), ] @dataclass class ByFieldWithSupertypes(_ByFieldWithSupertypes, DataClassDictMixin): pass @dataclass class _ByFieldWithSubtypes: unannotated: Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator("type", include_subtypes=True), ] class_var: Annotated[ Union[ClassVarVariantStr, ClassVarVariantDate], Discriminator("type", include_subtypes=True), ] final: Annotated[ Union[FinalVariantStr, FinalVariantDate], Discriminator("type", include_subtypes=True), ] literal: Annotated[ Union[LiteralVariantStr, LiteralVariantDate], Discriminator("type", include_subtypes=True), ] enum: Annotated[ Union[EnumVariantStr, EnumVariantDate], Discriminator("type", include_subtypes=True), ] @dataclass class ByFieldWithSubtypes(_ByFieldWithSubtypes, DataClassDictMixin): pass @dataclass class _BySupertypes: unannotated: Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator(include_supertypes=True), ] class_var: Annotated[ Union[ClassVarVariantStr, ClassVarVariantDate], Discriminator(include_supertypes=True), ] final: Annotated[ Union[FinalVariantStr, FinalVariantDate], Discriminator(include_supertypes=True), ] literal: Annotated[ Union[LiteralVariantStr, LiteralVariantDate], Discriminator(include_supertypes=True), ] enum: Annotated[ Union[EnumVariantStr, EnumVariantDate], Discriminator(include_supertypes=True), ] @dataclass class BySupertypes(_BySupertypes, DataClassDictMixin): pass @dataclass class _BySubtypes: 
unannotated: Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator(include_subtypes=True), ] class_var: Annotated[ Union[ClassVarVariantStr, ClassVarVariantDate], Discriminator(include_subtypes=True), ] final: Annotated[ Union[FinalVariantStr, FinalVariantDate], Discriminator(include_subtypes=True), ] literal: Annotated[ Union[LiteralVariantStr, LiteralVariantDate], Discriminator(include_subtypes=True), ] enum: Annotated[ Union[EnumVariantStr, EnumVariantDate], Discriminator(include_subtypes=True), ] @dataclass class BySubtypes(_BySubtypes, DataClassDictMixin): pass @dataclass class _BySupertypesAndSubtypes: unannotated: Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator(include_supertypes=True, include_subtypes=True), ] class_var: Annotated[ Union[ClassVarVariantStr, ClassVarVariantDate], Discriminator(include_supertypes=True, include_subtypes=True), ] final: Annotated[ Union[FinalVariantStr, FinalVariantDate], Discriminator(include_supertypes=True, include_subtypes=True), ] literal: Annotated[ Union[LiteralVariantStr, LiteralVariantDate], Discriminator(include_supertypes=True, include_subtypes=True), ] enum: Annotated[ Union[EnumVariantStr, EnumVariantDate], Discriminator(include_supertypes=True, include_subtypes=True), ] @dataclass class BySupertypesAndSubtypes(_BySupertypesAndSubtypes, DataClassDictMixin): pass @dataclass class _ByFieldWithSupertypesAndSubtypes: unannotated: Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator("type", include_supertypes=True, include_subtypes=True), ] class_var: Annotated[ Union[ClassVarVariantStr, ClassVarVariantDate], Discriminator("type", include_supertypes=True, include_subtypes=True), ] final: Annotated[ Union[FinalVariantStr, FinalVariantDate], Discriminator("type", include_supertypes=True, include_subtypes=True), ] literal: Annotated[ Union[LiteralVariantStr, LiteralVariantDate], Discriminator("type", include_supertypes=True, include_subtypes=True), ] enum: Annotated[ Union[EnumVariantStr, EnumVariantDate], Discriminator("type", include_supertypes=True, include_subtypes=True), ] @dataclass class ByFieldWithSupertypesAndSubtypes( _ByFieldWithSupertypesAndSubtypes, DataClassDictMixin ): pass @dataclass class _ByFieldAndByFieldWithSubtypesInOneField: x: Tuple[ Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator("type", include_supertypes=True), ], Annotated[ Union[UnannotatedVariantStr, UnannotatedVariantDate], Discriminator("type", include_subtypes=True), ], ] @dataclass class ByFieldAndByFieldWithSubtypesInOneField( _ByFieldAndByFieldWithSubtypesInOneField, DataClassDictMixin ): pass @dataclass class VariantWitCustomTagger1: pass @dataclass class VariantWitCustomTagger2: pass @dataclass class _VariantWitCustomTaggerOwner: x: Annotated[ Union[VariantWitCustomTagger1, VariantWitCustomTagger2], Discriminator( field="type", include_supertypes=True, variant_tagger_fn=lambda cls: cls.__name__.lower(), ), ] @dataclass class VariantWitCustomTaggerOwner( _VariantWitCustomTaggerOwner, DataClassDictMixin ): pass @dataclass class _VariantWitCustomTaggerWithMultipleTagsOwner: x: Annotated[ Union[VariantWitCustomTagger1, VariantWitCustomTagger2], Discriminator( field="type", include_supertypes=True, variant_tagger_fn=lambda cls: [ cls.__name__.lower(), cls.__name__.upper(), ], ), ] @dataclass class VariantWitCustomTaggerWithMultipleTagsOwner( _VariantWitCustomTaggerWithMultipleTagsOwner, DataClassDictMixin ): pass def test_by_field_with_supertypes(): 
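
# Each test below runs the same assertions twice: once through the
# DataClassDictMixin.from_dict classmethod and once through a prebuilt
# BasicDecoder, iterating over (func, cls) pairs to keep both code paths in
# lockstep.
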
def test_by_field_with_supertypes():
    decoder = BasicDecoder(_ByFieldWithSupertypes)
    for func, cls in (
        (ByFieldWithSupertypes.from_dict, ByFieldWithSupertypes),
        (decoder.decode, _ByFieldWithSupertypes),
    ):
        assert func(
            {
                "unannotated": X_STR,
                "class_var": X_STR,
                "literal": X_STR,
                "final": X_STR,
                "enum": X_STR,
            }
        ) == cls(
            unannotated=UnannotatedVariantStr(DT_STR),
            class_var=ClassVarVariantStr(DT_STR),
            literal=LiteralVariantStr(DT_STR),
            final=FinalVariantStr(DT_STR),
            enum=EnumVariantStr(DT_STR),
        )
    for func, cls in (
        (ByFieldWithSupertypes.from_dict, ByFieldWithSupertypes),
        (decoder.decode, _ByFieldWithSupertypes),
    ):
        assert func(
            {
                "unannotated": X_DATE,
                "class_var": X_DATE,
                "literal": X_DATE,
                "final": X_DATE,
                "enum": X_DATE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDate(DT_DATE),
            class_var=ClassVarVariantDate(DT_DATE),
            literal=LiteralVariantDate(DT_DATE),
            final=FinalVariantDate(DT_DATE),
            enum=EnumVariantDate(DT_DATE),
        )
    for func in (ByFieldWithSupertypes.from_dict, decoder.decode):
        with pytest.raises(InvalidFieldValue) as exc_info:
            func({"unannotated": {"x": "2023-05-30", "type": "date_subtype"}})
        assert exc_info.value.field_name == "unannotated"


def test_by_field_with_subtypes():
    decoder = BasicDecoder(_ByFieldWithSubtypes)
    for func, cls in (
        (ByFieldWithSubtypes.from_dict, ByFieldWithSubtypes),
        (decoder.decode, _ByFieldWithSubtypes),
    ):
        assert func(
            {
                "unannotated": X_DATE_SUBTYPE,
                "class_var": X_DATE_SUBTYPE,
                "literal": X_DATE_SUBTYPE,
                "final": X_DATE_SUBTYPE,
                "enum": X_DATE_SUBTYPE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDateSubtype(DT_DATE),
            class_var=ClassVarVariantDateSubtype(DT_DATE),
            literal=LiteralVariantDateSubtype(DT_DATE),
            final=FinalVariantDateSubtype(DT_DATE),
            enum=EnumVariantDateSubtype(DT_DATE),
        )
    for func in (ByFieldWithSubtypes.from_dict, decoder.decode):
        with pytest.raises(InvalidFieldValue) as exc_info:
            func(
                {
                    "unannotated": X_STR,
                    "class_var": X_STR,
                    "literal": X_STR,
                    "final": X_STR,
                    "enum": X_STR,
                }
            )
        assert exc_info.value.field_name == "unannotated"
    for func in (ByFieldWithSubtypes.from_dict, decoder.decode):
        with pytest.raises(InvalidFieldValue) as exc_info:
            func(
                {
                    "unannotated": X_DATE,
                    "class_var": X_DATE,
                    "literal": X_DATE,
                    "final": X_DATE,
                    "enum": X_DATE,
                }
            )
        assert exc_info.value.field_name == "unannotated"


def test_by_field_with_supertypes_and_subtypes():
    decoder = BasicDecoder(_ByFieldWithSupertypesAndSubtypes)
    for func, cls in (
        (
            ByFieldWithSupertypesAndSubtypes.from_dict,
            ByFieldWithSupertypesAndSubtypes,
        ),
        (decoder.decode, _ByFieldWithSupertypesAndSubtypes),
    ):
        assert func(
            {
                "unannotated": X_STR,
                "class_var": X_STR,
                "literal": X_STR,
                "final": X_STR,
                "enum": X_STR,
            }
        ) == cls(
            unannotated=UnannotatedVariantStr(DT_STR),
            class_var=ClassVarVariantStr(DT_STR),
            literal=LiteralVariantStr(DT_STR),
            final=FinalVariantStr(DT_STR),
            enum=EnumVariantStr(DT_STR),
        )
    for func, cls in (
        (
            ByFieldWithSupertypesAndSubtypes.from_dict,
            ByFieldWithSupertypesAndSubtypes,
        ),
        (decoder.decode, _ByFieldWithSupertypesAndSubtypes),
    ):
        assert func(
            {
                "unannotated": X_DATE,
                "class_var": X_DATE,
                "literal": X_DATE,
                "final": X_DATE,
                "enum": X_DATE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDate(DT_DATE),
            class_var=ClassVarVariantDate(DT_DATE),
            literal=LiteralVariantDate(DT_DATE),
            final=FinalVariantDate(DT_DATE),
            enum=EnumVariantDate(DT_DATE),
        )
    for func, cls in (
        (
            ByFieldWithSupertypesAndSubtypes.from_dict,
            ByFieldWithSupertypesAndSubtypes,
        ),
        (decoder.decode, _ByFieldWithSupertypesAndSubtypes),
    ):
        assert func(
            {
                "unannotated": X_DATE_SUBTYPE,
                "class_var": X_DATE_SUBTYPE,
                "literal": X_DATE_SUBTYPE,
                "final": X_DATE_SUBTYPE,
                "enum": X_DATE_SUBTYPE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDateSubtype(DT_DATE),
            class_var=ClassVarVariantDateSubtype(DT_DATE),
            literal=LiteralVariantDateSubtype(DT_DATE),
            final=FinalVariantDateSubtype(DT_DATE),
            enum=EnumVariantDateSubtype(DT_DATE),
        )


def test_by_supertypes():
    decoder = BasicDecoder(_BySupertypes)
    for func, cls in (
        (BySupertypes.from_dict, BySupertypes),
        (decoder.decode, _BySupertypes),
    ):
        assert func(
            {
                "unannotated": X_STR,
                "class_var": X_STR,
                "literal": X_STR,
                "final": X_STR,
                "enum": X_STR,
            }
        ) == cls(
            unannotated=UnannotatedVariantStr(DT_STR),
            class_var=ClassVarVariantStr(DT_STR),
            literal=LiteralVariantStr(DT_STR),
            final=FinalVariantStr(DT_STR),
            enum=EnumVariantStr(DT_STR),
        )
    for func, cls in (
        (BySupertypes.from_dict, BySupertypes),
        (decoder.decode, _BySupertypes),
    ):
        assert func(
            {
                "unannotated": X_DATE,
                "class_var": X_DATE,
                "literal": X_DATE,
                "final": X_DATE,
                "enum": X_DATE,
            }
        ) == cls(
            unannotated=UnannotatedVariantStr(DT_STR),
            class_var=ClassVarVariantStr(DT_STR),
            literal=LiteralVariantDate(DT_DATE),
            # final without field discriminator can lead to unexpected result
            final=FinalVariantStr(DT_STR, type=VariantType.DATE),
            # enum without field discriminator can lead to unexpected result
            enum=EnumVariantStr(DT_STR, type=VariantType.DATE),
        )
    for func in (BySupertypes.from_dict, decoder.decode):
        with pytest.raises(InvalidFieldValue) as exc_info:
            func(
                {
                    "unannotated": X_DATE_SUBTYPE,
                    "class_var": X_DATE_SUBTYPE,
                    "literal": X_DATE_SUBTYPE,
                    "final": X_DATE_SUBTYPE,
                    "enum": X_DATE_SUBTYPE,
                }
            )
        assert exc_info.value.field_name == "literal"


def test_by_subtypes():
    decoder = BasicDecoder(_BySubtypes)
    for func, cls in (
        (BySubtypes.from_dict, BySubtypes),
        (decoder.decode, _BySubtypes),
    ):
        assert func(
            {
                "unannotated": X_DATE_SUBTYPE,
                "class_var": X_DATE_SUBTYPE,
                "literal": X_DATE_SUBTYPE,
                "final": X_DATE_SUBTYPE,
                "enum": X_DATE_SUBTYPE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDateSubtype(DT_DATE),
            class_var=ClassVarVariantDateSubtype(DT_DATE),
            literal=LiteralVariantDateSubtype(DT_DATE),
            final=FinalVariantDateSubtype(DT_DATE),
            enum=EnumVariantDateSubtype(DT_DATE),
        )
    for func, cls in (
        (BySubtypes.from_dict, BySubtypes),
        (decoder.decode, _BySubtypes),
    ):
        assert func(
            {
                "unannotated": X_DATE,
                "class_var": X_DATE,
                "literal": X_DATE_SUBTYPE,
                "final": X_DATE,
                "enum": X_DATE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDateSubtype(DT_DATE),
            class_var=ClassVarVariantDateSubtype(DT_DATE),
            literal=LiteralVariantDateSubtype(DT_DATE),
            # final without field discriminator can lead to unexpected result
            final=FinalVariantDateSubtype(DT_DATE, type=VariantType.DATE),
            # enum without field discriminator can lead to unexpected result
            enum=EnumVariantDateSubtype(DT_DATE, type=VariantType.DATE),
        )
    for func in (BySubtypes.from_dict, decoder.decode):
        with pytest.raises(InvalidFieldValue) as exc_info:
            func(
                {
                    "unannotated": X_STR,
                    "class_var": X_STR,
                    "literal": X_STR,
                    "final": X_STR,
                    "enum": X_STR,
                }
            )
        assert exc_info.value.field_name == "literal"


def test_by_supertypes_and_subtypes():
    decoder = BasicDecoder(_BySupertypesAndSubtypes)
    for func, cls in (
        (BySupertypesAndSubtypes.from_dict, BySupertypesAndSubtypes),
        (decoder.decode, _BySupertypesAndSubtypes),
    ):
        assert func(
            {
                "unannotated": X_DATE_SUBTYPE,
                "class_var": X_DATE_SUBTYPE,
                "literal": X_DATE_SUBTYPE,
                "final": X_DATE_SUBTYPE,
                "enum": X_DATE_SUBTYPE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDateSubtype(DT_DATE),
            class_var=ClassVarVariantDateSubtype(DT_DATE),
            literal=LiteralVariantDateSubtype(DT_DATE),
            final=FinalVariantDateSubtype(DT_DATE),
            enum=EnumVariantDateSubtype(DT_DATE),
        )
    for func, cls in (
        (BySupertypesAndSubtypes.from_dict, BySupertypesAndSubtypes),
        (decoder.decode, _BySupertypesAndSubtypes),
    ):
        assert func(
            {
                "unannotated": X_STR,
                "class_var": X_STR,
                "literal": X_STR,
                "final": X_STR,
                "enum": X_STR,
            }
        ) == cls(
            unannotated=UnannotatedVariantDateSubtype(DT_DATE),
            class_var=ClassVarVariantDateSubtype(DT_DATE),
            literal=LiteralVariantStr(DT_STR),
            # using final without field discriminator
            # can lead to unexpected result
            final=FinalVariantDateSubtype(DT_DATE, type=VariantType.STR),
            # using enum without field discriminator
            # can lead to unexpected result
            enum=EnumVariantDateSubtype(DT_DATE, type=VariantType.STR),
        )
    for func, cls in (
        (BySupertypesAndSubtypes.from_dict, BySupertypesAndSubtypes),
        (decoder.decode, _BySupertypesAndSubtypes),
    ):
        assert func(
            {
                "unannotated": X_DATE,
                "class_var": X_DATE,
                "literal": X_DATE,
                "final": X_DATE,
                "enum": X_DATE,
            }
        ) == cls(
            unannotated=UnannotatedVariantDateSubtype(DT_DATE),
            class_var=ClassVarVariantDateSubtype(DT_DATE),
            literal=LiteralVariantDate(DT_DATE),
            # final without field discriminator can lead to unexpected result
            final=FinalVariantDateSubtype(DT_DATE, type=VariantType.DATE),
            # enum without field discriminator can lead to unexpected result
            enum=EnumVariantDateSubtype(DT_DATE, type=VariantType.DATE),
        )


def test_tuple_with_discriminated_elements():
    decoder = BasicDecoder(_ByFieldAndByFieldWithSubtypesInOneField)
    for func, cls in (
        (
            ByFieldAndByFieldWithSubtypesInOneField.from_dict,
            ByFieldAndByFieldWithSubtypesInOneField,
        ),
        (decoder.decode, _ByFieldAndByFieldWithSubtypesInOneField),
    ):
        assert func({"x": [X_STR, X_DATE_SUBTYPE]}) == cls(
            (
                UnannotatedVariantStr(DT_STR),
                UnannotatedVariantDateSubtype(DT_DATE),
            ),
        )
    for func in (
        ByFieldAndByFieldWithSubtypesInOneField.from_dict,
        decoder.decode,
    ):
        with pytest.raises(InvalidFieldValue):
            func({"x": [X_DATE_SUBTYPE, X_DATE_SUBTYPE]})
    for func in (
        ByFieldAndByFieldWithSubtypesInOneField.from_dict,
        decoder.decode,
    ):
        with pytest.raises(InvalidFieldValue):
            func({"x": [X_STR, X_STR]})


def test_by_field_with_subtypes_with_custom_variant_tagger():
    decoder = BasicDecoder(_VariantWitCustomTaggerOwner)
    for func, cls in (
        (VariantWitCustomTaggerOwner.from_dict, VariantWitCustomTaggerOwner),
        (decoder.decode, _VariantWitCustomTaggerOwner),
    ):
        assert func({"x": {"type": "variantwitcustomtagger1"}}) == cls(
            VariantWitCustomTagger1()
        )
        assert func({"x": {"type": "variantwitcustomtagger2"}}) == cls(
            VariantWitCustomTagger2()
        )
        with pytest.raises(InvalidFieldValue):
            func({"x": {"type": "unknown"}})


def test_by_field_with_subtypes_with_custom_variant_tagger_and_multiple_tags():
    decoder = BasicDecoder(_VariantWitCustomTaggerWithMultipleTagsOwner)
    for func, cls in (
        (
            VariantWitCustomTaggerWithMultipleTagsOwner.from_dict,
            VariantWitCustomTaggerWithMultipleTagsOwner,
        ),
        (decoder.decode, _VariantWitCustomTaggerWithMultipleTagsOwner),
    ):
        assert func({"x": {"type": "variantwitcustomtagger1"}}) == cls(
            VariantWitCustomTagger1()
        )
        assert func({"x": {"type": "variantwitcustomtagger2"}}) == cls(
            VariantWitCustomTagger2()
        )
        assert func({"x": {"type": "VARIANTWITCUSTOMTAGGER1"}}) == cls(
            VariantWitCustomTagger1()
        )
        assert func({"x": {"type": "VARIANTWITCUSTOMTAGGER2"}}) == cls(
            VariantWitCustomTagger2()
        )
        with pytest.raises(InvalidFieldValue):
            func({"x": {"type": "unknown"}})
mashumaro-3.13.1/tests/test_exceptions.py000066400000000000000000000144621463331001200205270ustar00rootroot00000000000000
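# The tests in this module pin down the public attributes and the exact
# string rendering of mashumaro's exception types.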
from dataclasses import dataclass
from typing import List, Union

import pytest

from mashumaro import DataClassDictMixin
from mashumaro.codecs import BasicDecoder
from mashumaro.core.meta.helpers import type_name
from mashumaro.exceptions import (
    ExtraKeysError,
    InvalidFieldValue,
    MissingDiscriminatorError,
    MissingField,
    SuitableVariantNotFoundError,
    ThirdPartyModuleNotFoundError,
    UnresolvedTypeReferenceError,
    UnserializableField,
    UnsupportedDeserializationEngine,
    UnsupportedSerializationEngine,
)


def test_missing_field_simple_field_type_name():
    exc = MissingField("x", int, object)
    assert exc.field_type_name == "int"


def test_missing_field_generic_field_type_name():
    exc = MissingField("x", List[int], object)
    assert exc.field_type_name == "List[int]"


def test_missing_field_holder_class_name():
    exc = MissingField("x", int, object)
    assert exc.holder_class_name == "object"
    exc = MissingField("x", int, List[int])
    assert exc.holder_class_name == "List[int]"


def test_missing_field_str():
    exc = MissingField("x", int, object)
    assert str(exc) == 'Field "x" of type int is missing in object instance'


def test_unserializable_field_simple_field_type_name():
    exc = UnserializableField("x", int, object)
    assert exc.field_type_name == "int"


def test_unserializable_field_generic_field_type_name():
    exc = UnserializableField("x", List[int], object)
    assert exc.field_type_name == "List[int]"


def test_unserializable_field_holder_class_name():
    exc = UnserializableField("x", int, object)
    assert exc.holder_class_name == "object"
    exc = UnserializableField("x", int, List[int])
    assert exc.holder_class_name == "List[int]"


def test_unserializable_field_str():
    exc = UnserializableField("x", int, object)
    assert str(exc) == 'Field "x" of type int in object is not serializable'


def test_unserializable_field_with_msg_str():
    exc = UnserializableField("x", int, object, "test message")
    assert (
        str(exc) == 'Field "x" of type int in object '
        "is not serializable: test message"
    )


def test_invalid_field_value_simple_field_type_name():
    exc = InvalidFieldValue("x", int, "y", object)
    assert exc.field_type_name == "int"


def test_invalid_field_value_generic_field_type_name():
    exc = InvalidFieldValue("x", List[int], "y", object)
    assert exc.field_type_name == "List[int]"


def test_invalid_field_value_holder_class_name():
    exc = InvalidFieldValue("x", int, "y", object)
    assert exc.holder_class_name == "object"
    exc = InvalidFieldValue("x", int, "y", List[int])
    assert exc.holder_class_name == "List[int]"


def test_invalid_field_value_str():
    exc = InvalidFieldValue("x", int, "y", object)
    assert (
        str(exc) == "Field \"x\" of type int in object has invalid value 'y'"
    )


def test_invalid_field_value_with_msg_str():
    exc = InvalidFieldValue("x", int, "y", object, "test message")
    assert (
        str(exc) == 'Field "x" of type int in object '
        "has invalid value 'y': test message"
    )


def test_third_party_module_not_found_error_holder_class_name():
    exc = ThirdPartyModuleNotFoundError("third_party", "x", object)
    assert exc.holder_class_name == "object"
    exc = ThirdPartyModuleNotFoundError("third_party", "x", List[int])
    assert exc.holder_class_name == "List[int]"


def test_third_party_module_not_found_error_str():
    exc = ThirdPartyModuleNotFoundError("third_party", "x", object)
    assert (
        str(exc) == 'Install "third_party" to use it as the serialization '
        'method for the field "x" in object'
    )


def test_unsupported_deserialization_engine():
    exc = UnsupportedDeserializationEngine("x", int, object, "engine_name")
    assert exc.field_type_name == "int"
    assert exc.holder_class_name == "object"
    assert (
        str(exc) == 'Field "x" of type int in object is not serializable: '
        'Unsupported deserialization engine "engine_name"'
    )


def test_unsupported_serialization_engine():
    exc = UnsupportedSerializationEngine("x", int, object, "engine_name")
    assert exc.field_type_name == "int"
    assert exc.holder_class_name == "object"
    assert (
        str(exc) == 'Field "x" of type int in object is not serializable: '
        'Unsupported serialization engine "engine_name"'
    )


def test_unresolved_type_reference_error():
    exc = UnresolvedTypeReferenceError(object, "x")
    assert exc.holder_class_name == "object"
    assert exc.name == "x"
    assert (
        str(exc) == "Class object has unresolved type reference "
        "x in some of its fields"
    )


def test_missing_discriminator_error():
    exc = MissingDiscriminatorError("x")
    assert exc.field_name == "x"
    assert str(exc) == "Discriminator 'x' is missing"


def test_suitable_variant_not_found_error():
    exc = SuitableVariantNotFoundError(Union[str, int], "type", 42)
    assert exc.discriminator_value == 42
    assert exc.variants_type == Union[str, int]
    assert exc.discriminator_name == "type"
    assert str(exc) == (
        "typing.Union[str, int] has no subtype with attribute 'type' "
        "equal to 42"
    )
    exc = SuitableVariantNotFoundError(Union[str, int], "type")
    assert exc.discriminator_value is None
    assert exc.variants_type == Union[str, int]
    assert exc.discriminator_name == "type"
    assert str(exc) == "typing.Union[str, int] has no suitable subtype"


def test_deserialize_dataclass_from_wrong_value_type():
    @dataclass
    class MyClass(DataClassDictMixin):
        x: str

    with pytest.raises(ValueError) as exc_info:
        MyClass.from_dict(42)
    assert str(exc_info.value) == (
        f"Argument for {type_name(MyClass)}."
        f"__mashumaro_from_dict__ method should be a dict instance"
    )
    decoder = BasicDecoder(MyClass)
    with pytest.raises(ValueError) as exc_info:
        decoder.decode(42)
    assert str(exc_info.value) == (
        f"Argument for {type_name(MyClass)}."
        f"__mashumaro_from_dict__ method should be a dict instance"
    )


def test_extra_keys_error():
    @dataclass
    class MyClass(DataClassDictMixin):
        x: str

        class Config:
            forbid_extra_keys = True

    with pytest.raises(ExtraKeysError) as exc_info:
        MyClass.from_dict({"x": "x", "y": "y"})
    assert str(exc_info.value).endswith(".MyClass: y")
mashumaro-3.13.1/tests/test_forward_refs/000077500000000000000000000000001463331001200204505ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_forward_refs/__init__.py000066400000000000000000000000001463331001200225470ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_forward_refs/test_baz.py000066400000000000000000000004341463331001200226360ustar00rootroot00000000000000from __future__ import annotations

from dataclasses import dataclass

from .test_foobar import Bar, Foo


@dataclass
class Baz(Foo[int]):
    pass


def test_class_with_base_in_another_module():
    assert Bar(x=1).to_json() == '{"x": 1}'
    assert Baz(x=1).to_json() == '{"x": 1}'
mashumaro-3.13.1/tests/test_forward_refs/test_foobar.py000066400000000000000000000004361463331001200233340ustar00rootroot00000000000000from __future__ import annotations

from dataclasses import dataclass
from typing import Generic, TypeVar

from mashumaro.mixins.json import DataClassJSONMixin

T = TypeVar("T")


@dataclass
class Foo(Generic[T], DataClassJSONMixin):
    x: T


@dataclass
class Bar(Foo[int]):
    pass
mashumaro-3.13.1/tests/test_forward_refs/test_generic_serializable_type.py000066400000000000000000000016731463331001200272710ustar00rootroot00000000000000from __future__ import annotations

from dataclasses import dataclass
from datetime import date
from typing import Dict, Generic, TypeVar

from mashumaro import DataClassDictMixin
from mashumaro.types import SerializableType

T = TypeVar("T")


class Foo(Generic[T], SerializableType, use_annotations=True):
    a: T

    def __init__(self, a: T) -> None:
        self.a = a

    @classmethod
    def _deserialize(cls, value: Dict[str, T]) -> Foo[T]:
        return cls(**value)

    def _serialize(self) -> Dict[str, T]:
        return {"a": self.a}

    def __eq__(self, other: Foo) -> bool:
        return self.a == other.a


@dataclass
class Bar(DataClassDictMixin):
    x_str: Foo[str]
    x_date: Foo[date]


def test_generic_serializable_type():
    data = {"x_str": {"a": "2023-11-14"}, "x_date": {"a": "2023-11-14"}}
    obj = Bar(Foo("2023-11-14"), Foo(date(2023, 11, 14)))
    assert obj.to_dict() == data
    assert Bar.from_dict(data) == obj
mashumaro-3.13.1/tests/test_forward_refs/test_generic_serialization_strategy.py000066400000000000000000000021171463331001200303550ustar00rootroot00000000000000from __future__ import annotations

from dataclasses import dataclass
from datetime import date
from typing import Dict, Generic, TypeVar

from mashumaro import DataClassDictMixin
from mashumaro.config import BaseConfig
from mashumaro.types import SerializationStrategy

T = TypeVar("T")


class Foo(Generic[T]):
    a: T

    def __init__(self, a: T) -> None:
        self.a = a

    def __eq__(self, other: Foo) -> bool:
        return self.a == other.a


class FooStrategy(Generic[T], SerializationStrategy):
    def deserialize(self, value: Dict[str, T]) -> Foo[T]:
        return Foo(**value)

    def serialize(self, value: Foo[T]) -> Dict[str, T]:
        return {"a": value.a}


@dataclass
class Bar(DataClassDictMixin):
    x_str: Foo[str]
    x_date: Foo[date]

    class Config(BaseConfig):
        serialization_strategy = {Foo: FooStrategy()}


def test_generic_serialization_strategy():
    data = {"x_str": {"a": "2023-11-14"}, "x_date": {"a": "2023-11-14"}}
    obj = Bar(Foo("2023-11-14"), Foo(date(2023, 11, 14)))
    assert obj.to_dict() == data
    assert Bar.from_dict(data) == obj
mashumaro-3.13.1/tests/test_forward_refs/test_linked_list.py

from __future__ import annotations

from dataclasses import dataclass
from typing import Optional

from mashumaro import DataClassDictMixin


@dataclass
class Node(DataClassDictMixin):
    next: Optional[Node] = None


def test_self_reference():
    assert Node.from_dict({}) == Node()
    assert Node.from_dict({"next": {}}) == Node(Node())
    assert Node().to_dict() == {"next": None}
    assert Node(Node()).to_dict() == {"next": {"next": None}}

mashumaro-3.13.1/tests/test_forward_refs/test_self_referenced_generic.py

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, Generic, Optional, TypeVar

from mashumaro.mixins.json import DataClassJSONMixin

T = TypeVar("T")


@dataclass
class Foo(Generic[T], DataClassJSONMixin):
    x: T
    y: Optional[Foo[Any]]


@dataclass
class Bar(Foo):
    ...


def test_self_referenced_generic_no_max_recursion_error():
    obj = Bar(42, Foo(33, None))
    assert obj.to_dict() == {"x": 42, "y": {"x": 33, "y": None}}
    assert Bar.from_dict({"x": 42, "y": {"x": 33, "y": None}}) == obj
    assert obj.to_json() == '{"x": 42, "y": {"x": 33, "y": null}}'
    assert Bar.from_json('{"x": 42, "y": {"x": 33, "y": null}}') == obj

mashumaro-3.13.1/tests/test_forward_refs/test_typed_dict_as_forward_ref/
mashumaro-3.13.1/tests/test_forward_refs/test_typed_dict_as_forward_ref/__init__.py

mashumaro-3.13.1/tests/test_forward_refs/test_typed_dict_as_forward_ref/bar.py

from __future__ import annotations

from typing_extensions import TypedDict


class Bar(TypedDict):
    baz: str

mashumaro-3.13.1/tests/test_forward_refs/test_typed_dict_as_forward_ref/foo.py

from __future__ import annotations

from typing_extensions import TypedDict

from .bar import Bar


class Foo(TypedDict):
    bar: Bar

mashumaro-3.13.1/tests/test_forward_refs/test_typed_dict_as_forward_ref/test_foobar_1.py

from __future__ import annotations

from dataclasses import dataclass

from mashumaro import DataClassDictMixin

from .foo import Foo


@dataclass
class FooBar(DataClassDictMixin):
    foo: Foo


def test_foobar():
    obj = FooBar.from_dict({"foo": {"bar": {"baz": "baz"}}})
    assert obj.foo["bar"]["baz"] == "baz"
    assert obj.to_dict() == {"foo": {"bar": {"baz": "baz"}}}

mashumaro-3.13.1/tests/test_forward_refs/test_typed_dict_as_forward_ref/test_foobar_2.py

from __future__ import annotations

from dataclasses import dataclass

from typing_extensions import TypedDict

from mashumaro import DataClassDictMixin


class Bar(TypedDict):
    baz: str


class Foo(TypedDict):
    bar: Bar


@dataclass
class FooBar(DataClassDictMixin):
    foo: Foo


def test_foobar():
    obj = FooBar.from_dict({"foo": {"bar": {"baz": "baz"}}})
    assert obj.foo["bar"]["baz"] == "baz"
    assert obj.to_dict() == {"foo": {"bar": {"baz": "baz"}}}

mashumaro-3.13.1/tests/test_generics.py

from dataclasses import dataclass
from datetime import date, datetime
from typing import Any, Generic, List, Mapping, Optional, TypeVar

from mashumaro import DataClassDictMixin
from mashumaro.mixins.json import DataClassJSONMixin
from tests.entities import MyGenericDataClass, SerializableTypeGenericList

T = TypeVar("T")
S = TypeVar("S")
P = TypeVar("P", Mapping[int, int], List[float])


@dataclass
class Foo(Generic[T], DataClassJSONMixin):
    x: T
    y: "Optional[Foo[Any]]"


@dataclass
class Bar(Foo):
    ...


def test_one_generic():
    @dataclass
    class A(Generic[T]):
        x: T

    @dataclass
    class B(A[datetime], DataClassDictMixin):
        pass

    obj = B(datetime(2021, 8, 15))
    assert B.from_dict({"x": "2021-08-15T00:00:00"}) == obj
    assert obj.to_dict() == {"x": "2021-08-15T00:00:00"}


def test_one_generic_list():
    @dataclass
    class A(List[T]):
        x: List[T]

    @dataclass
    class B(A[datetime], DataClassDictMixin):
        pass

    obj = B(x=[datetime(2021, 8, 15)])
    assert B.from_dict({"x": ["2021-08-15T00:00:00"]}) == obj
    assert obj.to_dict() == {"x": ["2021-08-15T00:00:00"]}


def test_two_generics():
    @dataclass
    class A1(Generic[T]):
        x: List[T]

    @dataclass
    class A2(Generic[T, S]):
        y: Mapping[T, S]

    @dataclass
    class B(A1[datetime], A2[datetime, date], DataClassDictMixin):
        pass

    obj = B(
        x=[datetime(2021, 8, 15), datetime(2021, 8, 16)],
        y={datetime(2021, 8, 17): date(2021, 8, 18)},
    )
    dump = {
        "x": ["2021-08-15T00:00:00", "2021-08-16T00:00:00"],
        "y": {"2021-08-17T00:00:00": "2021-08-18"},
    }
    assert B.from_dict(dump) == obj
    assert obj.to_dict() == dump


def test_partially_concrete_generic():
    @dataclass
    class A(Generic[T, S]):
        x: Mapping[T, S]

    @dataclass
    class B(A[datetime, S], DataClassDictMixin):
        pass

    obj = B(x={datetime(2022, 11, 21): "3.14"})
    assert B.from_dict({"x": {"2022-11-21T00:00:00": "3.14"}}) == obj
    assert obj.to_dict() == {"x": {"2022-11-21T00:00:00": "3.14"}}


def test_partially_concrete_generic_with_bound():
    @dataclass
    class A(Generic[T, P]):
        x: Mapping[T, P]

    @dataclass
    class B(A[date, P], DataClassDictMixin):
        pass

    obj1 = B(x={date(2022, 11, 21): {1: 2, 3: 4}})
    assert B.from_dict({"x": {"2022-11-21": {"1": "2", "3": "4"}}}) == obj1
    assert obj1.to_dict() == {"x": {"2022-11-21": {1: 2, 3: 4}}}

    obj2 = B(x={date(2022, 11, 21): [1.1, 3.3]})
    assert (
        B.from_dict({"x": {"2022-11-21": {"1.1": "2.2", "3.3": "4.4"}}})
        == obj2
    )
    assert obj2.to_dict() == {"x": {"2022-11-21": [1.1, 3.3]}}

    obj3 = B(x={date(2022, 11, 21): [1.1, 2.2, 3.3, 4.4]})
    assert (
        B.from_dict({"x": {"2022-11-21": ["1.1", "2.2", "3.3", "4.4"]}})
        == obj3
    )
    assert obj3.to_dict() == {"x": {"2022-11-21": [1.1, 2.2, 3.3, 4.4]}}


def test_concrete_generic_with_different_type_var():
    @dataclass
    class A(Generic[T]):
        x: T

    @dataclass
    class B(A[P], DataClassDictMixin):
        pass

    obj = B.from_dict({"x": {"1": "2", "3": "4"}})
    assert obj == B(x={1: 2, 3: 4})
    obj = B.from_dict({"x": {"1.1": "2.2", "3.3": "4.4"}})
    assert obj == B(x=[1.1, 3.3])
    obj = B.from_dict({"x": ["1.1", "2.2", "3.3", "4.4"]})
    assert obj == B(x=[1.1, 2.2, 3.3, 4.4])


def test_loose_generic_info_with_any_type():
    @dataclass
    class A(Generic[T]):
        x: T

    @dataclass
    class B(A, DataClassDictMixin):
        pass

    obj = B.from_dict({"x": {"1.1": "2.2", "3.3": "4.4"}})
    assert obj == B(x={"1.1": "2.2", "3.3": "4.4"})
    obj = B.from_dict({"x": ["1.1", "2.2", "3.3", "4.4"]})
    assert obj == B(x=["1.1", "2.2", "3.3", "4.4"])


def test_loose_generic_info_with_bound():
    @dataclass
    class A(Generic[P]):
        x: P

    @dataclass
    class B(A, DataClassDictMixin):
        pass

    obj = B.from_dict({"x": {"1": "2", "3": "4"}})
    assert obj == B(x={1: 2, 3: 4})
    obj = B.from_dict({"x": {"1.1": "2.2", "3.3": "4.4"}})
    assert obj == B(x=[1.1, 3.3])
    obj = B.from_dict({"x": ["1.1", "2.2", "3.3", "4.4"]})
    assert obj == B(x=[1.1, 2.2, 3.3, 4.4])


def test_loose_generic_info_in_first_generic():
    @dataclass
    class A(Generic[P]):
        x: P

    @dataclass
    class B(A):
        pass

    @dataclass
    class C(B, Generic[P]):
        y: P

    @dataclass
    class D(C[List[float]], DataClassDictMixin):
        pass

    obj = D.from_dict({"x": {"1": "2"}, "y": {"3.3": "4.4"}})
    assert obj == D(x={1: 2}, y=[3.3])
    obj = D.from_dict({"x": {"1.1": "2.2"}, "y": {"3.3": "4.4"}})
    assert obj == D(x=[1.1], y=[3.3])


def test_not_dataclass_generic():
    class MyGeneric(Generic[P, T]):
        pass

    @dataclass
    class GenericDataClass(Generic[P]):
        x: P

    @dataclass
    class DataClass(MyGeneric[P, T], GenericDataClass[P]):
        pass

    @dataclass
    class ConcreteDataClass(DataClass[List[float], float], DataClassDictMixin):
        pass

    obj = ConcreteDataClass.from_dict({"x": {"1": "2", "3": "4"}})
    assert obj == ConcreteDataClass(x=[1.0, 3.0])


def test_generic_dataclass_as_field_type():
    @dataclass
    class DataClass(DataClassDictMixin):
        date: MyGenericDataClass[date]
        str: MyGenericDataClass[str]

    obj = DataClass(
        date=MyGenericDataClass(date(2021, 9, 14)),
        str=MyGenericDataClass("2021-09-14"),
    )
    dictionary = {"date": {"x": "2021-09-14"}, "str": {"x": "2021-09-14"}}
    assert DataClass.from_dict(dictionary) == obj
    assert obj.to_dict() == dictionary


def test_serializable_type_generic_class():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: SerializableTypeGenericList[str]

    obj = DataClass(SerializableTypeGenericList(["a", "b", "c"]))
    assert DataClass.from_dict({"x": ["a", "b", "c"]}) == obj
    assert obj.to_dict() == {"x": ["a", "b", "c"]}


def test_self_referenced_generic_no_max_recursion_error():
    obj = Bar(42, Foo(33, None))
    assert obj.to_dict() == {"x": 42, "y": {"x": 33, "y": None}}
    assert Bar.from_dict({"x": 42, "y": {"x": 33, "y": None}}) == obj
    assert obj.to_json() == '{"x": 42, "y": {"x": 33, "y": null}}'
    assert Bar.from_json('{"x": 42, "y": {"x": 33, "y": null}}') == obj

mashumaro-3.13.1/tests/test_generics_pep_695.py

from dataclasses import dataclass
from datetime import date, datetime
from typing import Any, Mapping

from mashumaro import DataClassDictMixin
from mashumaro.mixins.json import DataClassJSONMixin
from tests.entities import MyGenericDataClass, SerializableTypeGenericList


@dataclass
class Foo[T](DataClassJSONMixin):
    x: T
    y: "Foo[Any] | None"


@dataclass
class Bar(Foo):
    pass


def test_one_generic():
    @dataclass
    class A[T]:
        x: T

    @dataclass
    class B(A[datetime], DataClassDictMixin):
        pass

    obj = B(datetime(2021, 8, 15))
    assert B.from_dict({"x": "2021-08-15T00:00:00"}) == obj
    assert obj.to_dict() == {"x": "2021-08-15T00:00:00"}


def test_one_generic_list():
    @dataclass
    class A[T](list[T]):
        x: list[T]

    @dataclass
    class B(A[datetime], DataClassDictMixin):
        pass

    obj = B(x=[datetime(2021, 8, 15)])
    assert B.from_dict({"x": ["2021-08-15T00:00:00"]}) == obj
    assert obj.to_dict() == {"x": ["2021-08-15T00:00:00"]}


def test_two_generics():
    @dataclass
    class A1[T]:
        x: list[T]

    @dataclass
    class A2[T, S]:
        y: Mapping[T, S]

    @dataclass
    class B(A1[datetime], A2[datetime, date], DataClassDictMixin):
        pass

    obj = B(
        x=[datetime(2021, 8, 15), datetime(2021, 8, 16)],
        y={datetime(2021, 8, 17): date(2021, 8, 18)},
    )
    dump = {
        "x": ["2021-08-15T00:00:00", "2021-08-16T00:00:00"],
        "y": {"2021-08-17T00:00:00": "2021-08-18"},
    }
    assert B.from_dict(dump) == obj
    assert obj.to_dict() == dump


def test_partially_concrete_generic():
    @dataclass
    class A[T, S]:
        x: Mapping[T, S]

    @dataclass
    class B[S](A[datetime, S], DataClassDictMixin):
        pass
    obj = B(x={datetime(2022, 11, 21): "3.14"})
    assert B.from_dict({"x": {"2022-11-21T00:00:00": "3.14"}}) == obj
    assert obj.to_dict() == {"x": {"2022-11-21T00:00:00": "3.14"}}


def test_partially_concrete_generic_with_bound():
    @dataclass
    class A[T, P: (Mapping[int, int], list[float])]:
        x: Mapping[T, P]

    @dataclass
    class B[P: (Mapping[int, int], list[float])](
        A[date, P], DataClassDictMixin
    ):
        pass

    obj1 = B(x={date(2022, 11, 21): {1: 2, 3: 4}})
    assert B.from_dict({"x": {"2022-11-21": {"1": "2", "3": "4"}}}) == obj1
    assert obj1.to_dict() == {"x": {"2022-11-21": {1: 2, 3: 4}}}

    obj2 = B(x={date(2022, 11, 21): [1.1, 3.3]})
    assert (
        B.from_dict({"x": {"2022-11-21": {"1.1": "2.2", "3.3": "4.4"}}})
        == obj2
    )
    assert obj2.to_dict() == {"x": {"2022-11-21": [1.1, 3.3]}}

    obj3 = B(x={date(2022, 11, 21): [1.1, 2.2, 3.3, 4.4]})
    assert (
        B.from_dict({"x": {"2022-11-21": ["1.1", "2.2", "3.3", "4.4"]}})
        == obj3
    )
    assert obj3.to_dict() == {"x": {"2022-11-21": [1.1, 2.2, 3.3, 4.4]}}


def test_concrete_generic_with_different_type_var():
    @dataclass
    class A[T]:
        x: T

    @dataclass
    class B[P: (Mapping[int, int], list[float])](A[P], DataClassDictMixin):
        pass

    obj = B.from_dict({"x": {"1": "2", "3": "4"}})
    assert obj == B(x={1: 2, 3: 4})
    obj = B.from_dict({"x": {"1.1": "2.2", "3.3": "4.4"}})
    assert obj == B(x=[1.1, 3.3])
    obj = B.from_dict({"x": ["1.1", "2.2", "3.3", "4.4"]})
    assert obj == B(x=[1.1, 2.2, 3.3, 4.4])


def test_loose_generic_info_with_any_type():
    @dataclass
    class A[T]:
        x: T

    @dataclass
    class B(A, DataClassDictMixin):
        pass

    obj = B.from_dict({"x": {"1.1": "2.2", "3.3": "4.4"}})
    assert obj == B(x={"1.1": "2.2", "3.3": "4.4"})
    obj = B.from_dict({"x": ["1.1", "2.2", "3.3", "4.4"]})
    assert obj == B(x=["1.1", "2.2", "3.3", "4.4"])


def test_loose_generic_info_with_bound():
    @dataclass
    class A[P: (Mapping[int, int], list[float])]:
        x: P

    @dataclass
    class B(A, DataClassDictMixin):
        pass

    obj = B.from_dict({"x": {"1": "2", "3": "4"}})
    assert obj == B(x={1: 2, 3: 4})
    obj = B.from_dict({"x": {"1.1": "2.2", "3.3": "4.4"}})
    assert obj == B(x=[1.1, 3.3])
    obj = B.from_dict({"x": ["1.1", "2.2", "3.3", "4.4"]})
    assert obj == B(x=[1.1, 2.2, 3.3, 4.4])


def test_loose_generic_info_in_first_generic():
    @dataclass
    class A[P: (Mapping[int, int], list[float])]:
        x: P

    @dataclass
    class B(A):
        pass

    @dataclass
    class C[P: (Mapping[int, int], list[float])](B):
        y: P

    @dataclass
    class D(C[list[float]], DataClassDictMixin):
        pass

    obj = D.from_dict({"x": {"1": "2"}, "y": {"3.3": "4.4"}})
    assert obj == D(x={1: 2}, y=[3.3])
    obj = D.from_dict({"x": {"1.1": "2.2"}, "y": {"3.3": "4.4"}})
    assert obj == D(x=[1.1], y=[3.3])


def test_not_dataclass_generic():
    class MyGeneric[P: (Mapping[int, int], list[float]), T]:
        pass

    @dataclass
    class GenericDataClass[P: (Mapping[int, int], list[float])]:
        x: P

    @dataclass
    class DataClass[P: (Mapping[int, int], list[float]), T](
        MyGeneric[P, T], GenericDataClass[P]
    ):
        pass

    @dataclass
    class ConcreteDataClass(DataClass[list[float], float], DataClassDictMixin):
        pass

    obj = ConcreteDataClass.from_dict({"x": {"1": "2", "3": "4"}})
    assert obj == ConcreteDataClass(x=[1.0, 3.0])


def test_generic_dataclass_as_field_type():
    @dataclass
    class DataClass(DataClassDictMixin):
        date: MyGenericDataClass[date]
        str: MyGenericDataClass[str]

    obj = DataClass(
        date=MyGenericDataClass(date(2021, 9, 14)),
        str=MyGenericDataClass("2021-09-14"),
    )
    dictionary = {"date": {"x": "2021-09-14"}, "str": {"x": "2021-09-14"}}
    assert DataClass.from_dict(dictionary) == obj
    assert obj.to_dict() == dictionary


def test_serializable_type_generic_class():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: SerializableTypeGenericList[str]

    obj = DataClass(SerializableTypeGenericList(["a", "b", "c"]))
    assert DataClass.from_dict({"x": ["a", "b", "c"]}) == obj
    assert obj.to_dict() == {"x": ["a", "b", "c"]}


def test_self_referenced_generic_no_max_recursion_error():
    obj = Bar(42, Foo(33, None))
    assert obj.to_dict() == {"x": 42, "y": {"x": 33, "y": None}}
    assert Bar.from_dict({"x": 42, "y": {"x": 33, "y": None}}) == obj
    assert obj.to_json() == '{"x": 42, "y": {"x": 33, "y": null}}'
    assert Bar.from_json('{"x": 42, "y": {"x": 33, "y": null}}') == obj

mashumaro-3.13.1/tests/test_helper.py

from dataclasses import dataclass, field
from datetime import date, datetime

import pytest

from mashumaro import DataClassDictMixin, field_options
from mashumaro.helper import pass_through
from mashumaro.types import SerializationStrategy


def test_field_options_helper():
    assert field_options() == {
        "serialize": None,
        "deserialize": None,
        "serialization_strategy": None,
        "alias": None,
    }

    def serialize(x):
        return x  # pragma: no cover

    def deserialize(x):
        return x  # pragma: no cover

    class TestSerializationStrategy(SerializationStrategy):  # pragma: no cover
        def deserialize(self, value):
            return value

        def serialize(self, value):
            return value

    serialization_strategy = TestSerializationStrategy()
    alias = "alias"
    assert field_options(
        serialize=serialize,
        deserialize=deserialize,
        serialization_strategy=serialization_strategy,
        alias=alias,
    ) == {
        "serialize": serialize,
        "deserialize": deserialize,
        "serialization_strategy": serialization_strategy,
        "alias": alias,
    }


def test_pass_through():
    with pytest.raises(NotImplementedError):
        pass_through()
    assert pass_through.serialize(123) == 123
    assert pass_through.deserialize(123) == 123


def test_dataclass_with_pass_through():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: datetime = field(
            metadata=field_options(
                serialize=pass_through,
                deserialize=pass_through,
            )
        )
        y: date = field(
            metadata=field_options(serialization_strategy=pass_through)
        )

    x = datetime.now()
    y = x.date()
    instance = DataClass(x, y)
    assert instance.to_dict() == {"x": x, "y": y}
    assert instance.from_dict({"x": x, "y": y}) == instance

mashumaro-3.13.1/tests/test_hooks.py

from dataclasses import dataclass, field
from typing import Any, ClassVar, Dict, Optional, no_type_check

import pytest

from mashumaro import DataClassDictMixin, field_options, pass_through
from mashumaro.config import ADD_SERIALIZATION_CONTEXT, BaseConfig
from mashumaro.exceptions import BadHookSignature


class BaseClassWithSerializationContext(DataClassDictMixin):
    class Config(BaseConfig):
        code_generation_options = [ADD_SERIALIZATION_CONTEXT]


@dataclass
class Foo(BaseClassWithSerializationContext):
    baz: int

    class Config(BaseConfig):
        code_generation_options = []

    def __pre_serialize__(self):
        return self

    def __post_serialize__(self, d: Dict):
        return d


@dataclass
class Bar(BaseClassWithSerializationContext):
    baz: int

    def __pre_serialize__(self, context: Optional[Dict] = None):
        return self

    def __post_serialize__(self, d: Dict, context: Optional[Dict] = None):
        if context and context.get("omit_baz"):
            d.pop("baz")
        return d


@dataclass
class FooBarBaz(BaseClassWithSerializationContext):
    foo: Foo
    bar: Bar
    baz: int

    def __pre_serialize__(self, context: Optional[Dict] = None):
        return self

    def __post_serialize__(self, d: Dict, context: Optional[Dict] = None):
context.get("omit_baz"): d.pop("baz") return d def test_bad_pre_deserialize_hook(): with pytest.raises(BadHookSignature): class DataClass(DataClassDictMixin): x: int @no_type_check def __pre_deserialize__( self, d: Dict[Any, Any] ) -> Dict[Any, Any]: ... def test_bad_post_deserialize_hook(): with pytest.raises(BadHookSignature): class DataClass(DataClassDictMixin): x: int @no_type_check def __post_deserialize__( self, obj: "DataClass" ) -> "DataClass": ... def test_pre_deserialize_hook(): @dataclass class DataClass(DataClassDictMixin): x: int @classmethod def __pre_deserialize__(cls, d: Dict[Any, Any]) -> Dict[Any, Any]: return {k.lower(): v for k, v in d.items()} assert DataClass.from_dict({"X": 123}) == DataClass(x=123) def test_post_deserialize_hook(): @dataclass class DataClass(DataClassDictMixin): x: int @classmethod def __post_deserialize__(cls, obj: "DataClass") -> "DataClass": obj.x = 456 return obj assert DataClass.from_dict({"x": 123}) == DataClass(x=456) def test_pre_serialize_hook(): @dataclass class DataClass(DataClassDictMixin): x: int counter: ClassVar[int] = 0 def __pre_serialize__(self) -> "DataClass": DataClass.counter += 1 return self instance = DataClass(x=123) assert instance.to_dict() == {"x": 123} assert instance.counter == 1 def test_post_serialize_hook(): @dataclass class DataClass(DataClassDictMixin): x: int counter: ClassVar[int] = 0 def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]: return {k.upper(): v for k, v in d.items()} instance = DataClass(x=123) assert instance.to_dict() == {"X": 123} def test_hook_stub_is_not_called(mocker): pre_deserialize_hook = mocker.patch.object( DataClassDictMixin, "__pre_deserialize__" ) post_deserialize_hook = mocker.patch.object( DataClassDictMixin, "__post_deserialize__" ) pre_serialize_hook = mocker.patch.object( DataClassDictMixin, "__pre_serialize__" ) post_serialize_hook = mocker.patch.object( DataClassDictMixin, "__post_serialize__" ) @dataclass class A(DataClassDictMixin): a: int obj = A.from_dict({"a": 1}) obj.to_dict() pre_deserialize_hook.assert_not_called() post_deserialize_hook.assert_not_called() pre_serialize_hook.assert_not_called() post_serialize_hook.assert_not_called() def test_hook_in_parent_class(mocker): class A: @classmethod def __pre_deserialize__(cls, d): return d # pragma: no cover @classmethod def __post_deserialize__(cls, obj): return obj # pragma: no cover def __pre_serialize__(self): return self # pragma: no cover def __post_serialize__(self, d): return d # pragma: no cover @dataclass class B(A, DataClassDictMixin): a: int pre_deserialize_hook = mocker.patch.object(A, "__pre_deserialize__") post_deserialize_hook = mocker.patch.object(A, "__post_deserialize__") pre_serialize_hook = mocker.patch.object( A, "__pre_serialize__", return_value=B(a=1) ) post_serialize_hook = mocker.patch.object(A, "__post_serialize__") B.from_dict({"a": 1}) B(a=1).to_dict() pre_deserialize_hook.assert_called_once() post_deserialize_hook.assert_called_once() pre_serialize_hook.assert_called_once() post_serialize_hook.assert_called_once() def test_post_deserialize_hook_with_pass_through_field(): @dataclass class MyClass(DataClassDictMixin): x: int = field(metadata=field_options(deserialize=pass_through)) @classmethod def __post_deserialize__(cls, obj): return obj assert MyClass.from_dict({"x": 42}) == MyClass(42) def test_post_deserialize_hook_with_empty_dataclass(): @dataclass class MyClass(DataClassDictMixin): @classmethod def __post_deserialize__(cls, obj): return obj # pragma: no cover assert 
MyClass.from_dict({}) == MyClass() def test_passing_context_into_hook(): foo = FooBarBaz(foo=Foo(1), bar=Bar(baz=2), baz=3) assert foo.to_dict() == {"foo": {"baz": 1}, "bar": {"baz": 2}, "baz": 3} assert foo.to_dict(context={"omit_baz": True}) == { "foo": {"baz": 1}, "bar": {}, } mashumaro-3.13.1/tests/test_json.py000066400000000000000000000046221463331001200173140ustar00rootroot00000000000000import json from dataclasses import dataclass from datetime import datetime from typing import List import pytest from mashumaro.exceptions import MissingField from mashumaro.mixins.json import DataClassJSONMixin from .entities import MyEnum def test_to_json(): @dataclass class DataClass(DataClassJSONMixin): x: List[int] dumped = json.dumps({"x": [1, 2, 3]}) assert DataClass([1, 2, 3]).to_json() == dumped def test_from_json(): @dataclass class DataClass(DataClassJSONMixin): x: List[int] dumped = json.dumps({"x": [1, 2, 3]}) assert DataClass.from_json(dumped) == DataClass([1, 2, 3]) def test_to_json_with_custom_encoder(): @dataclass class DataClass(DataClassJSONMixin): x: List[int] x_count: int def encoder(d): dd = dict(d) dd.pop("x_count", None) return json.dumps(dd) instance = DataClass(x=[1, 2, 3], x_count=3) dumped = json.dumps({"x": [1, 2, 3]}) assert instance.to_json(encoder=encoder) == dumped assert instance.to_json() != dumped def test_from_json_with_custom_decoder(): @dataclass class DataClass(DataClassJSONMixin): x: List[int] x_count: int def decoder(s): d = json.loads(s) d["x_count"] = len(d.get("x", [])) return d instance = DataClass(x=[1, 2, 3], x_count=3) dumped = json.dumps({"x": [1, 2, 3]}) assert DataClass.from_json(dumped, decoder=decoder) == instance with pytest.raises(MissingField): assert DataClass.from_json(dumped) def test_json_enum(): @dataclass class DataClass(DataClassJSONMixin): x: MyEnum dumped = '{"x": "letter a"}' instance = DataClass(MyEnum.a) assert instance.to_json() == dumped assert instance.to_json() == dumped assert DataClass.from_json(dumped) == instance def test_json_bytes(): @dataclass class DataClass(DataClassJSONMixin): x: bytes dumped = r'{"x": "MTIz\n"}' instance = DataClass(b"123") assert instance.to_json() == dumped assert DataClass.from_json(dumped) == instance def test_json_datetime(): dt = datetime(2018, 10, 29, 12, 46, 55, 308495) @dataclass class DataClass(DataClassJSONMixin): x: datetime dumped = json.dumps({"x": dt.isoformat()}) instance = DataClass(x=dt) assert instance.to_json() == dumped assert DataClass.from_json(dumped) == instance mashumaro-3.13.1/tests/test_jsonschema/000077500000000000000000000000001463331001200201175ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_jsonschema/__init__.py000066400000000000000000000000001463331001200222160ustar00rootroot00000000000000mashumaro-3.13.1/tests/test_jsonschema/test_json_schema_common.py000066400000000000000000000005461463331001200253760ustar00rootroot00000000000000from mashumaro.config import BaseConfig from mashumaro.jsonschema.schema import Instance def test_instance_get_configs(): instance = Instance(int) assert instance.get_owner_config() is BaseConfig assert instance.get_self_config() is BaseConfig derived = instance.derive() assert derived.get_self_config() is instance.get_self_config() mashumaro-3.13.1/tests/test_jsonschema/test_jsonschema_builder.py000066400000000000000000000076271463331001200254040ustar00rootroot00000000000000import json from dataclasses import dataclass from typing_extensions import Literal, TypeVarTuple from mashumaro.config import BaseConfig from 
from mashumaro.jsonschema import DRAFT_2020_12, OPEN_API_3_1
from mashumaro.jsonschema.builder import JSONSchemaBuilder, build_json_schema
from mashumaro.jsonschema.schema import Instance

Ts = TypeVarTuple("Ts")


@dataclass
class A:
    a: int


@dataclass
class B:
    b: float


def test_instance():
    @dataclass
    class DataClass:
        class Config(BaseConfig):
            pass

    instance = Instance(int)
    assert instance.metadata == {}
    assert instance.owner_class is None
    assert instance.metadata == {}
    assert instance.get_self_config() is BaseConfig

    instance = Instance(DataClass)
    assert instance.owner_class is None
    assert instance.metadata == {}
    assert instance.get_self_config() is DataClass.Config

    derived_instance = instance.derive(type=int)
    assert derived_instance.owner_class is DataClass
    assert derived_instance.metadata == {}
    assert derived_instance.get_self_config() is BaseConfig


def test_jsonschema_json_simple():
    assert json.loads(build_json_schema(int).to_json()) == {"type": "integer"}


def test_jsonschema_json_literal_none():
    @dataclass
    class DataClass:
        x: Literal[None] = None

    assert json.loads(build_json_schema(DataClass).to_json()) == {
        "type": "object",
        "title": "DataClass",
        "properties": {"x": {"const": None, "default": None}},
        "additionalProperties": False,
    }


def test_jsonschema_builder_draft_2020_12():
    builder = JSONSchemaBuilder(dialect=DRAFT_2020_12)
    assert builder.build(A).to_dict() == {
        "type": "object",
        "title": "A",
        "properties": {"a": {"type": "integer"}},
        "additionalProperties": False,
        "required": ["a"],
    }
    assert builder.get_definitions().to_dict() == {}


def test_jsonschema_builder_draft_2020_12_with_refs():
    builder = JSONSchemaBuilder(dialect=DRAFT_2020_12, all_refs=True)
    assert builder.build(A).to_dict() == {"$ref": "#/$defs/A"}
    assert builder.build(B).to_dict() == {"$ref": "#/$defs/B"}
    assert builder.get_definitions().to_dict() == {
        "A": {
            "type": "object",
            "title": "A",
            "properties": {"a": {"type": "integer"}},
            "additionalProperties": False,
            "required": ["a"],
        },
        "B": {
            "type": "object",
            "title": "B",
            "properties": {"b": {"type": "number"}},
            "additionalProperties": False,
            "required": ["b"],
        },
    }


def test_jsonschema_builder_open_api_3_1():
    builder = JSONSchemaBuilder(dialect=OPEN_API_3_1)
    assert builder.build(A).to_dict() == {"$ref": "#/components/schemas/A"}
    assert builder.build(B).to_dict() == {"$ref": "#/components/schemas/B"}
    assert builder.get_definitions().to_dict() == {
        "A": {
            "type": "object",
            "title": "A",
            "properties": {"a": {"type": "integer"}},
            "additionalProperties": False,
            "required": ["a"],
        },
        "B": {
            "type": "object",
            "title": "B",
            "properties": {"b": {"type": "number"}},
            "additionalProperties": False,
            "required": ["b"],
        },
    }


def test_jsonschema_builder_open_api_3_1_without_refs():
    builder = JSONSchemaBuilder(dialect=OPEN_API_3_1, all_refs=False)
    assert builder.build(A).to_dict() == {
        "type": "object",
        "title": "A",
        "properties": {"a": {"type": "integer"}},
        "additionalProperties": False,
        "required": ["a"],
    }
    assert builder.build(B).to_dict() == {
        "type": "object",
        "title": "B",
        "properties": {"b": {"type": "number"}},
        "additionalProperties": False,
        "required": ["b"],
    }
    assert builder.get_definitions().to_dict() == {}

mashumaro-3.13.1/tests/test_jsonschema/test_jsonschema_generation.py

import collections
import datetime
import ipaddress
import os
from base64 import encodebytes
from dataclasses import dataclass, field
from decimal import Decimal
from fractions import Fraction
from pathlib import (
    Path,
    PosixPath,
    PurePath,
    PurePosixPath,
    PureWindowsPath,
    WindowsPath,
)
from typing import (
    AbstractSet,
    Any,
    ByteString,
    ChainMap,
    Counter,
    DefaultDict,
    Deque,
    Dict,
    FrozenSet,
    Generic,
    List,
    Mapping,
    MutableMapping,
    Optional,
    OrderedDict,
    Sequence,
    Tuple,
    Union,
)
from uuid import UUID

import pytest
from typing_extensions import Annotated, Literal, TypeVarTuple, Unpack

from mashumaro.config import BaseConfig
from mashumaro.core.const import PEP_585_COMPATIBLE, PY_39_MIN
from mashumaro.core.meta.helpers import type_name
from mashumaro.helper import pass_through
from mashumaro.jsonschema.annotations import (
    Contains,
    DependentRequired,
    ExclusiveMaximum,
    ExclusiveMinimum,
    MaxContains,
    Maximum,
    MaxItems,
    MaxLength,
    MaxProperties,
    MinContains,
    Minimum,
    MinItems,
    MinLength,
    MinProperties,
    MultipleOf,
    Pattern,
    UniqueItems,
)
from mashumaro.jsonschema.builder import JSONSchemaBuilder, build_json_schema
from mashumaro.jsonschema.dialects import DRAFT_2020_12, OPEN_API_3_1
from mashumaro.jsonschema.models import (
    JSONArraySchema,
    JSONObjectSchema,
    JSONSchema,
    JSONSchemaInstanceFormatExtension,
    JSONSchemaInstanceType,
    JSONSchemaStringFormat,
)
from mashumaro.jsonschema.schema import UTC_OFFSET_PATTERN, EmptyJSONSchema
from mashumaro.types import Discriminator, SerializationStrategy
from tests.entities import (
    CustomPath,
    GenericNamedTuple,
    GenericTypedDict,
    MyDatetimeNewType,
    MyEnum,
    MyFlag,
    MyIntEnum,
    MyIntFlag,
    MyNamedTuple,
    MyNamedTupleWithDefaults,
    MyNamedTupleWithOptional,
    MyNativeStrEnum,
    MyStrEnum,
    MyUntypedNamedTuple,
    MyUntypedNamedTupleWithDefaults,
    T,
    T_Optional_int,
    TAny,
    TInt,
    TIntStr,
    TypedDictOptionalKeys,
    TypedDictOptionalKeysWithOptional,
    TypedDictRequiredAndOptionalKeys,
    TypedDictRequiredKeys,
    TypedDictRequiredKeysWithOptional,
)
from tests.test_pep_655 import (
    TypedDictCorrectNotRequired,
    TypedDictCorrectRequired,
)

if PY_39_MIN:
    from zoneinfo import ZoneInfo

Ts = TypeVarTuple("Ts")


def dummy_serialize_as_str(_: Any) -> str:
    return "dummy"  # pragma: no cover


class ThirdPartyType:
    pass


@dataclass
class DataClassWithThirdPartyType:
    a: ThirdPartyType
    b: Optional[ThirdPartyType]
    c: ThirdPartyType = ThirdPartyType()
    d: Optional[ThirdPartyType] = None

    class Config(BaseConfig):
        serialization_strategy = {
            ThirdPartyType: {
                "deserialize": ThirdPartyType,
                "serialize": dummy_serialize_as_str,
            }
        }


def test_jsonschema_for_dataclass():
    @dataclass
    class DataClass:
        a: int
        b: float = 3.14
        c: Optional[int] = field(default=None, metadata={"alias": "cc"})
        d: str = ""
        e: int = field(init=False)
        f: List[int] = field(
            default_factory=list,
            metadata={"description": "description for f"},
        )

        class Config:
            aliases = {"a": "aa", "d": "dd"}

    schema = JSONObjectSchema(
        title="DataClass",
        properties={
            "aa": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            "b": JSONSchema(type=JSONSchemaInstanceType.NUMBER, default=3.14),
            "cc": JSONSchema(
                anyOf=[
                    JSONSchema(type=JSONSchemaInstanceType.INTEGER),
                    JSONSchema(type=JSONSchemaInstanceType.NULL),
                ],
                default=None,
            ),
            "dd": JSONSchema(type=JSONSchemaInstanceType.STRING, default=""),
            "f": JSONArraySchema(
                items=JSONSchema(type=JSONSchemaInstanceType.INTEGER),
                description="description for f",
            ),
        },
        additionalProperties=False,
        required=["aa"],
    )
    assert build_json_schema(DataClass) == schema
    assert build_json_schema(DataClass, all_refs=True) == JSONSchema(
        reference="#/$defs/DataClass", definitions={"DataClass": schema}
    )


def test_jsonschema_for_any():
    assert build_json_schema(Any) == EmptyJSONSchema()


def test_jsonschema_for_literal():
    assert build_json_schema(Literal[1]) == JSONSchema(const=1)
    assert build_json_schema(Literal[1, 2]) == JSONSchema(enum=[1, 2])
    assert build_json_schema(Literal["x", "y"]) == JSONSchema(enum=["x", "y"])
    assert build_json_schema(Literal[True, False]) == JSONSchema(
        enum=[True, False]
    )
    assert build_json_schema(Literal[1, None]) == JSONSchema(enum=[1, None])
    assert build_json_schema(Literal[MyEnum.a, MyEnum.b]) == JSONSchema(
        enum=["letter a", "letter b"]
    )
    assert build_json_schema(Literal[b"x", b"y"]) == JSONSchema(
        enum=[encodebytes(b"x").decode(), encodebytes(b"y").decode()]
    )


def test_jsonschema_for_special_typing_primitives():
    assert build_json_schema(Union[int, str]) == JSONSchema(
        anyOf=[
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            JSONSchema(type=JSONSchemaInstanceType.STRING),
        ]
    )
    assert build_json_schema(TAny) == EmptyJSONSchema()
    assert build_json_schema(T) == EmptyJSONSchema()
    assert build_json_schema(TInt) == JSONSchema(
        type=JSONSchemaInstanceType.INTEGER
    )
    assert build_json_schema(TIntStr) == JSONSchema(
        anyOf=[
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            JSONSchema(type=JSONSchemaInstanceType.STRING),
        ]
    )
    assert build_json_schema(T_Optional_int) == JSONSchema(
        anyOf=[
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            JSONSchema(type=JSONSchemaInstanceType.NULL),
        ]
    )
    assert build_json_schema(MyDatetimeNewType) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        format=JSONSchemaStringFormat.DATETIME,
    )


@pytest.mark.parametrize(
    ("number_type", "instance_type"),
    (
        (int, JSONSchemaInstanceType.INTEGER),
        (float, JSONSchemaInstanceType.NUMBER),
    ),
)
def test_jsonschema_for_number(number_type, instance_type):
    assert build_json_schema(number_type) == JSONSchema(type=instance_type)
    assert build_json_schema(
        Annotated[
            number_type,
            Minimum(1),
            Maximum(2),
            ExclusiveMinimum(0),
            ExclusiveMaximum(3),
            MultipleOf(2),
        ]
    ) == JSONSchema(
        type=instance_type,
        minimum=1,
        maximum=2,
        exclusiveMinimum=0,
        exclusiveMaximum=3,
        multipleOf=2,
    )


def test_jsonschema_for_bool():
    assert build_json_schema(bool) == JSONSchema(
        type=JSONSchemaInstanceType.BOOLEAN
    )


def test_jsonschema_for_none():
    for instance_type in (None, type(None)):
        assert build_json_schema(instance_type) == JSONSchema(
            type=JSONSchemaInstanceType.NULL
        )


@pytest.mark.parametrize(
    ("instance_type", "string_format"),
    (
        (datetime.datetime, JSONSchemaStringFormat.DATETIME),
        (datetime.date, JSONSchemaStringFormat.DATE),
        (datetime.time, JSONSchemaStringFormat.TIME),
    ),
)
def test_jsonschema_for_datetime_objects(instance_type, string_format):
    assert build_json_schema(instance_type) == JSONSchema(
        type=JSONSchemaInstanceType.STRING, format=string_format
    )


def test_jsonschema_for_timedelta():
    assert build_json_schema(datetime.timedelta) == JSONSchema(
        type=JSONSchemaInstanceType.NUMBER,
        format=JSONSchemaInstanceFormatExtension.TIMEDELTA,
    )


def test_jsonschema_for_timezone():
    assert build_json_schema(datetime.timezone) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        pattern=UTC_OFFSET_PATTERN,
    )


@pytest.mark.skipif(not PY_39_MIN, reason="requires py39+")
def test_jsonschema_for_zone_info():
    assert build_json_schema(ZoneInfo) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        format=JSONSchemaInstanceFormatExtension.TIME_ZONE,
    )


def test_jsonschema_for_uuid():
    assert build_json_schema(UUID) == JSONSchema(
        type=JSONSchemaInstanceType.STRING, format=JSONSchemaStringFormat.UUID
    )


@pytest.mark.parametrize(
    ("instance_type", "string_format"),
    (
        (ipaddress.IPv4Address, JSONSchemaStringFormat.IPV4ADDRESS),
        (ipaddress.IPv6Address, JSONSchemaStringFormat.IPV6ADDRESS),
        (ipaddress.IPv4Network, JSONSchemaInstanceFormatExtension.IPV4NETWORK),
        (ipaddress.IPv6Network, JSONSchemaInstanceFormatExtension.IPV6NETWORK),
    ),
)
def test_jsonschema_for_ipaddress(instance_type, string_format):
    assert build_json_schema(instance_type) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        format=string_format,
    )


def test_jsonschema_for_decimal():
    assert build_json_schema(Decimal) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        format=JSONSchemaInstanceFormatExtension.DECIMAL,
    )


def test_jsonschema_for_fraction():
    assert build_json_schema(Fraction) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        format=JSONSchemaInstanceFormatExtension.FRACTION,
    )


def test_jsonschema_for_bytestring():
    for instance_type in (ByteString, bytes, bytearray):
        assert build_json_schema(instance_type) == JSONSchema(
            type=JSONSchemaInstanceType.STRING,
            format=JSONSchemaInstanceFormatExtension.BASE64,
        )


def test_jsonschema_for_str():
    assert build_json_schema(str) == JSONSchema(
        type=JSONSchemaInstanceType.STRING
    )
    assert build_json_schema(
        Annotated[str, MinLength(1), MaxLength(5), Pattern("$[a-z]+^")]
    ) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        minLength=1,
        maxLength=5,
        pattern="$[a-z]+^",
    )


def test_jsonschema_for_list():
    assert build_json_schema(List[int]) == JSONArraySchema(
        items=JSONSchema(type=JSONSchemaInstanceType.INTEGER)
    )
    assert build_json_schema(List) == JSONArraySchema()
    assert build_json_schema(List[Any]) == JSONArraySchema()
    assert build_json_schema(Annotated[List, min])
    if PEP_585_COMPATIBLE:
        assert build_json_schema(list) == JSONArraySchema()


def test_jsonschema_for_deque():
    assert build_json_schema(Deque[int]) == JSONArraySchema(
        items=JSONSchema(type=JSONSchemaInstanceType.INTEGER),
    )
    assert build_json_schema(Deque) == JSONArraySchema()
    assert build_json_schema(Deque[Any]) == JSONArraySchema()
    if PEP_585_COMPATIBLE:
        assert build_json_schema(collections.deque) == JSONArraySchema()


def test_jsonschema_for_tuple():
    assert build_json_schema(Tuple[int]) == JSONArraySchema(
        prefixItems=[JSONSchema(type=JSONSchemaInstanceType.INTEGER)],
        maxItems=1,
        minItems=1,
    )
    assert build_json_schema(Tuple) == JSONArraySchema()
    assert build_json_schema(Tuple[()]) == JSONArraySchema(maxItems=0)
    assert build_json_schema(Tuple[Any]) == JSONArraySchema(
        prefixItems=[EmptyJSONSchema()], maxItems=1, minItems=1
    )
    assert build_json_schema(Tuple[Any, ...]) == JSONArraySchema()
    assert build_json_schema(Tuple[int, ...]) == JSONArraySchema(
        items=JSONSchema(type=JSONSchemaInstanceType.INTEGER)
    )
    if PEP_585_COMPATIBLE:
        assert build_json_schema(tuple) == JSONArraySchema()


def test_jsonschema_for_named_tuple():
    assert build_json_schema(MyNamedTuple) == JSONArraySchema(
        prefixItems=[
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            JSONSchema(type=JSONSchemaInstanceType.NUMBER),
        ],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(MyNamedTupleWithDefaults) == JSONArraySchema(
        prefixItems=[
            JSONSchema(type=JSONSchemaInstanceType.INTEGER, default=1),
            JSONSchema(type=JSONSchemaInstanceType.NUMBER, default=2.0),
        ],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(MyNamedTupleWithOptional) == JSONArraySchema(
        prefixItems=[
            JSONSchema(
                anyOf=[
                    JSONSchema(type=JSONSchemaInstanceType.INTEGER),
                    JSONSchema(type=JSONSchemaInstanceType.NULL),
                ]
            ),
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        ],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(MyUntypedNamedTuple) == JSONArraySchema(
        prefixItems=[EmptyJSONSchema(), EmptyJSONSchema()],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(
        MyUntypedNamedTupleWithDefaults
    ) == JSONArraySchema(
        prefixItems=[JSONSchema(default=1), JSONSchema(default=2.0)],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(GenericNamedTuple) == JSONArraySchema(
        prefixItems=[
            EmptyJSONSchema(),
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        ],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(GenericNamedTuple[float]) == JSONArraySchema(
        prefixItems=[
            JSONSchema(type=JSONSchemaInstanceType.NUMBER),
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        ],
        maxItems=2,
        minItems=2,
    )


def test_jsonschema_for_named_tuple_as_dict():
    @dataclass
    class DataClassA:
        x: MyNamedTuple = field(metadata={"serialize": "as_dict"})

    @dataclass
    class DataClassB:
        x: MyNamedTuple

        class Config:
            namedtuple_as_dict = True

    schema = JSONObjectSchema(
        properties={
            "i": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            "f": JSONSchema(type=JSONSchemaInstanceType.NUMBER),
        },
        additionalProperties=False,
        required=["i", "f"],
    )
    assert build_json_schema(DataClassA).properties["x"] == schema
    assert build_json_schema(DataClassB).properties["x"] == schema


def test_jsonschema_for_named_tuple_as_list():
    @dataclass
    class DataClassA:
        x: MyNamedTuple = field(metadata={"serialize": "as_list"})

    @dataclass
    class DataClassB:
        x: MyNamedTuple

        class Config:
            namedtuple_as_dict = False

    schema = JSONArraySchema(
        prefixItems=[
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            JSONSchema(type=JSONSchemaInstanceType.NUMBER),
        ],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(DataClassA).properties["x"] == schema
    assert build_json_schema(DataClassB).properties["x"] == schema


def test_jsonschema_for_named_tuple_with_overridden_serialization_method():
    class MySerializationStrategy(SerializationStrategy):
        def serialize(self, value: Any) -> MyNamedTuple: ...

        def deserialize(self, value: Any) -> Any: ...

    class MyAnySerializationStrategy(SerializationStrategy):
        def serialize(self, value: Any) -> Any: ...

        def deserialize(self, value: Any) -> Any: ...
    @dataclass
    class DataClassA:
        x: MyNamedTuple

        class Config:
            serialization_strategy = {
                MyNamedTuple: {"serialize": lambda x: x}
            }

    @dataclass
    class DataClassB:
        x: MyNamedTuple

        class Config:
            serialization_strategy = {
                MyNamedTuple: MyAnySerializationStrategy()
            }

    @dataclass
    class DataClassC:
        x: MyNamedTuple

        class Config:
            serialization_strategy = {MyNamedTuple: pass_through}

    @dataclass
    class DataClassD:
        x: MyNamedTuple

        class Config:
            serialization_strategy = {MyNamedTuple: MySerializationStrategy()}

    schema = JSONArraySchema(
        prefixItems=[
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            JSONSchema(type=JSONSchemaInstanceType.NUMBER),
        ],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(DataClassA).properties["x"] == EmptyJSONSchema()
    assert build_json_schema(DataClassB).properties["x"] == EmptyJSONSchema()
    assert build_json_schema(DataClassC).properties["x"] == schema
    assert build_json_schema(DataClassD).properties["x"] == schema


def test_jsonschema_for_set():
    for generic_type in (FrozenSet, AbstractSet):
        assert build_json_schema(generic_type[int]) == JSONArraySchema(
            items=JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            uniqueItems=True,
        )
        assert build_json_schema(generic_type) == JSONArraySchema(
            uniqueItems=True
        )
        assert build_json_schema(generic_type[Any]) == JSONArraySchema(
            uniqueItems=True
        )
    if PEP_585_COMPATIBLE:
        assert build_json_schema(frozenset) == JSONArraySchema(
            uniqueItems=True
        )
        assert build_json_schema(set) == JSONArraySchema(uniqueItems=True)


def test_jsonschema_for_chainmap():
    assert build_json_schema(ChainMap[str, int]) == JSONArraySchema(
        items=JSONObjectSchema(
            propertyNames=JSONSchema(type=JSONSchemaInstanceType.STRING),
            additionalProperties=JSONSchema(
                type=JSONSchemaInstanceType.INTEGER
            ),
        )
    )
    assert build_json_schema(ChainMap) == JSONArraySchema(
        items=JSONObjectSchema()
    )
    assert build_json_schema(ChainMap[Any, Any]) == JSONArraySchema(
        items=JSONObjectSchema()
    )


def test_jsonschema_for_counter():
    assert build_json_schema(Counter[str]) == JSONObjectSchema(
        additionalProperties=JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        propertyNames=JSONSchema(type=JSONSchemaInstanceType.STRING),
    )
    assert build_json_schema(Counter) == JSONObjectSchema(
        additionalProperties=JSONSchema(type=JSONSchemaInstanceType.INTEGER)
    )
    assert build_json_schema(Counter[Any]) == JSONObjectSchema(
        additionalProperties=JSONSchema(type=JSONSchemaInstanceType.INTEGER),
    )
    assert build_json_schema(
        Annotated[
            Counter,
            MinProperties(1),
            MaxProperties(5),
            DependentRequired({"a": {"b"}, "b": {"c", "d"}}),
        ]
    ) == JSONObjectSchema(
        additionalProperties=JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        minProperties=1,
        maxProperties=5,
        dependentRequired={"a": {"b"}, "b": {"c", "d"}},
    )


def test_jsonschema_for_typeddict():
    assert build_json_schema(TypedDictRequiredKeys) == JSONObjectSchema(
        properties={
            "int": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            "float": JSONSchema(type=JSONSchemaInstanceType.NUMBER),
        },
        additionalProperties=False,
        required=["float", "int"],
    )
    assert build_json_schema(TypedDictOptionalKeys) == JSONObjectSchema(
        properties={
            "int": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            "float": JSONSchema(type=JSONSchemaInstanceType.NUMBER),
        },
        additionalProperties=False,
    )
    assert build_json_schema(
        TypedDictRequiredAndOptionalKeys
    ) == JSONObjectSchema(
        properties={
            "int": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            "float": JSONSchema(type=JSONSchemaInstanceType.NUMBER),
            "str": JSONSchema(type=JSONSchemaInstanceType.STRING),
        },
        additionalProperties=False,
        required=["float", "int"],
    )
    assert build_json_schema(
        TypedDictRequiredKeysWithOptional
    ) == JSONObjectSchema(
        properties={
            "x": JSONSchema(
                anyOf=[
                    JSONSchema(type=JSONSchemaInstanceType.INTEGER),
                    JSONSchema(type=JSONSchemaInstanceType.NULL),
                ]
            ),
            "y": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        },
        additionalProperties=False,
        required=["x", "y"],
    )
    assert build_json_schema(
        TypedDictOptionalKeysWithOptional
    ) == JSONObjectSchema(
        properties={
            "x": JSONSchema(
                anyOf=[
                    JSONSchema(type=JSONSchemaInstanceType.INTEGER),
                    JSONSchema(type=JSONSchemaInstanceType.NULL),
                ]
            ),
            "y": JSONSchema(type=JSONSchemaInstanceType.NUMBER),
        },
        additionalProperties=False,
    )
    assert build_json_schema(GenericTypedDict) == JSONObjectSchema(
        properties={
            "x": EmptyJSONSchema(),
            "y": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        },
        additionalProperties=False,
        required=["x", "y"],
    )
    assert build_json_schema(GenericTypedDict[float]) == JSONObjectSchema(
        properties={
            "x": JSONSchema(type=JSONSchemaInstanceType.NUMBER),
            "y": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        },
        additionalProperties=False,
        required=["x", "y"],
    )
    assert build_json_schema(GenericTypedDict[Any]) == JSONObjectSchema(
        properties={
            "x": EmptyJSONSchema(),
            "y": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        },
        additionalProperties=False,
        required=["x", "y"],
    )
    assert build_json_schema(TypedDictCorrectNotRequired) == JSONObjectSchema(
        properties={
            "required": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            "not_required": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        },
        additionalProperties=False,
        required=["required"],
    )
    assert build_json_schema(TypedDictCorrectRequired) == JSONObjectSchema(
        properties={
            "required": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
            "not_required": JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        },
        additionalProperties=False,
        required=["required"],
    )


def test_jsonschema_for_mapping():
    for generic_type in (
        Dict,
        Mapping,
        MutableMapping,
        OrderedDict,
        DefaultDict,
    ):
        assert build_json_schema(generic_type[str, int]) == JSONObjectSchema(
            additionalProperties=JSONSchema(
                type=JSONSchemaInstanceType.INTEGER
            ),
            propertyNames=JSONSchema(type=JSONSchemaInstanceType.STRING),
        )
        assert build_json_schema(generic_type) == JSONObjectSchema()
        assert build_json_schema(generic_type[Any, Any]) == JSONObjectSchema()
        assert build_json_schema(
            Annotated[
                generic_type,
                MinProperties(1),
                MaxProperties(5),
                DependentRequired({"a": {"b"}, "b": {"c", "d"}}),
            ]
        ) == JSONObjectSchema(
            minProperties=1,
            maxProperties=5,
            dependentRequired={"a": {"b"}, "b": {"c", "d"}},
        )
    assert build_json_schema(
        DefaultDict[int, Dict[str, int]]
    ) == JSONObjectSchema(
        additionalProperties=JSONObjectSchema(
            additionalProperties=JSONSchema(
                type=JSONSchemaInstanceType.INTEGER,
            ),
            propertyNames=JSONSchema(type=JSONSchemaInstanceType.STRING),
        ),
        propertyNames=JSONSchema(type=JSONSchemaInstanceType.INTEGER),
    )
    if PEP_585_COMPATIBLE:
        assert build_json_schema(dict) == JSONObjectSchema()


def test_jsonschema_for_sequence():
    assert build_json_schema(Sequence[int]) == JSONArraySchema(
        items=JSONSchema(type=JSONSchemaInstanceType.INTEGER)
    )
    assert build_json_schema(Sequence) == JSONArraySchema()
    assert build_json_schema(Sequence[Any]) == JSONArraySchema()


def test_jsonschema_for_pathlike():
    for pathlike_type in (
        Path,
        PurePath,
        PosixPath,
        PurePosixPath,
        WindowsPath,
        PureWindowsPath,
        os.PathLike,
        CustomPath,
    ):
        assert build_json_schema(pathlike_type) == JSONSchema(
            type=JSONSchemaInstanceType.STRING,
            format=JSONSchemaInstanceFormatExtension.PATH,
        )
    assert build_json_schema(
        Annotated[Path, MinLength(10), MaxLength(20)]
    ) == JSONSchema(
        type=JSONSchemaInstanceType.STRING,
        format=JSONSchemaInstanceFormatExtension.PATH,
        minLength=10,
        maxLength=20,
    )


def test_jsonschema_for_array_like_with_constraints():
    for array_type in (List, Deque, Tuple, FrozenSet, AbstractSet):
        schema = build_json_schema(
            Annotated[array_type, MinItems(1), MaxItems(5)]
        )
        assert schema.minItems == 1
        assert schema.maxItems == 5

        schema = build_json_schema(Annotated[array_type, UniqueItems(True)])
        assert schema.uniqueItems

        schema = build_json_schema(Annotated[array_type, UniqueItems(False)])
        assert not schema.uniqueItems

        contains_schema = JSONSchema(type=JSONSchemaInstanceType.INTEGER)
        schema = build_json_schema(
            Annotated[array_type, Contains(contains_schema)]
        )
        assert schema.contains == contains_schema
        assert schema.minContains is None
        assert schema.maxContains is None

        schema = build_json_schema(
            Annotated[array_type, MinContains(1), MaxContains(2)]
        )
        assert schema.contains is None
        assert schema.minContains is None
        assert schema.maxContains is None

        schema = build_json_schema(
            Annotated[
                array_type,
                Contains(contains_schema),
                MinContains(1),
                MaxContains(2),
            ]
        )
        assert schema.contains == contains_schema
        assert schema.minContains == 1
        assert schema.maxContains == 2


def test_jsonschema_for_enum():
    assert build_json_schema(MyEnum) == JSONSchema(
        enum=["letter a", "letter b"]
    )
    assert build_json_schema(MyStrEnum) == JSONSchema(
        enum=["letter a", "letter b"]
    )
    assert build_json_schema(MyNativeStrEnum) == JSONSchema(
        enum=["letter a", "letter b"]
    )
    assert build_json_schema(MyIntEnum) == JSONSchema(enum=[1, 2])
    assert build_json_schema(MyFlag) == JSONSchema(enum=[1, 2])
    assert build_json_schema(MyIntFlag) == JSONSchema(enum=[1, 2])


def test_jsonschema_for_type_var_tuple():
    assert build_json_schema(Ts) == JSONArraySchema()
    assert build_json_schema(
        Tuple[Unpack[Tuple[float, str]]]
    ) == JSONArraySchema(
        prefixItems=[
            JSONSchema(type=JSONSchemaInstanceType.NUMBER),
            JSONSchema(type=JSONSchemaInstanceType.STRING),
        ],
        maxItems=2,
        minItems=2,
    )
    assert build_json_schema(
        Tuple[Tuple[Unpack[Tuple[float, ...]], str], int]
    ) == JSONArraySchema(
        prefixItems=[
            JSONArraySchema(minItems=1),
            JSONSchema(type=JSONSchemaInstanceType.INTEGER),
        ],
        maxItems=2,
        minItems=2,
    )

    @dataclass
    class GenericDataClass(Generic[Unpack[Ts]]):
        x: Tuple[Unpack[Ts]]

    assert build_json_schema(
        GenericDataClass[Unpack[Tuple[int, float]], datetime.time]
    ) == JSONObjectSchema(
        title="GenericDataClass",
        properties={
            "x": JSONArraySchema(
                prefixItems=[
                    JSONSchema(type=JSONSchemaInstanceType.INTEGER),
                    JSONSchema(type=JSONSchemaInstanceType.NUMBER),
                    JSONSchema(
                        type=JSONSchemaInstanceType.STRING,
                        format=JSONSchemaStringFormat.TIME,
                    ),
                ],
                maxItems=3,
                minItems=3,
            )
        },
        additionalProperties=False,
        required=["x"],
    )


def test_jsonschema_for_unsupported_type():
    with pytest.raises(NotImplementedError):
        build_json_schema(object)


def test_overridden_serialization_method_without_signature():
    @dataclass
    class DataClass:
        x: datetime.datetime
        y: datetime.datetime = field(
            metadata={"serialize": datetime.datetime.timestamp}
        )

        class Config(BaseConfig):
            serialization_strategy = {
                datetime.datetime: {
                    "serialize": datetime.datetime.timestamp,
                }
            }

    with pytest.warns(
        UserWarning, match=f"Type Any will be used for {type_name(DataClass)}"
    ):
        assert (
            build_json_schema(DataClass).properties["x"] == EmptyJSONSchema()
        )
        assert (
            build_json_schema(DataClass).properties["y"] == EmptyJSONSchema()
        )


def test_overridden_serialization_method_without_return_annotation():
    def as_timestamp(dt: datetime.datetime):  # pragma: no cover
        return dt.timestamp()

    @dataclass
    class DataClass:
        x: datetime.datetime
        y: datetime.datetime = field(metadata={"serialize": as_timestamp})

        class Config(BaseConfig):
            serialization_strategy = {
                datetime.datetime: {"serialize": as_timestamp}
            }

    assert build_json_schema(DataClass).properties["x"] == EmptyJSONSchema()
    assert build_json_schema(DataClass).properties["y"] == EmptyJSONSchema()


def test_overridden_serialization_method_with_return_annotation():
    def as_timestamp(dt: datetime.datetime) -> float:
        return dt.timestamp()  # pragma: no cover

    def first_datetime_as_timestamp(
        seq: List[datetime.datetime],
    ) -> float:
        return as_timestamp(seq[0])  # pragma: no cover

    @dataclass
    class DataClass:
        a: datetime.datetime
        b: datetime.datetime = field(metadata={"serialize": as_timestamp})
        c: List[datetime.datetime]
        d: List[datetime.datetime] = field(
            metadata={"serialize": first_datetime_as_timestamp}
        )
        e: Optional[datetime.datetime]
        f: List[Optional[datetime.datetime]]

        class Config(BaseConfig):
            serialization_strategy = {
                datetime.datetime: {"serialize": as_timestamp}
            }

    schema = build_json_schema(DataClass)
    assert schema.properties["a"] == JSONSchema(
        type=JSONSchemaInstanceType.NUMBER
    )
    assert schema.properties["b"] == JSONSchema(
        type=JSONSchemaInstanceType.NUMBER
    )
    assert schema.properties["c"] == JSONArraySchema(
        items=JSONSchema(type=JSONSchemaInstanceType.NUMBER)
    )
    assert schema.properties["d"] == JSONSchema(
        type=JSONSchemaInstanceType.NUMBER
    )
    assert schema.properties["e"] == JSONSchema(
        anyOf=[
            JSONSchema(type=JSONSchemaInstanceType.NUMBER),
            JSONSchema(type=JSONSchemaInstanceType.NULL),
        ]
    )
    assert schema.properties["f"] == JSONArraySchema(
        items=JSONSchema(
            anyOf=[
                JSONSchema(type=JSONSchemaInstanceType.NUMBER),
                JSONSchema(type=JSONSchemaInstanceType.NULL),
            ]
        )
    )


@pytest.mark.parametrize(
    ("basic_type", "schema_type"),
    (
        (str, JSONSchemaInstanceType.STRING),
        (int, JSONSchemaInstanceType.INTEGER),
        (float, JSONSchemaInstanceType.NUMBER),
        (bool, JSONSchemaInstanceType.BOOLEAN),
    ),
)
def test_basic_type_as_overridden_serialization_method(
    basic_type, schema_type
):
    @dataclass
    class DataClass:
        x: ThirdPartyType
        y: List[ThirdPartyType]

        class Config(BaseConfig):
            serialization_strategy = {
                ThirdPartyType: {"serialize": basic_type}
            }

    assert build_json_schema(DataClass).properties["x"] == JSONSchema(
        type=schema_type
    )
    assert build_json_schema(DataClass).properties["y"] == JSONArraySchema(
        items=JSONSchema(type=schema_type)
    )


def test_dataclass_overridden_serialization_method():
    def serialize_as_str(value: Any) -> str:
        return str(value)  # pragma: no cover

    @dataclass
    class Inner:
        x: int

    @dataclass
    class DataClass:
        a: Inner
        b: Optional[Inner]
        c: List[Inner]
        d: List[Optional[Inner]]

        class Config(BaseConfig):
            serialization_strategy = {Inner: {"serialize": serialize_as_str}}

    schema = build_json_schema(DataClass)
    assert schema.properties["a"] == JSONSchema(
        type=JSONSchemaInstanceType.STRING
    )
    assert schema.properties["b"] == JSONSchema(
        anyOf=[
            JSONSchema(type=JSONSchemaInstanceType.STRING),
            JSONSchema(type=JSONSchemaInstanceType.NULL),
        ]
    )
    assert schema.properties["c"] == JSONArraySchema(
        items=JSONSchema(type=JSONSchemaInstanceType.STRING)
    )
    assert schema.properties["d"] == JSONArraySchema(
        items=JSONSchema(
            anyOf=[
                JSONSchema(type=JSONSchemaInstanceType.STRING),
                JSONSchema(type=JSONSchemaInstanceType.NULL),
            ]
        )
    )


def test_third_party_overridden_serialization_method():
    schema = build_json_schema(DataClassWithThirdPartyType)
    assert schema.properties["a"] ==
JSONSchema( type=JSONSchemaInstanceType.STRING ) assert schema.properties["b"] == JSONSchema( anyOf=[ JSONSchema(type=JSONSchemaInstanceType.STRING), JSONSchema(type=JSONSchemaInstanceType.NULL), ] ) assert schema.properties["c"] == JSONSchema( type=JSONSchemaInstanceType.STRING, default="dummy" ) assert schema.properties["d"] == JSONSchema( anyOf=[ JSONSchema(type=JSONSchemaInstanceType.STRING), JSONSchema(type=JSONSchemaInstanceType.NULL), ], default=None, ) def test_jsonschema_with_override_for_properties(): @dataclass class DataClass: x: str y: datetime.datetime class Config(BaseConfig): json_schema = { "properties": { "x": { "type": "string", "format": "date-time", "description": "Description for x", }, "y": { "type": "number", "description": "Description for y", }, } } assert build_json_schema(DataClass).properties["x"] == JSONSchema( type=JSONSchemaInstanceType.STRING, format=JSONSchemaStringFormat.DATETIME, description="Description for x", ) assert build_json_schema(DataClass).properties["y"] == JSONSchema( type=JSONSchemaInstanceType.NUMBER, description="Description for y", ) def test_jsonschema_with_dialect_uri(): schema = build_json_schema(str, with_dialect_uri=True) assert schema.schema == DRAFT_2020_12.uri assert schema.to_dict()["$schema"] == DRAFT_2020_12.uri schema = build_json_schema( str, dialect=OPEN_API_3_1, with_dialect_uri=True ) assert schema.schema == OPEN_API_3_1.uri assert schema.to_dict()["$schema"] == OPEN_API_3_1.uri def test_jsonschema_with_ref_prefix(): @dataclass class DataClass: pass schema = {"$ref": "#/components/responses/DataClass"} assert ( build_json_schema( List[DataClass], all_refs=True, ref_prefix="#/components/responses" ).items.to_dict() == schema ) assert ( build_json_schema( List[DataClass], all_refs=True, ref_prefix="#/components/responses/", ).items.to_dict() == schema ) assert ( build_json_schema( List[DataClass], dialect=OPEN_API_3_1, ref_prefix="#/components/responses/", ).items.to_dict() == schema ) assert ( JSONSchemaBuilder(all_refs=True, ref_prefix="#/components/responses") .build(List[DataClass]) .items.to_dict() == schema ) assert ( JSONSchemaBuilder( dialect=OPEN_API_3_1, ref_prefix="#/components/responses" ) .build(List[DataClass]) .items.to_dict() == schema ) def test_jsonschema_with_additional_properties_true(): @dataclass class DataClass: x: int class Config(BaseConfig): json_schema = {"additionalProperties": True} schema = JSONObjectSchema( title="DataClass", properties={ "x": JSONSchema(type=JSONSchemaInstanceType.INTEGER), }, additionalProperties=True, required=["x"], ) assert build_json_schema(DataClass) == schema def test_jsonschema_with_additional_properties_schema(): @dataclass class DataClass: x: int class Config(BaseConfig): json_schema = { "additionalProperties": JSONSchema( type=JSONSchemaInstanceType.INTEGER ) } schema = JSONObjectSchema( title="DataClass", properties={ "x": JSONSchema(type=JSONSchemaInstanceType.INTEGER), }, additionalProperties=JSONSchema(type=JSONSchemaInstanceType.INTEGER), required=["x"], ) assert build_json_schema(DataClass) == schema def test_jsonschema_with_discriminator_for_local_types(): @dataclass class A: value = "a" @dataclass class B: value = "b" @dataclass class Main: value: Annotated[ Union[A, B], Discriminator(field="value", include_supertypes=True) ] schema = JSONObjectSchema( title="Main", properties={ "value": JSONSchema( anyOf=[ JSONObjectSchema( type=JSONSchemaInstanceType.OBJECT, title="A", additionalProperties=False, ), JSONObjectSchema( type=JSONSchemaInstanceType.OBJECT, 
title="B", additionalProperties=False, ), ], ), }, additionalProperties=False, required=["value"], ) assert build_json_schema(Main) == schema def test_jsonschema_with_discriminator_with_default_for_local_types(): @dataclass class A: value = "a" @dataclass class B: value = "b" @dataclass class Main: value: Annotated[ Union[A, B, None], Discriminator(field="value", include_supertypes=True), ] = None schema = JSONObjectSchema( title="Main", properties={ "value": JSONSchema( anyOf=[ JSONObjectSchema( type=JSONSchemaInstanceType.OBJECT, title="A", additionalProperties=False, ), JSONObjectSchema( type=JSONSchemaInstanceType.OBJECT, title="B", additionalProperties=False, ), JSONSchema( type=JSONSchemaInstanceType.NULL, ), ], default=None, ), }, additionalProperties=False, ) assert build_json_schema(Main) == schema def test_jsonschema_with_optional_discriminator_and_default_for_local_types(): @dataclass class A: value = "a" @dataclass class B: value = "b" @dataclass class Main: value: Optional[ Annotated[ Union[A, B], Discriminator(field="value", include_supertypes=True), ] ] = None schema = JSONObjectSchema( title="Main", properties={ "value": JSONSchema( anyOf=[ JSONSchema( anyOf=[ JSONObjectSchema( type=JSONSchemaInstanceType.OBJECT, title="A", additionalProperties=False, ), JSONObjectSchema( type=JSONSchemaInstanceType.OBJECT, title="B", additionalProperties=False, ), ] ), JSONSchema( type=JSONSchemaInstanceType.NULL, ), ], default=None, ), }, additionalProperties=False, ) assert build_json_schema(Main) == schema mashumaro-3.13.1/tests/test_jsonschema/test_jsonschema_generation_with_forward_refs.py000066400000000000000000000022221463331001200316710ustar00rootroot00000000000000from __future__ import annotations from typing import ForwardRef, TypedDict import pytest from mashumaro.core.const import PY_39_MIN from mashumaro.core.meta.helpers import get_function_arg_annotation from mashumaro.jsonschema import build_json_schema from mashumaro.jsonschema.models import ( JSONObjectSchema, JSONSchema, JSONSchemaInstanceType, ) class MyTypedDict(TypedDict): x: int @pytest.mark.skipif( not PY_39_MIN, reason=( "On Python 3.8 ForwardRef doesn't have __forward_module__ " "which is needed here" ), ) def test_jsonschema_generation_for_forward_refs(): def foo(x: int, y: MyTypedDict): ... 
x_type = get_function_arg_annotation(foo, "x") assert isinstance(x_type, ForwardRef) assert build_json_schema(x_type).type is JSONSchemaInstanceType.INTEGER y_type = get_function_arg_annotation(foo, "y") assert isinstance(y_type, ForwardRef) assert build_json_schema(y_type) == JSONObjectSchema( type=JSONSchemaInstanceType.OBJECT, properties={"x": JSONSchema(type=JSONSchemaInstanceType.INTEGER)}, additionalProperties=False, required=["x"], ) mashumaro-3.13.1/tests/test_literal.py000066400000000000000000000075341463331001200200040ustar00rootroot00000000000000from dataclasses import dataclass import pytest from typing_extensions import Literal from mashumaro import DataClassDictMixin from mashumaro.config import ADD_DIALECT_SUPPORT, BaseConfig from mashumaro.dialect import Dialect from mashumaro.exceptions import InvalidFieldValue from mashumaro.helper import pass_through from tests.entities import MyEnum def test_literal_with_str(): @dataclass class DataClass(DataClassDictMixin): x: Literal["1", "2", "3"] assert DataClass.from_dict({"x": "1"}) == DataClass("1") assert DataClass.from_dict({"x": "2"}) == DataClass("2") assert DataClass("1").to_dict() == {"x": "1"} assert DataClass("2").to_dict() == {"x": "2"} with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": 1}) with pytest.raises(InvalidFieldValue): DataClass(1).to_dict() def test_literal_with_int(): @dataclass class DataClass(DataClassDictMixin): x: Literal[1, 2] assert DataClass.from_dict({"x": 1}) == DataClass(1) assert DataClass.from_dict({"x": 2}) == DataClass(2) assert DataClass(1).to_dict() == {"x": 1} assert DataClass(2).to_dict() == {"x": 2} with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": "1"}) with pytest.raises(InvalidFieldValue): DataClass("1").to_dict() def test_literal_with_bool(): @dataclass class DataClass(DataClassDictMixin): x: Literal[True, False] assert DataClass.from_dict({"x": True}) == DataClass(True) assert DataClass.from_dict({"x": False}) == DataClass(False) with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": "a"}) with pytest.raises(InvalidFieldValue): DataClass("a").to_dict() def test_literal_with_none(): @dataclass class DataClass(DataClassDictMixin): x: Literal[None] assert DataClass.from_dict({"x": None}) == DataClass(None) assert DataClass(None).to_dict() == {"x": None} with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": "1"}) with pytest.raises(InvalidFieldValue): DataClass("1").to_dict() def test_literal_with_bytes(): @dataclass class DataClass(DataClassDictMixin): x: Literal[b"\x00"] assert DataClass.from_dict({"x": "AA==\n"}) == DataClass(b"\x00") assert DataClass(b"\x00").to_dict() == {"x": "AA==\n"} with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": "\x00"}) with pytest.raises(InvalidFieldValue): DataClass("AA==\n").to_dict() def test_literal_with_bytes_overridden(): @dataclass class DataClass(DataClassDictMixin): x: Literal[b"\x00"] class Config(BaseConfig): serialization_strategy = {bytes: pass_through} assert DataClass.from_dict({"x": b"\x00"}) == DataClass(b"\x00") assert DataClass(b"\x00").to_dict() == {"x": b"\x00"} with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": "AA==\n"}) def test_literal_with_enum(): @dataclass class DataClass(DataClassDictMixin): x: Literal[MyEnum.a] assert DataClass.from_dict({"x": "letter a"}) == DataClass(MyEnum.a) with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": "letter b"}) with pytest.raises(InvalidFieldValue): DataClass(MyEnum.b).to_dict() def 
test_literal_with_dialect(): @dataclass class DataClass(DataClassDictMixin): x: Literal[b"\x00"] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] class MyDialect(Dialect): serialization_strategy = {bytes: pass_through} instance = DataClass(b"\x00") assert DataClass.from_dict({"x": b"\x00"}, dialect=MyDialect) == instance assert instance.to_dict(dialect=MyDialect) == {"x": b"\x00"} with pytest.raises(InvalidFieldValue): DataClass.from_dict({"x": "AA==\n"}, dialect=MyDialect) mashumaro-3.13.1/tests/test_meta.py000066400000000000000000000646271463331001200173040ustar00rootroot00000000000000import collections import collections.abc import types import typing from dataclasses import InitVar, dataclass from datetime import datetime import pytest import typing_extensions from mashumaro import DataClassDictMixin from mashumaro.core.const import ( PEP_585_COMPATIBLE, PY_38, PY_39_MIN, PY_310_MIN, ) from mashumaro.core.meta.code.builder import CodeBuilder # noinspection PyProtectedMember from mashumaro.core.meta.helpers import ( collect_type_params, get_args, get_class_that_defines_field, get_class_that_defines_method, get_function_arg_annotation, get_function_return_annotation, get_generic_name, get_literal_values, get_type_annotations, get_type_origin, get_type_var_default, hash_type_args, is_annotated, is_dataclass_dict_mixin, is_dataclass_dict_mixin_subclass, is_dialect_subclass, is_generic, is_hashable, is_hashable_type, is_init_var, is_literal, is_named_tuple, is_new_type, is_optional, is_self, is_type_var_any, is_union, not_none_type_arg, resolve_type_params, substitute_type_params, type_name, type_var_has_default, ) from mashumaro.core.meta.types.common import ( FieldContext, ValueSpec, ensure_generic_collection, ensure_mapping_key_type_hashable, ) from mashumaro.dialect import Dialect from mashumaro.exceptions import UnserializableField from mashumaro.mixins.json import DataClassJSONMixin from .entities import ( MyDataClass, MyDatetimeNewType, MyEnum, MyFlag, MyFrozenDataClass, MyGenericDataClass, MyGenericList, MyIntEnum, MyIntFlag, MyNamedTuple, MyNativeStrEnum, MyStrEnum, MyUntypedNamedTuple, T, TAny, TDefaultInt, TInt, TIntStr, ) NoneType = type(None) TMyDataClass = typing.TypeVar("TMyDataClass", bound=MyDataClass) def test_is_generic_unsupported_python(mocker): mocker.patch("mashumaro.core.meta.helpers.PY_38", False) mocker.patch("mashumaro.core.meta.helpers.PY_39_MIN", False) with pytest.raises(NotImplementedError): is_generic(int) def test_is_init_var(): assert is_init_var(InitVar[int]) assert not is_init_var(int) def test_is_literal_unsupported_python(mocker): mocker.patch("mashumaro.core.meta.helpers.PY_38", False) mocker.patch("mashumaro.core.meta.helpers.PY_39", False) mocker.patch("mashumaro.core.meta.helpers.PY_310_MIN", False) assert not is_literal(typing_extensions.Literal[1]) def test_no_code_builder(mocker): mocker.patch( "mashumaro.mixins.dict.DataClassDictMixin.__init_subclass__", lambda: ..., ) @dataclass class DataClass(DataClassDictMixin): pass assert DataClass.__pre_deserialize__({}) is None assert DataClass.__post_deserialize__(DataClass()) is None assert DataClass().__pre_serialize__() is None assert DataClass().__post_serialize__({}) is None def test_get_class_that_defines_method(): class A: def foo(self): ... @classmethod def bar(cls): ... def foobar(self): ... class B(A): def foobar(self): ... 
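# Inherited methods resolve to the class that first defined them in the MRO
# (A for "foo" and "bar"), while the override of "foobar" resolves to B.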
assert get_class_that_defines_method("foo", B) == A assert get_class_that_defines_method("bar", B) == A assert get_class_that_defines_method("foobar", B) == B def test_get_class_that_defines_field(): @dataclass class A: x: int y: int z: int @dataclass class B(A): y: float z: int assert get_class_that_defines_field("x", B) == A assert get_class_that_defines_field("y", B) == B assert get_class_that_defines_field("z", B) == B def test_get_unknown_declared_hook(): builder = CodeBuilder(object) assert builder.get_declared_hook("unknown_name") is None def test_is_dataclass_dict_mixin(): assert is_dataclass_dict_mixin(DataClassDictMixin) assert not is_dataclass_dict_mixin(DataClassJSONMixin) def test_is_dataclass_dict_mixin_subclass(): assert is_dataclass_dict_mixin_subclass(DataClassDictMixin) assert is_dataclass_dict_mixin_subclass(DataClassJSONMixin) assert is_dataclass_dict_mixin_subclass(MyDataClass) def test_is_type_var_any(): assert is_type_var_any(T) assert is_type_var_any(TAny) assert not is_type_var_any(TInt) assert not is_type_var_any(TDefaultInt) assert not is_type_var_any(typing.Any) assert not is_type_var_any(TMyDataClass) @pytest.mark.skipif(not PY_38, reason="requires python 3.8") def test_is_type_var_any_list_38(): # noinspection PyProtectedMember # noinspection PyUnresolvedReferences assert is_type_var_any(typing.List.__args__[0]) def test_type_name(): assert type_name(TAny) == "typing.Any" assert type_name(TInt) == "int" assert type_name(TDefaultInt) == "int" assert type_name(TMyDataClass) == "tests.entities.MyDataClass" assert type_name(TIntStr) == "typing.Union[int, str]" assert type_name(typing.List[TInt]) == "typing.List[int]" assert type_name(typing.Tuple[int]) == "typing.Tuple[int]" assert type_name(typing.Tuple[int, ...]) == "typing.Tuple[int, ...]" assert type_name(typing.Tuple[()]) == "typing.Tuple[()]" assert type_name(typing.Set[int]) == "typing.Set[int]" assert type_name(typing.FrozenSet[int]) == "typing.FrozenSet[int]" assert type_name(typing.Deque[int]) == "typing.Deque[int]" assert type_name(typing.Dict[int, int]) == "typing.Dict[int, int]" assert type_name(typing.Mapping[int, int]) == "typing.Mapping[int, int]" assert ( type_name(typing.MutableMapping[int, int]) == "typing.MutableMapping[int, int]" ) assert type_name(typing.Counter[int]) == "typing.Counter[int]" assert type_name(typing.ChainMap[int, int]) == "typing.ChainMap[int, int]" assert type_name(typing.Sequence[int]) == "typing.Sequence[int]" assert type_name(typing.Union[int, str]) == "typing.Union[int, str]" assert ( type_name(typing.Union[int, typing.Any]) == "typing.Union[int, typing.Any]" ) assert ( type_name(typing.OrderedDict[int, int]) == "typing.OrderedDict[int, int]" ) assert ( type_name(typing.DefaultDict[int, int]) == "typing.DefaultDict[int, int]" ) assert type_name(typing.Optional[int]) == "typing.Optional[int]" assert type_name(typing.Union[None, int]) == "typing.Optional[int]" assert type_name(typing.Union[int, None]) == "typing.Optional[int]" assert type_name(None) == "None" assert type_name(NoneType) == "NoneType" assert type_name(NoneType, none_type_as_none=True) == "None" assert type_name(typing.List[NoneType]) == "typing.List[NoneType]" assert ( type_name(typing.Union[int, str, None]) == "typing.Union[int, str, None]" ) assert type_name(typing.Optional[NoneType]) == "NoneType" if PY_39_MIN: assert ( type_name(types.MappingProxyType[int, int]) == "mappingproxy[int, int]" ) if PY_310_MIN: assert type_name(int | None) == "typing.Optional[int]" assert type_name(None | int) == 
"typing.Optional[int]" assert type_name(int | str) == "typing.Union[int, str]" if PY_310_MIN: assert ( type_name(MyDatetimeNewType) == "tests.entities.MyDatetimeNewType" ) else: assert type_name(MyDatetimeNewType) == type_name(datetime) assert ( type_name(typing_extensions.Annotated[TMyDataClass, None]) == "tests.entities.MyDataClass" ) @pytest.mark.skipif(not PEP_585_COMPATIBLE, reason="requires python 3.9+") def test_type_name_pep_585(): assert type_name(list[str]) == "list[str]" assert type_name(collections.deque[str]) == "collections.deque[str]" assert type_name(tuple[str]) == "tuple[str]" assert type_name(tuple[str, ...]) == "tuple[str, ...]" assert type_name(tuple[()]) == "tuple[()]" assert type_name(set[str]) == "set[str]" assert type_name(frozenset[str]) == "frozenset[str]" assert type_name(collections.abc.Set[str]) == "collections.abc.Set[str]" assert ( type_name(collections.abc.MutableSet[str]) == "collections.abc.MutableSet[str]" ) assert type_name(collections.Counter[str]) == "collections.Counter[str]" assert ( type_name(collections.abc.Sequence[str]) == "collections.abc.Sequence[str]" ) assert ( type_name(collections.abc.MutableSequence[str]) == "collections.abc.MutableSequence[str]" ) assert ( type_name(collections.ChainMap[str, str]) == "collections.ChainMap[str, str]" ) assert type_name(dict[str, str]) == "dict[str, str]" assert ( type_name(collections.abc.Mapping[str, str]) == "collections.abc.Mapping[str, str]" ) assert ( type_name(collections.OrderedDict[str, str]) == "collections.OrderedDict[str, str]" ) assert ( type_name(collections.defaultdict[str, str]) == "collections.defaultdict[str, str]" ) def test_type_name_short(): assert type_name(TAny, short=True) == "Any" assert type_name(TInt, short=True) == "int" assert type_name(TDefaultInt, short=True) == "int" assert type_name(TMyDataClass, short=True) == "MyDataClass" assert type_name(TIntStr, short=True) == "Union[int, str]" assert type_name(typing.List[TInt], short=True) == "List[int]" assert type_name(typing.Tuple[int], short=True) == "Tuple[int]" assert type_name(typing.Tuple[int, ...], short=True) == "Tuple[int, ...]" assert type_name(typing.Tuple[()], short=True) == "Tuple[()]" assert type_name(typing.Set[int], short=True) == "Set[int]" assert type_name(typing.FrozenSet[int], short=True) == "FrozenSet[int]" assert type_name(typing.Deque[int], short=True) == "Deque[int]" assert type_name(typing.Dict[int, int], short=True) == "Dict[int, int]" assert ( type_name(typing.Mapping[int, int], short=True) == "Mapping[int, int]" ) assert ( type_name(typing.MutableMapping[int, int], short=True) == "MutableMapping[int, int]" ) assert type_name(typing.Counter[int], short=True) == "Counter[int]" assert ( type_name(typing.ChainMap[int, int], short=True) == "ChainMap[int, int]" ) assert type_name(typing.Sequence[int], short=True) == "Sequence[int]" assert type_name(typing.Union[int, str], short=True) == "Union[int, str]" assert ( type_name(typing.Union[int, typing.Any], short=True) == "Union[int, Any]" ) assert ( type_name(typing.OrderedDict[int, int], short=True) == "OrderedDict[int, int]" ) assert ( type_name(typing.DefaultDict[int, int], short=True) == "DefaultDict[int, int]" ) assert type_name(typing.Optional[int], short=True) == "Optional[int]" assert type_name(typing.Union[None, int], short=True) == "Optional[int]" assert type_name(typing.Union[int, None], short=True) == "Optional[int]" assert type_name(None, short=True) == "None" assert type_name(NoneType, short=True) == "NoneType" assert type_name(NoneType, short=True, 
none_type_as_none=True) == "None" assert type_name(typing.List[NoneType], short=True) == "List[NoneType]" assert ( type_name(typing.Union[int, str, None], short=True) == "Union[int, str, None]" ) assert type_name(typing.Optional[NoneType], short=True) == "NoneType" if PY_39_MIN: assert ( type_name(types.MappingProxyType[int, int], short=True) == "mappingproxy[int, int]" ) if PY_310_MIN: assert type_name(int | None, short=True) == "Optional[int]" assert type_name(None | int, short=True) == "Optional[int]" assert type_name(int | str, short=True) == "Union[int, str]" if PY_310_MIN: assert type_name(MyDatetimeNewType, short=True) == "MyDatetimeNewType" else: assert type_name(MyDatetimeNewType, short=True) == type_name( datetime, short=True ) assert ( type_name(typing_extensions.Annotated[TMyDataClass, None], short=True) == "MyDataClass" ) @pytest.mark.skipif(not PEP_585_COMPATIBLE, reason="requires python 3.9+") def test_type_name_pep_585_short(): assert type_name(list[str], short=True) == "list[str]" assert type_name(collections.deque[str], short=True) == "deque[str]" assert type_name(tuple[str], short=True) == "tuple[str]" assert type_name(tuple[str, ...], short=True) == "tuple[str, ...]" assert type_name(tuple[()], short=True) == "tuple[()]" assert type_name(set[str], short=True) == "set[str]" assert type_name(frozenset[str], short=True) == "frozenset[str]" assert type_name(collections.abc.Set[str], short=True) == "Set[str]" assert ( type_name(collections.abc.MutableSet[str], short=True) == "MutableSet[str]" ) assert type_name(collections.Counter[str], short=True) == "Counter[str]" assert ( type_name(collections.abc.Sequence[str], short=True) == "Sequence[str]" ) assert ( type_name(collections.abc.MutableSequence[str], short=True) == "MutableSequence[str]" ) assert ( type_name(collections.ChainMap[str, str], short=True) == "ChainMap[str, str]" ) assert type_name(dict[str, str], short=True) == "dict[str, str]" assert ( type_name(collections.abc.Mapping[str, str], short=True) == "Mapping[str, str]" ) assert ( type_name(collections.OrderedDict[str, str], short=True) == "OrderedDict[str, str]" ) assert ( type_name(collections.defaultdict[str, str], short=True) == "defaultdict[str, str]" ) def test_get_type_origin(): assert get_type_origin(typing.List[int]) == list assert get_type_origin(typing.List) == list assert get_type_origin(MyGenericDataClass[int]) == MyGenericDataClass assert get_type_origin(MyGenericDataClass) == MyGenericDataClass assert ( get_type_origin(typing_extensions.Annotated[datetime, None]) == datetime ) assert ( get_type_origin(typing_extensions.Required[int]) == typing_extensions.Required ) def test_resolve_type_params(): @dataclass class A(typing.Generic[T]): x: T @dataclass class B(A[int]): pass resolved = resolve_type_params(B) assert resolved[A] == {T: int} assert resolved[B] == {} def test_get_generic_name(): assert get_generic_name(typing.List[int]) == "typing.List" assert get_generic_name(typing.List[int], short=True) == "List" assert ( get_generic_name(MyGenericDataClass[int]) == "tests.entities.MyGenericDataClass" ) assert ( get_generic_name(MyGenericDataClass[int], short=True) == "MyGenericDataClass" ) def test_get_generic_collection_based_class_name(): assert get_generic_name(MyGenericList, short=True) == "MyGenericList" assert get_generic_name(MyGenericList) == "tests.entities.MyGenericList" assert get_generic_name(MyGenericList[int], short=True) == "MyGenericList" assert ( get_generic_name(MyGenericList[int]) == "tests.entities.MyGenericList" ) def 
test_is_dialect_subclass(): class MyDialect(Dialect): pass assert is_dialect_subclass(Dialect) assert is_dialect_subclass(MyDialect) assert not is_dialect_subclass(123) def test_is_union(): t = typing.Optional[str] assert is_union(t) assert get_args(t) == (str, NoneType) t = typing.Union[str, None] assert is_union(t) assert get_args(t) == (str, NoneType) t = typing.Union[None, str] assert is_union(t) assert get_args(t) == (NoneType, str) @pytest.mark.skipif(not PY_310_MIN, reason="requires python 3.10+") def test_is_union_pep_604(): t = str | None assert is_union(t) assert get_args(t) == (str, NoneType) t = None | str assert is_union(t) assert get_args(t) == (NoneType, str) def test_is_optional(): t = typing.Optional[str] assert is_optional(t) assert get_args(t) == (str, NoneType) t = typing.Union[str, None] assert is_optional(t) assert get_args(t) == (str, NoneType) t = typing.Union[None, str] assert is_optional(t) assert get_args(t) == (NoneType, str) @pytest.mark.skipif(not PY_310_MIN, reason="requires python 3.10+") def test_is_optional_pep_604(): t = str | None assert is_optional(t) assert get_args(t) == (str, NoneType) t = None | str assert is_optional(t) assert get_args(t) == (NoneType, str) def test_not_non_type_arg(): assert not_none_type_arg((str, int)) == str assert not_none_type_arg((NoneType, int)) == int assert not_none_type_arg((str, NoneType)) == str assert not_none_type_arg((T, int), {T: NoneType}) == int assert not_none_type_arg((NoneType,)) is None def test_is_named_tuple(): assert is_named_tuple(MyNamedTuple) assert is_named_tuple(MyUntypedNamedTuple) assert not is_named_tuple(object()) def test_is_new_type(): assert is_new_type(typing.NewType("MyNewType", int)) assert not is_new_type(int) def test_is_annotated(): assert is_annotated(typing_extensions.Annotated[datetime, None]) assert not is_annotated(datetime) def test_is_literal(): assert is_literal(typing_extensions.Literal[1, 2, 3]) assert not is_literal(typing_extensions.Literal) assert not is_literal([1, 2, 3]) def test_get_literal_values(): assert get_literal_values(typing_extensions.Literal[1, 2, 3]) == (1, 2, 3) assert get_literal_values( typing_extensions.Literal[ 1, typing_extensions.Literal[typing_extensions.Literal[2], 3] ] ) == (1, 2, 3) def test_type_name_literal(): assert type_name( getattr(typing, "Literal")[ 1, "a", b"\x00", True, False, None, MyEnum.a, MyStrEnum.a, MyNativeStrEnum.a, MyIntEnum.a, MyFlag.a, MyIntFlag.a, typing_extensions.Literal[2, 3], typing_extensions.Literal[typing_extensions.Literal["b", "c"]], ] ) == ( f"typing.Literal[1, 'a', b'\\x00', True, False, None, " "tests.entities.MyEnum.a, tests.entities.MyStrEnum.a, " "tests.entities.MyNativeStrEnum.a, tests.entities.MyIntEnum.a, " "tests.entities.MyFlag.a, tests.entities.MyIntFlag.a, 2, 3, 'b', 'c']" ) def test_code_builder_get_pack_method_name(): builder = CodeBuilder(object) type_args = (int,) type_args_hash = hash_type_args((int,)) method_name = builder.get_pack_method_name() assert method_name == "__mashumaro_to_dict__" assert method_name.public == "to_dict" method_name = builder.get_pack_method_name(type_args=type_args) assert method_name == f"__mashumaro_to_dict_{type_args_hash}__" assert method_name.public == f"to_dict_{type_args_hash}" method_name = builder.get_pack_method_name( type_args=type_args, 
format_name="yaml" ) assert method_name == f"__mashumaro_to_dict_yaml_{type_args_hash}__" assert method_name.public == f"to_dict_yaml_{type_args_hash}" method_name = builder.get_pack_method_name(format_name="yaml") assert method_name == "__mashumaro_to_dict_yaml__" assert method_name.public == "to_dict_yaml" method_name = builder.get_pack_method_name( format_name="yaml", encoder=object ) assert method_name == "__mashumaro_to_yaml__" assert method_name.public == "to_yaml" method_name = builder.get_pack_method_name( type_args=type_args, encoder=object ) assert method_name == f"__mashumaro_to_dict_{type_args_hash}__" assert method_name.public == f"to_dict_{type_args_hash}" method_name = builder.get_pack_method_name(encoder=object) assert method_name == "__mashumaro_to_dict__" assert method_name.public == "to_dict" method_name = builder.get_pack_method_name( type_args=type_args, format_name="yaml", encoder=object ) assert method_name == "__mashumaro_to_yaml__" assert method_name.public == "to_yaml" def test_code_builder_get_unpack_method_name(): builder = CodeBuilder(object) type_args = (int,) type_args_hash = hash_type_args((int,)) method_name = builder.get_unpack_method_name() assert method_name == "__mashumaro_from_dict__" assert method_name.public == "from_dict" method_name = builder.get_unpack_method_name(type_args=type_args) assert method_name == f"__mashumaro_from_dict_{type_args_hash}__" assert method_name.public == f"from_dict_{type_args_hash}" method_name = builder.get_unpack_method_name( type_args=type_args, format_name="yaml" ) assert method_name == f"__mashumaro_from_dict_yaml_{type_args_hash}__" assert method_name.public == f"from_dict_yaml_{type_args_hash}" method_name = builder.get_unpack_method_name(format_name="yaml") assert method_name == "__mashumaro_from_dict_yaml__" assert method_name.public == "from_dict_yaml" method_name = builder.get_unpack_method_name( format_name="yaml", decoder=object ) assert method_name == "__mashumaro_from_yaml__" assert method_name.public == "from_yaml" method_name = builder.get_unpack_method_name( type_args=type_args, decoder=object ) assert method_name == f"__mashumaro_from_dict_{type_args_hash}__" assert method_name.public == f"from_dict_{type_args_hash}" method_name = builder.get_unpack_method_name(decoder=object) assert method_name == "__mashumaro_from_dict__" assert method_name.public == "from_dict" method_name = builder.get_unpack_method_name( type_args=type_args, format_name="yaml", decoder=object ) assert method_name == "__mashumaro_from_yaml__" assert method_name.public == "from_yaml" def test_is_self(): assert is_self(typing_extensions.Self) assert not is_self(object) @pytest.mark.skipif(PEP_585_COMPATIBLE, reason="requires python <3.9") def test_ensure_generic_collection_not_pep_585(): for t in ( tuple, list, set, frozenset, dict, collections.deque, collections.ChainMap, collections.OrderedDict, collections.defaultdict, collections.Counter, ): with pytest.raises(UnserializableField): ensure_generic_collection( ValueSpec(t, "", CodeBuilder(None), FieldContext("", {})) ) def test_ensure_generic_collection_not_generic(): assert not ensure_generic_collection( ValueSpec(int, "", CodeBuilder(None), FieldContext("", {})) ) @pytest.mark.skipif(PEP_585_COMPATIBLE, reason="requires python <3.9") def test_ensure_generic_collection_with_unhashable_args(): assert ensure_generic_collection( ValueSpec( typing.Dict[str, typing_extensions.Annotated[int, {"x": 42}]], "", CodeBuilder(None), FieldContext("", {}), ) ) def 
test_ensure_mapping_key_type_hashable(): spec = ValueSpec(typing.Dict, "", CodeBuilder(None), FieldContext("", {})) with pytest.raises(UnserializableField): ensure_mapping_key_type_hashable(spec, (dict, str)) with pytest.raises(UnserializableField): ensure_mapping_key_type_hashable(spec, (MyDataClass, str)) assert ensure_mapping_key_type_hashable(spec, (MyFrozenDataClass, str)) def test_get_function_arg_annotation(): def foo(x: int, y: Dialect) -> None: ... assert get_function_arg_annotation(foo, "x") == int assert get_function_arg_annotation(foo, "y") == Dialect assert get_function_arg_annotation(foo, arg_name="x") == int assert get_function_arg_annotation(foo, arg_name="y") == Dialect assert get_function_arg_annotation(foo, arg_pos=0) == int assert get_function_arg_annotation(foo, arg_pos=1) == Dialect with pytest.raises(ValueError): get_function_arg_annotation(foo) def test_get_function_return_annotation(): def foo(x: int, y: Dialect) -> Dialect: ... assert get_function_return_annotation(foo) == Dialect def test_collect_type_params(): T = typing.TypeVar("T") S = typing.TypeVar("S") class MyGeneric(typing.Generic[T, S], typing.Mapping[T, S]): pass assert collect_type_params(MyGeneric[T, T]) == [T] def test_is_generic_like_with_class_getitem(): class MyClass: def __class_getitem__(cls, item): return cls assert is_generic(MyClass) assert is_generic(MyClass[int]) def test_get_type_annotations(): assert get_type_annotations(int) == [] assert get_type_annotations(typing_extensions.Annotated[int, 42]) == (42,) def test_substitute_type_params(): assert substitute_type_params(int, {}) == int assert substitute_type_params(T, {T: int}) == int assert ( substitute_type_params(typing.Dict[T, TAny], {T: str}) == typing.Dict[str, TAny] ) assert ( substitute_type_params(typing_extensions.Annotated[T, 42], {T: int}) == typing_extensions.Annotated[int, 42] ) assert ( substitute_type_params( typing.Dict[str, typing_extensions.Annotated[int, {}]], {T: int} ) == typing.Dict[str, typing_extensions.Annotated[int, {}]] ) def test_is_hashable(): assert is_hashable(42) is True assert is_hashable({}) is False assert is_hashable(typing_extensions.Annotated[int, 42]) is True assert is_hashable(typing_extensions.Annotated[int, {}]) is False def test_is_hashable_type(): assert is_hashable_type(dict) is False assert is_hashable_type(int) is True assert is_hashable_type(MyFrozenDataClass) is True assert is_hashable_type(MyDataClass) is False def test_type_var_has_default(): T_WithoutDefault = typing_extensions.TypeVar("T_WithoutDefault") T_WithDefault = typing_extensions.TypeVar("T_WithDefault", default=int) T_WithDefaultNone = typing_extensions.TypeVar( "T_WithDefaultNone", default=None ) assert not type_var_has_default(T_WithoutDefault) assert type_var_has_default(T_WithDefault) assert type_var_has_default(T_WithDefaultNone) def test_get_type_var_default(): T_WithDefault = typing_extensions.TypeVar("T_WithDefault", default=int) T_WithDefaultNone = typing_extensions.TypeVar( "T_WithDefaultNone", default=None ) assert get_type_var_default(T_WithDefault) is int assert get_type_var_default(T_WithDefaultNone) is None mashumaro-3.13.1/tests/test_metadata_options.py000066400000000000000000000362231463331001200217000ustar00rootroot00000000000000from dataclasses import dataclass, field from datetime import date, datetime, time, timezone from pathlib import Path from typing import Any, Dict, Optional, Union import ciso8601 import pytest from mashumaro import DataClassDictMixin from mashumaro.core.const import PY_312_MIN, 
PY_313_MIN from mashumaro.exceptions import ( UnserializableField, UnsupportedDeserializationEngine, UnsupportedSerializationEngine, ) from mashumaro.types import SerializationStrategy from .entities import ( MutableString, MyList, MyNamedTuple, MyNamedTupleWithDefaults, MyUntypedNamedTuple, MyUntypedNamedTupleWithDefaults, ThirdPartyType, TIntStr, TypedDictRequiredKeys, ) def test_ciso8601_datetime_parser(): @dataclass class DataClass(DataClassDictMixin): x: datetime = field(metadata={"deserialize": "ciso8601"}) should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"}) assert instance == should_be def test_ciso8601_date_parser(): @dataclass class DataClass(DataClassDictMixin): x: date = field(metadata={"deserialize": "ciso8601"}) should_be = DataClass(x=date(2021, 1, 2)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"}) assert instance == should_be def test_ciso8601_time_parser(): @dataclass class DataClass(DataClassDictMixin): x: time = field(metadata={"deserialize": "ciso8601"}) should_be = DataClass(x=time(3, 4, 5)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"}) assert instance == should_be @pytest.mark.skipif(PY_313_MIN, reason="pendulum doesn't install on 3.13") def test_pendulum_datetime_parser(): @dataclass class DataClass(DataClassDictMixin): x: datetime = field(metadata={"deserialize": "pendulum"}) should_be = DataClass(x=datetime(2008, 12, 29, 7, tzinfo=timezone.utc)) instance = DataClass.from_dict({"x": "2009-W01 07:00"}) assert instance == should_be @pytest.mark.skipif(PY_313_MIN, reason="pendulum doesn't install on 3.13") def test_pendulum_date_parser(): @dataclass class DataClass(DataClassDictMixin): x: date = field(metadata={"deserialize": "pendulum"}) should_be = DataClass(x=date(2008, 12, 29)) instance = DataClass.from_dict({"x": "2009-W01"}) assert instance == should_be @pytest.mark.skipif(PY_313_MIN, reason="pendulum doesn't install on 3.13") def test_pendulum_time_parser(): @dataclass class DataClass(DataClassDictMixin): x: time = field(metadata={"deserialize": "pendulum"}) should_be = DataClass(x=time(3, 4, 5)) instance = DataClass.from_dict({"x": "2009-W01 03:04:05"}) assert instance == should_be def test_unsupported_datetime_parser_engine(): with pytest.raises(UnsupportedDeserializationEngine): @dataclass class DataClass(DataClassDictMixin): x: datetime = field(metadata={"deserialize": "unsupported"}) def test_global_function_datetime_parser(): @dataclass class DataClass(DataClassDictMixin): x: datetime = field( metadata={"deserialize": ciso8601.parse_datetime_as_naive} ) should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05+03:00"}) assert instance == should_be def test_local_function_datetime_parser(): def parse_dt(s): return ciso8601.parse_datetime_as_naive(s) @dataclass class DataClass(DataClassDictMixin): x: datetime = field(metadata={"deserialize": parse_dt}) should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05+03:00"}) assert instance == should_be def test_class_method_datetime_parser(): class DateTimeParser: @classmethod def parse_dt(cls, s: str) -> datetime: return datetime.fromisoformat(s) @dataclass class DataClass(DataClassDictMixin): x: datetime = field(metadata={"deserialize": DateTimeParser.parse_dt}) should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05"}) assert 
instance == should_be def test_class_instance_method_datetime_parser(): class DateTimeParser: def __call__(self, s: str) -> datetime: return datetime.fromisoformat(s) @dataclass class DataClass(DataClassDictMixin): x: datetime = field(metadata={"deserialize": DateTimeParser()}) should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05"}) assert instance == should_be def test_callable_class_instance_datetime_parser(): class CallableDateTimeParser: def __call__(self, s): return ciso8601.parse_datetime(s) @dataclass class DataClass(DataClassDictMixin): x: datetime = field(metadata={"deserialize": CallableDateTimeParser()}) should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"}) assert instance == should_be def test_lambda_datetime_parser(): @dataclass class DataClass(DataClassDictMixin): x: datetime = field( metadata={"deserialize": lambda s: ciso8601.parse_datetime(s)} ) should_be = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc)) instance = DataClass.from_dict({"x": "2021-01-02T03:04:05Z"}) assert instance == should_be def test_derived_dataclass_metadata_deserialize_option(): @dataclass class A: x: datetime = field(metadata={"deserialize": ciso8601.parse_datetime}) @dataclass class B(A, DataClassDictMixin): y: datetime = field(metadata={"deserialize": ciso8601.parse_datetime}) should_be = B( x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc), y=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc), ) instance = B.from_dict( {"x": "2021-01-02T03:04:05Z", "y": "2021-01-02T03:04:05Z"} ) assert instance == should_be def test_derived_dataclass_metadata_deserialize_option_removed(): class MyClass: pass @dataclass class A: x: MyClass = field( metadata={"deserialize": MyClass, "serialize": lambda obj: obj.i} ) with pytest.raises(UnserializableField): @dataclass class _(A, DataClassDictMixin): x: MyClass def test_bytearray_overridden(): @dataclass class DataClass(DataClassDictMixin): x: bytearray = field( metadata={"deserialize": lambda s: s.upper().encode()} ) should_be = DataClass(x=bytearray(b"ABC")) instance = DataClass.from_dict({"x": "abc"}) assert instance == should_be def test_path_like_overridden(): @dataclass class DataClass(DataClassDictMixin): x: Path = field( metadata={"deserialize": lambda s: Path(str(s).upper())} ) should_be = DataClass(x=Path("/ABC")) instance = DataClass.from_dict({"x": "/abc"}) assert instance == should_be def test_datetime_serialize_option(): @dataclass class DataClass(DataClassDictMixin): x: datetime = field( metadata={"serialize": lambda v: v.strftime("%Y-%m-%d %H:%M:%S")} ) should_be = {"x": "2021-01-02 03:04:05"} instance = DataClass(x=datetime(2021, 1, 2, 3, 4, 5, tzinfo=timezone.utc)) assert instance.to_dict() == should_be def test_third_party_type_overridden(): @dataclass class DataClass(DataClassDictMixin): x: ThirdPartyType = field( metadata={ "deserialize": lambda v: ThirdPartyType(v), "serialize": lambda v: v.value, } ) should_be = DataClass(x=ThirdPartyType(123)) instance = DataClass.from_dict({"x": 123}) assert instance == should_be assert instance.to_dict() == {"x": 123} def test_serializable_type_overridden(): @dataclass class DataClass(DataClassDictMixin): x: MutableString = field( metadata={ "deserialize": lambda s: MutableString(s.upper()), "serialize": lambda v: str(v).lower(), } ) should_be = DataClass(x=MutableString("ABC")) instance = DataClass.from_dict({"x": "abc"}) assert instance == 
should_be assert instance.to_dict() == {"x": "abc"} def test_optional_overridden(): @dataclass class DataClass(DataClassDictMixin): x: Optional[ThirdPartyType] = field( metadata={ "deserialize": lambda v: ThirdPartyType(v), "serialize": lambda v: v.value, } ) instance = DataClass.from_dict({"x": 123}) assert instance assert instance.x.value == 123 dct = instance.to_dict() assert dct["x"] == 123 def test_union_overridden(): @dataclass class DataClass(DataClassDictMixin): x: Union[float, int] = field( metadata={ "deserialize": lambda v: int(v), "serialize": lambda v: int(v), } ) instance = DataClass.from_dict({"x": 1.0}) assert instance == DataClass(x=1) assert instance.to_dict() == {"x": 1} for attr in dir(DataClass): assert not attr.startswith("__unpack_union") assert not attr.startswith("__pack_union") def test_type_var_overridden(): @dataclass class DataClass(DataClassDictMixin): x: TIntStr = field( metadata={ "deserialize": lambda v: v * 2, "serialize": lambda v: v * 2, } ) instance = DataClass.from_dict({"x": "a"}) assert instance == DataClass(x="aa") assert instance.to_dict() == {"x": "aaaa"} for attr in dir(DataClass): assert not attr.startswith("__unpack_type_var") assert not attr.startswith("__pack_type_var") def test_serialization_strategy(): class TestSerializationStrategy(SerializationStrategy): def serialize(self, value): return [value] def deserialize(self, value): return value[0] @dataclass class DataClass(DataClassDictMixin): x: int = field( metadata={"serialization_strategy": TestSerializationStrategy()} ) instance = DataClass(x=123) assert DataClass.from_dict({"x": [123]}) == instance assert instance.to_dict() == {"x": [123]} def test_collection_derived_custom_class(): @dataclass class DataClass(DataClassDictMixin): x: MyList = field( metadata={"serialize": lambda v: v, "deserialize": lambda v: v} ) instance = DataClass(x=[1, 2, 3]) assert DataClass.from_dict({"x": [1, 2, 3]}) == instance assert instance.to_dict() == {"x": [1, 2, 3]} def test_dataclass_with_typed_dict_overridden(): def serialize_x(x: TypedDictRequiredKeys) -> Dict[str, Any]: return {"int": int(x["int"]), "float": float(x["float"])} def deserialize_x(x: Dict[str, Any]) -> TypedDictRequiredKeys: return TypedDictRequiredKeys(int=x["int"], float=x["float"]) @dataclass class DataClass(DataClassDictMixin): x: TypedDictRequiredKeys = field( metadata={"serialize": serialize_x, "deserialize": deserialize_x} ) obj = DataClass(x=TypedDictRequiredKeys(int=1, float=2.0)) data = {"x": {"int": 1, "float": 2.0}} assert DataClass.from_dict(data) == obj assert obj.to_dict() == data def test_named_tuple_as_dict_engine(): @dataclass class DataClass(DataClassDictMixin): x: MyNamedTuple = field( metadata={"serialize": "as_dict", "deserialize": "as_dict"} ) obj = DataClass(x=MyNamedTuple(i=1, f=2.0)) assert obj.to_dict() == {"x": {"i": 1, "f": 2.0}} assert DataClass.from_dict({"x": {"i": 1, "f": 2.0}}) == obj def test_named_tuple_with_defaults_as_dict_engine(): @dataclass class DataClass(DataClassDictMixin): x: MyNamedTupleWithDefaults = field( metadata={"serialize": "as_dict", "deserialize": "as_dict"} ) obj = DataClass(x=MyNamedTupleWithDefaults(i=1, f=2.0)) assert obj.to_dict() == {"x": {"i": 1, "f": 2.0}} assert DataClass.from_dict({"x": {"i": 1, "f": 2.0}}) == obj def test_untyped_named_tuple_as_dict_engine(): @dataclass class DataClass(DataClassDictMixin): x: MyUntypedNamedTuple = field( metadata={"serialize": "as_dict", "deserialize": "as_dict"} ) obj = DataClass(x=MyUntypedNamedTuple(i=1, f=2.0)) assert obj.to_dict() 
== {"x": {"i": 1, "f": 2.0}} assert DataClass.from_dict({"x": {"i": 1, "f": 2.0}}) == obj def test_untyped_named_tuple_with_defaults_as_dict_engine(): @dataclass class DataClass(DataClassDictMixin): x: MyUntypedNamedTupleWithDefaults = field( metadata={"serialize": "as_dict", "deserialize": "as_dict"} ) obj = DataClass(x=MyUntypedNamedTupleWithDefaults(i=1, f=2.0)) assert obj.to_dict() == {"x": {"i": 1, "f": 2.0}} assert DataClass.from_dict({"x": {"i": 1, "f": 2.0}}) == obj def test_unsupported_named_tuple_deserialization_engine(): with pytest.raises(UnsupportedDeserializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyNamedTuple = field(metadata={"deserialize": "unsupported"}) with pytest.raises(UnsupportedDeserializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyNamedTupleWithDefaults = field( metadata={"deserialize": "unsupported"} ) with pytest.raises(UnsupportedDeserializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyUntypedNamedTuple = field( metadata={"deserialize": "unsupported"} ) with pytest.raises(UnsupportedDeserializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyUntypedNamedTupleWithDefaults = field( metadata={"deserialize": "unsupported"} ) def test_unsupported_named_tuple_serialization_engine(): with pytest.raises(UnsupportedSerializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyNamedTuple = field(metadata={"serialize": "unsupported"}) with pytest.raises(UnsupportedSerializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyNamedTupleWithDefaults = field( metadata={"serialize": "unsupported"} ) with pytest.raises(UnsupportedSerializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyUntypedNamedTuple = field( metadata={"serialize": "unsupported"} ) with pytest.raises(UnsupportedSerializationEngine): @dataclass class DataClass(DataClassDictMixin): x: MyUntypedNamedTupleWithDefaults = field( metadata={"serialize": "unsupported"} ) def test_field_metadata_omit_engine(): @dataclass class DataClass(DataClassDictMixin): x: int = field(metadata={"serialize": "omit"}) obj = DataClass(x=42) assert obj.to_dict() == {} assert DataClass.from_dict({"x": 42}) == obj mashumaro-3.13.1/tests/test_msgpack.py000066400000000000000000000114071463331001200177670ustar00rootroot00000000000000from dataclasses import dataclass from datetime import datetime from typing import Dict, List import msgpack from mashumaro import DataClassDictMixin from mashumaro.config import ADD_DIALECT_SUPPORT, BaseConfig from mashumaro.dialect import Dialect from mashumaro.mixins.msgpack import DataClassMessagePackMixin class MyDialect(Dialect): serialization_strategy = { bytes: { "serialize": lambda x: x.decode(), "deserialize": lambda x: x.encode(), } } @dataclass class InnerDataClass(DataClassDictMixin): x: bytes @dataclass class DataClass(DataClassMessagePackMixin): x: bytes inner: InnerDataClass def test_data_class_msgpack_mixin(): assert DataClassMessagePackMixin.from_msgpack(b"") is None assert DataClassMessagePackMixin().to_msgpack() is None def test_to_msgpack(): @dataclass class DataClass(DataClassMessagePackMixin): x: List[int] dumped = msgpack.packb({"x": [1, 2, 3]}) assert DataClass([1, 2, 3]).to_msgpack() == dumped def test_from_msgpack(): @dataclass class DataClass(DataClassMessagePackMixin): x: List[int] dumped = msgpack.packb({"x": [1, 2, 3]}) assert DataClass.from_msgpack(dumped) == DataClass([1, 2, 3]) def test_to_msg_pack_datetime(): @dataclass class 
DataClass(DataClassMessagePackMixin): x: datetime dt = datetime(2018, 10, 29, 12, 46, 55, 308495) dumped = msgpack.packb({"x": dt.isoformat()}) assert DataClass(dt).to_msgpack() == dumped def test_msgpack_with_bytes(): @dataclass class DataClass(DataClassMessagePackMixin): x: bytes y: bytearray instance = DataClass(b"123", bytearray(b"456")) dumped = msgpack.packb({"x": b"123", "y": bytearray(b"456")}) assert DataClass.from_msgpack(dumped) == instance assert instance.to_msgpack() == dumped def test_msgpack_with_serialization_strategy(): @dataclass class DataClass(DataClassMessagePackMixin): x: bytes class Config(BaseConfig): serialization_strategy = { bytes: { "serialize": lambda x: x.decode(), "deserialize": lambda x: x.encode(), } } instance = DataClass(b"123") dumped = msgpack.packb({"x": "123"}) assert DataClass.from_dict({"x": "123"}) == instance assert DataClass.from_msgpack(dumped) == instance assert instance.to_dict() == {"x": "123"} assert instance.to_msgpack() == dumped def test_msgpack_with_dialect(): @dataclass class DataClass(DataClassMessagePackMixin): x: bytes class Config(BaseConfig): dialect = MyDialect instance = DataClass(b"123") dumped_dialect = msgpack.packb({"x": "123"}) assert DataClass.from_dict({"x": "123"}) == instance assert DataClass.from_msgpack(dumped_dialect) == instance assert instance.to_dict() == {"x": "123"} assert instance.to_msgpack() == dumped_dialect def test_msgpack_with_dialect_support(): @dataclass class DataClass(DataClassMessagePackMixin): x: bytes class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] instance = DataClass(b"123") dumped = msgpack.packb({"x": b"123"}) dumped_dialect = msgpack.packb({"x": "123"}) assert DataClass.from_dict({"x": "MTIz\n"}) == instance assert DataClass.from_dict({"x": "123"}, dialect=MyDialect) == instance assert DataClass.from_msgpack(dumped) == instance assert ( DataClass.from_msgpack(dumped_dialect, dialect=MyDialect) == instance ) assert instance.to_dict() == {"x": "MTIz\n"} assert instance.to_dict(dialect=MyDialect) == {"x": "123"} assert instance.to_msgpack() == dumped assert instance.to_msgpack(dialect=MyDialect) == dumped_dialect def test_msgpack_with_custom_encoder_and_decoder(): def decoder(data) -> Dict[str, bytes]: def to_lower(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = to_lower(v) else: result[k] = v.lower() return result return to_lower(msgpack.loads(data)) def encoder(data: Dict[str, bytes]) -> bytes: def to_upper(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = to_upper(v) else: result[k] = v.upper() return result return msgpack.dumps(to_upper(data)) instance = DataClass(b"abc", InnerDataClass(b"def")) dumped = msgpack.packb({"x": b"ABC", "inner": {"x": b"DEF"}}) assert instance.to_msgpack(encoder=encoder) == dumped assert DataClass.from_msgpack(dumped, decoder=decoder) == instance mashumaro-3.13.1/tests/test_orjson.py000066400000000000000000000167211463331001200176600ustar00rootroot00000000000000from dataclasses import dataclass from datetime import date, datetime, time from typing import Dict, List from uuid import UUID, uuid4 import orjson import pytest from mashumaro import DataClassDictMixin from mashumaro.config import ADD_DIALECT_SUPPORT, BaseConfig from mashumaro.dialect import Dialect from mashumaro.mixins.orjson import DataClassORJSONMixin serialization_strategy = { datetime: { "serialize": lambda dt: dt.strftime("%Y/%m/%d/%H/%M/%S"), "deserialize": lambda s: datetime.strptime(s, "%Y/%m/%d/%H/%M/%S"), }, date: { 
"serialize": date.toordinal, "deserialize": date.fromordinal, }, time: { "serialize": lambda t: t.strftime("%H/%M/%S"), "deserialize": lambda s: datetime.strptime(s, "%H/%M/%S").time(), }, UUID: { "serialize": lambda x: f"uuid:{x}", "deserialize": lambda s: UUID(s[5:]), }, } class MyDialect(Dialect): serialization_strategy = serialization_strategy @dataclass class InnerDataClass(DataClassDictMixin): x: str @dataclass class DataClass(DataClassORJSONMixin): x: str inner: InnerDataClass def test_data_class_orjson_mixin(): assert DataClassORJSONMixin.from_json("") is None assert DataClassORJSONMixin().to_jsonb() is None def test_to_orjson(): @dataclass class DataClass(DataClassORJSONMixin): x: List[int] dumped = orjson.dumps({"x": [1, 2, 3]}) assert DataClass([1, 2, 3]).to_jsonb() == dumped assert DataClass([1, 2, 3]).to_json() == dumped.decode() def test_from_orjson(): @dataclass class DataClass(DataClassORJSONMixin): x: List[int] dumped = orjson.dumps({"x": [1, 2, 3]}) assert DataClass.from_json(dumped) == DataClass([1, 2, 3]) def test_orjson_with_serialization_strategy(): @dataclass class DataClass(DataClassORJSONMixin): datetime: List[datetime] date: List[date] time: List[time] uuid: List[UUID] class Config(BaseConfig): serialization_strategy = serialization_strategy _datetime = datetime(2022, 10, 12, 12, 54, 30) _date = date(2022, 10, 12) _time = time(12, 54, 30) _uuid = uuid4() _datetime_dumped = _datetime.strftime("%Y/%m/%d/%H/%M/%S") _date_dumped = _date.toordinal() _time_dumped = _time.strftime("%H/%M/%S") _uuid_dumped = f"uuid:{_uuid}" instance = DataClass([_datetime], [_date], [_time], [_uuid]) dict_dumped = { "datetime": [_datetime_dumped], "date": [_date_dumped], "time": [_time_dumped], "uuid": [_uuid_dumped], } orjson_dumped = orjson.dumps( { "datetime": [_datetime_dumped], "date": [_date_dumped], "time": [_time_dumped], "uuid": [_uuid_dumped], } ) assert DataClass.from_dict(dict_dumped) == instance assert DataClass.from_json(orjson_dumped) == instance assert instance.to_dict() == dict_dumped assert instance.to_jsonb() == orjson_dumped def test_orjson_with_dialect(): @dataclass class DataClass(DataClassORJSONMixin): datetime: List[datetime] date: List[date] time: List[time] uuid: List[UUID] class Config(BaseConfig): dialect = MyDialect _datetime = datetime(2022, 10, 12, 12, 54, 30) _date = date(2022, 10, 12) _time = time(12, 54, 30) _uuid = uuid4() _datetime_dumped = _datetime.strftime("%Y/%m/%d/%H/%M/%S") _date_dumped = _date.toordinal() _time_dumped = _time.strftime("%H/%M/%S") _uuid_dumped = f"uuid:{_uuid}" instance = DataClass([_datetime], [_date], [_time], [_uuid]) dict_dumped = { "datetime": [_datetime_dumped], "date": [_date_dumped], "time": [_time_dumped], "uuid": [_uuid_dumped], } orjson_dumped = orjson.dumps( { "datetime": [_datetime_dumped], "date": [_date_dumped], "time": [_time_dumped], "uuid": [_uuid_dumped], } ) assert DataClass.from_dict(dict_dumped) == instance assert DataClass.from_json(orjson_dumped) == instance assert instance.to_dict() == dict_dumped assert instance.to_jsonb() == orjson_dumped def test_orjson_with_dialect_support(): @dataclass class DataClass(DataClassORJSONMixin): datetime: List[datetime] date: List[date] time: List[time] uuid: List[UUID] class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] _datetime = datetime(2022, 10, 12, 12, 54, 30) _date = date(2022, 10, 12) _time = time(12, 54, 30) _uuid = uuid4() _datetime_dumped = _datetime.strftime("%Y/%m/%d/%H/%M/%S") _date_dumped = _date.toordinal() _time_dumped = 
_time.strftime("%H/%M/%S") _uuid_dumped = f"uuid:{_uuid}" instance = DataClass([_datetime], [_date], [_time], [_uuid]) dict_dumped = { "datetime": [_datetime.isoformat()], "date": [_date.isoformat()], "time": [_time.isoformat()], "uuid": [str(_uuid)], } dict_dumped_dialect = { "datetime": [_datetime_dumped], "date": [_date_dumped], "time": [_time_dumped], "uuid": [_uuid_dumped], } orjson_dumped = orjson.dumps( { "datetime": [_datetime], "date": [_date], "time": [_time], "uuid": [_uuid], } ) orjson_dumped_dialect = orjson.dumps( { "datetime": [_datetime_dumped], "date": [_date_dumped], "time": [_time_dumped], "uuid": [_uuid_dumped], } ) assert DataClass.from_dict(dict_dumped) == instance assert ( DataClass.from_dict(dict_dumped_dialect, dialect=MyDialect) == instance ) assert DataClass.from_json(orjson_dumped) == instance assert ( DataClass.from_json(orjson_dumped_dialect, dialect=MyDialect) == instance ) assert instance.to_dict() == dict_dumped assert instance.to_dict(dialect=MyDialect) == dict_dumped_dialect assert instance.to_jsonb() == orjson_dumped assert instance.to_jsonb(dialect=MyDialect) == orjson_dumped_dialect def test_orjson_with_custom_encoder_and_decoder(): def decoder(data) -> Dict[str, bytes]: def to_lower(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = to_lower(v) else: result[k] = v.lower() return result return to_lower(orjson.loads(data)) def encoder(data: Dict[str, bytes], **_) -> bytes: def to_upper(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = to_upper(v) else: result[k] = v.upper() return result return orjson.dumps(to_upper(data)) instance = DataClass("abc", InnerDataClass("def")) dumped = orjson.dumps({"x": "ABC", "inner": {"x": "DEF"}}) assert instance.to_jsonb(encoder=encoder) == dumped assert DataClass.from_json(dumped, decoder=decoder) == instance def test_to_orjson_with_non_str_keys(): @dataclass class DataClass(DataClassORJSONMixin): x: Dict[int, int] instance = DataClass({1: 2}) dumped = b'{"x":{"1":2}}' with pytest.raises(TypeError): instance.to_jsonb() assert instance.to_jsonb(orjson_options=orjson.OPT_NON_STR_KEYS) == dumped assert DataClass.from_json(dumped) == instance mashumaro-3.13.1/tests/test_pep_563.py000066400000000000000000000167411463331001200175310ustar00rootroot00000000000000from __future__ import annotations from dataclasses import dataclass from typing import Dict import msgpack import orjson import pytest from mashumaro import DataClassDictMixin from mashumaro.config import ADD_DIALECT_SUPPORT, BaseConfig from mashumaro.dialect import Dialect from mashumaro.exceptions import UnresolvedTypeReferenceError from mashumaro.mixins.msgpack import DataClassMessagePackMixin from mashumaro.mixins.orjson import DataClassORJSONMixin from .conftest import add_unpack_method @dataclass class A(DataClassDictMixin): x: B @dataclass class B(DataClassDictMixin): x: int @dataclass class Base(DataClassDictMixin): pass @dataclass class A1(Base): a: B1 @dataclass class A2(Base): a: B2 @dataclass class A3(Base): a: B1 x: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] @dataclass class B1(Base): b: int @dataclass class AMessagePack(DataClassMessagePackMixin): x: BMessagePack @dataclass class BMessagePack(DataClassMessagePackMixin): x: int @dataclass class BaseMessagePack(DataClassMessagePackMixin): pass @dataclass class A1MessagePack(BaseMessagePack): a: B1MessagePack @dataclass class A2MessagePack(BaseMessagePack): a: B2 @dataclass class A3MessagePack(BaseMessagePack): a: 
B1MessagePack x: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] @dataclass class B1MessagePack(BaseMessagePack): b: int @dataclass class A3ORJSON(DataClassORJSONMixin): a: B1ORJSON x: int class Config(BaseConfig): code_generation_options = [ADD_DIALECT_SUPPORT] @dataclass class B1ORJSON(DataClassORJSONMixin): b: int def test_postponed_annotation_evaluation(): obj = A(x=B(x=1)) assert obj.to_dict() == {"x": {"x": 1}} assert A.from_dict({"x": {"x": 1}}) == obj def test_unresolved_type_with_allowed_postponed_annotation_evaluation(): @dataclass class DataClass(DataClassDictMixin): x: X with pytest.raises(UnresolvedTypeReferenceError): DataClass.from_dict({}) with pytest.raises(UnresolvedTypeReferenceError): DataClass(x=1).to_dict() def test_unresolved_type_with_disallowed_postponed_annotation_evaluation(): with pytest.raises(UnresolvedTypeReferenceError): @dataclass class DataClass(DataClassDictMixin): x: X class Config(BaseConfig): allow_postponed_evaluation = False with add_unpack_method: with pytest.raises(UnresolvedTypeReferenceError): @dataclass class DataClass(DataClassDictMixin): x: X class Config(BaseConfig): allow_postponed_evaluation = False def test_postponed_annotation_evaluation_with_parent(): obj = A1(B1(1)) assert A1.from_dict({"a": {"b": 1}}) == obj assert obj.to_dict() == {"a": {"b": 1}} def test_postponed_annotation_evaluation_with_parent_and_no_reference(): with pytest.raises(UnresolvedTypeReferenceError): A2.from_dict({"a": {"b": 1}}) with pytest.raises(UnresolvedTypeReferenceError): A2(None).to_dict() def test_postponed_annotation_evaluation_with_parent_and_dialect(): class MyDialect(Dialect): serialization_strategy = { int: { "serialize": lambda i: str(int(i * 1000)), "deserialize": lambda s: int(int(s) / 1000), } } obj = A3(B1(1), 2) assert A3.from_dict({"a": {"b": 1}, "x": 2}) == obj assert A3.from_dict({"a": {"b": 1}, "x": "2000"}, dialect=MyDialect) == obj assert obj.to_dict() == {"a": {"b": 1}, "x": 2} assert obj.to_dict(dialect=MyDialect) == {"a": {"b": 1}, "x": "2000"} def test_postponed_annotation_evaluation_msgpack(): obj = AMessagePack(x=BMessagePack(x=1)) assert obj.to_dict() == {"x": {"x": 1}} assert AMessagePack.from_dict({"x": {"x": 1}}) == obj dump = msgpack.dumps({"x": {"x": 1}}) assert obj.to_msgpack() == dump assert AMessagePack.from_msgpack(dump) == obj def test_unresolved_type_with_allowed_postponed_annotation_evaluation_msgpack(): @dataclass class DataClass(DataClassMessagePackMixin): x: X with pytest.raises(UnresolvedTypeReferenceError): DataClass.from_msgpack(b"") with pytest.raises(UnresolvedTypeReferenceError): DataClass(x=1).to_msgpack() def test_postponed_annotation_evaluation_with_parent_msgpack(): obj = A1MessagePack(B1MessagePack(1)) dump = msgpack.dumps({"a": {"b": 1}}) assert A1MessagePack.from_msgpack(dump) == obj assert obj.to_msgpack() == dump def test_postponed_annotation_evaluation_with_parent_and_no_reference_msgpack(): with pytest.raises(UnresolvedTypeReferenceError): A2MessagePack.from_msgpack(b"") with pytest.raises(UnresolvedTypeReferenceError): A2MessagePack(None).to_msgpack() def test_postponed_annotation_evaluation_with_parent_and_dialect_msgpack(): class MyDialect(Dialect): serialization_strategy = { int: { "serialize": lambda i: str(int(i * 1000)), "deserialize": lambda s: int(int(s) / 1000), } } obj = A3MessagePack(B1MessagePack(1), 2) dump = msgpack.dumps({"a": {"b": 1}, "x": 2}) dump_dialect = msgpack.dumps({"a": {"b": 1}, "x": "2000"}) assert A3MessagePack.from_msgpack(dump) == obj assert 
A3MessagePack.from_msgpack(dump_dialect, dialect=MyDialect) == obj assert obj.to_msgpack() == dump assert obj.to_msgpack(dialect=MyDialect) == dump_dialect def test_postponed_msgpack_with_custom_encoder_and_decoder(): def decoder(data) -> Dict[str, bytes]: def modify(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = modify(v) else: result[k] = v // 1000 return result return modify(msgpack.loads(data)) def encoder(data: Dict[str, bytes]) -> bytes: def modify(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = modify(v) else: result[k] = v * 1000 return result return msgpack.dumps(modify(data)) instance = A3MessagePack(B1MessagePack(123), 456) dumped = msgpack.packb({"a": {"b": 123000}, "x": 456000}) assert instance.to_msgpack(encoder=encoder) == dumped assert A3MessagePack.from_msgpack(dumped, decoder=decoder) == instance def test_postponed_orjson_with_custom_encoder_and_decoder(): def decoder(data) -> Dict[str, bytes]: def modify(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = modify(v) else: result[k] = v // 1000 return result return modify(orjson.loads(data)) def encoder(data: Dict[str, bytes], **_) -> bytes: def modify(d): result = {} for k, v in d.items(): if isinstance(v, dict): result[k] = modify(v) else: result[k] = v * 1000 return result return orjson.dumps(modify(data)) instance = A3ORJSON(B1ORJSON(123), 456) dumped = orjson.dumps({"a": {"b": 123000}, "x": 456000}) assert instance.to_jsonb(encoder=encoder) == dumped assert A3ORJSON.from_json(dumped, decoder=decoder) == instance mashumaro-3.13.1/tests/test_pep_646.py000066400000000000000000000440471463331001200175330ustar00rootroot00000000000000from dataclasses import dataclass from datetime import date from functools import partial from typing import Generic, Tuple, TypeVar try: from typing import TypeVarTuple except ImportError: from typing_extensions import TypeVarTuple try: from typing import Unpack except ImportError: from typing_extensions import Unpack import pytest from mashumaro import DataClassDictMixin from mashumaro.core.const import PEP_585_COMPATIBLE, PY_311_MIN # noinspection PyProtectedMember from mashumaro.core.meta.helpers import ( _check_generic, _flatten_type_args, resolve_type_params, type_name, ) from mashumaro.core.meta.types.common import FieldContext, ValueSpec # noinspection PyProtectedMember from mashumaro.core.meta.types.pack import pack_tuple # noinspection PyProtectedMember from mashumaro.core.meta.types.unpack import unpack_tuple from mashumaro.exceptions import MissingField K = TypeVar("K") V = TypeVar("V") Ts = TypeVarTuple("Ts") _type_name = partial(type_name, short=True) @dataclass class MyGenericDataClassTs(Generic[Unpack[Ts]], DataClassDictMixin): x: Tuple[int, Unpack[Ts]] class MyGenericTs(Generic[Unpack[Ts]]): pass class MyGenericTsK(Generic[Unpack[Ts], K]): pass def test_check_generic(): with pytest.raises(TypeError) as e: _check_generic(object, (K, Unpack[Ts], Unpack[Ts]), (int,)) assert ( str(e.value) == "Multiple unpacks are disallowed within a single type parameter " "list for object" ) with pytest.raises(TypeError) as e: _check_generic(object, (K, V), (int,)) assert ( str(e.value) == f"Too few arguments for object; actual 1, expected 2" ) with pytest.raises(TypeError) as e: _check_generic(object, (K, Unpack[Ts], V), (int,)) assert ( str(e.value) == "Too few arguments for object; actual 1, expected at least 2" ) def test_dataclass_with_multiple_unpacks(): with pytest.raises(TypeError) as e: @dataclass class 
DataClass(DataClassDictMixin): x: Tuple[Unpack[Tuple[int]], Unpack[Tuple[float]]] typ_name = type_name(Tuple[Unpack[Tuple[int]], Unpack[Tuple[float]]]) assert ( str(e.value) == "Multiple unpacks are disallowed within a single type parameter " f"list for {typ_name}" ) def test_dataclass_with_single_unpack_tuple(): @dataclass class DataClass(DataClassDictMixin): a: Tuple[Unpack[Tuple[int, ...]]] b: Tuple[Unpack[Tuple[int, float, int]]] c: Tuple[Unpack[Tuple[int]]] d: Tuple[Unpack[Tuple[()]]] obj = DataClass( a=(1, 2, 3, 4, 5), b=(1, 2.2, 3), c=(1,), d=(), ) assert obj.to_dict() == { "a": [1, 2, 3, 4, 5], "b": [1, 2.2, 3], "c": [1], "d": [], } assert ( DataClass.from_dict( { "a": ["1", "2", "3", "4", 5.0], "b": ["1", "2.2", "3"], "c": ["1"], "d": ["1", "2", "3"], } ) == obj ) def test_dataclass_with_mixed_unpack_tuple_ellipsis(): @dataclass class DataClass(DataClassDictMixin): a: Tuple[float, Unpack[Tuple[int, ...]]] b: Tuple[Unpack[Tuple[int, ...]], float] c: Tuple[float, Unpack[Tuple[int, ...]], float] obj = DataClass( a=(1.1, 2, 3, 4, 5), b=(1, 2, 3, 4, 5.5), c=(1.1, 2, 3, 4, 5.5), ) assert obj.to_dict() == { "a": [1.1, 2, 3, 4, 5], "b": [1, 2, 3, 4, 5.5], "c": [1.1, 2, 3, 4, 5.5], } assert ( DataClass.from_dict( { "a": ["1.1", "2", "3", "4", 5.0], "b": [1.0, "2", "3", "4", "5.5"], "c": ["1.1", "2", "3", 4.0, 5.5], } ) == obj ) def test_dataclass_with_mixed_unpack_tuple_multiple_args(): @dataclass class DataClass(DataClassDictMixin): a: Tuple[float, Unpack[Tuple[int, float, int]]] b: Tuple[Unpack[Tuple[int, float, int]], float] c: Tuple[float, Unpack[Tuple[int, float, int]], float] obj = DataClass( a=(1.1, 2, 3.3, 4), b=(1, 2.2, 3, 4.4), c=(1.1, 2, 3.3, 4, 5.5), ) assert obj.to_dict() == { "a": [1.1, 2, 3.3, 4], "b": [1, 2.2, 3, 4.4], "c": [1.1, 2, 3.3, 4, 5.5], } assert ( DataClass.from_dict( { "a": ["1.1", "2", 3.3, 4.0], "b": [1.0, 2.2, "3", "4.4"], "c": ["1.1", "2", 3.3, 4.0, "5.5"], } ) == obj ) def test_dataclass_with_mixed_unpack_tuple_one_arg(): @dataclass class DataClass(DataClassDictMixin): a: Tuple[float, Unpack[Tuple[int]]] b: Tuple[Unpack[Tuple[int]], float] c: Tuple[float, Unpack[Tuple[int]], float] obj = DataClass( a=(1.1, 2), b=(1, 2.2), c=(1.1, 2, 3.3), ) assert obj.to_dict() == { "a": [1.1, 2], "b": [1, 2.2], "c": [1.1, 2, 3.3], } assert ( DataClass.from_dict( { "a": ["1.1", 2.0], "b": [1.0, "2.2"], "c": ["1.1", 2.0, 3.3], } ) == obj ) def test_dataclass_with_mixed_unpack_empty_tuple(): @dataclass class DataClass(DataClassDictMixin): a: Tuple[float, Unpack[Tuple[()]]] b: Tuple[Unpack[Tuple[()]], float] c: Tuple[float, Unpack[Tuple[()]], float] obj = DataClass( a=(1.1,), b=(1.1,), c=(1.1, 2.2), ) assert obj.to_dict() == { "a": [1.1], "b": [1.1], "c": [1.1, 2.2], } assert ( DataClass.from_dict( { "a": ["1.1"], "b": ["1.1"], "c": ["1.1", "2.2"], } ) == obj ) @pytest.mark.skipif(not PY_311_MIN, reason="requires python 3.11") def test_type_name_for_unpacks_py_311(): assert _type_name(Tuple[Unpack[Tuple[int, ...]]]) == "Tuple[int, ...]" assert _type_name(Tuple[Unpack[Tuple[int, float]]]) == "Tuple[int, float]" assert ( _type_name(Tuple[int, Unpack[Tuple[float, ...]]]) == "Tuple[int, *Tuple[float, ...]]" ) assert ( _type_name(Tuple[int, Unpack[Tuple[float, str]]]) == "Tuple[int, float, str]" ) assert _type_name(Tuple[Unpack[Tuple[()]]]) == "Tuple[()]" assert _type_name(Tuple[int, Unpack[Tuple[()]]]) == "Tuple[int]" assert ( _type_name(Tuple[str, Unpack[Tuple[int, ...]], int]) == "Tuple[str, *Tuple[int, ...], int]" ) assert ( _type_name(Tuple[str, Unpack[Tuple[Tuple[int], ...]], 
int]) == "Tuple[str, *Tuple[Tuple[int], ...], int]" ) assert ( _type_name( Tuple[str, Unpack[Tuple[Tuple[Unpack[Tuple[int]], ...]]], int] ) == "Tuple[str, Tuple[int, ...], int]" ) assert _type_name(Tuple[Unpack[Ts]]) == "Tuple[*Ts]" assert _type_name(Tuple[int, Unpack[Ts], int]) == "Tuple[int, *Ts, int]" assert _type_name(Generic[Unpack[Ts]]) == "Generic[*Ts]" assert _type_name(Generic[K, Unpack[Ts], V]) == "Generic[Any, *Ts, Any]" assert _type_name(Unpack[Tuple[int]]) == "int" assert _type_name(Unpack[Tuple[int, float]]) == "int, float" assert _type_name(Unpack[Ts]) == "*Ts" assert _type_name(Ts) == "Ts" assert ( _type_name(Tuple[Unpack[Ts], K][Unpack[Tuple[int, ...]]]) == "Tuple[*Tuple[int, ...], int]" ) assert ( _type_name(Tuple[Unpack[Ts], K][Unpack[Tuple[int, ...]], date]) == "Tuple[*Tuple[int, ...], date]" ) assert ( _type_name(Tuple[Unpack[Ts], K][date, Unpack[Tuple[int, ...]]]) == "Tuple[date, *Tuple[int, ...], int]" ) @pytest.mark.skipif(PY_311_MIN, reason="requires python<3.11") def test_type_name_for_unpacks_py_less_than_311(): assert _type_name(Tuple[Unpack[Tuple[int, ...]]]) == "Tuple[int, ...]" assert _type_name(Tuple[Unpack[Tuple[int, float]]]) == "Tuple[int, float]" assert ( _type_name(Tuple[int, Unpack[Tuple[float, ...]]]) == "Tuple[int, Unpack[Tuple[float, ...]]]" ) assert ( _type_name(Tuple[int, Unpack[Tuple[float, str]]]) == "Tuple[int, float, str]" ) assert _type_name(Tuple[Unpack[Tuple[()]]]) == "Tuple[()]" assert _type_name(Tuple[int, Unpack[Tuple[()]]]) == "Tuple[int]" assert ( _type_name(Tuple[str, Unpack[Tuple[int, ...]], int]) == "Tuple[str, Unpack[Tuple[int, ...]], int]" ) assert ( _type_name(Tuple[str, Unpack[Tuple[Tuple[int], ...]], int]) == "Tuple[str, Unpack[Tuple[Tuple[int], ...]], int]" ) assert ( _type_name( Tuple[str, Unpack[Tuple[Tuple[Unpack[Tuple[int]], ...]]], int] ) == "Tuple[str, Tuple[int, ...], int]" ) assert _type_name(Tuple[Unpack[Ts]]) == "Tuple[Unpack[Ts]]" assert ( _type_name(Tuple[int, Unpack[Ts], int]) == "Tuple[int, Unpack[Ts], int]" ) assert _type_name(Generic[Unpack[Ts]]) == "Generic[Unpack[Ts]]" assert ( _type_name(Generic[K, Unpack[Ts], V]) == "Generic[Any, Unpack[Ts], Any]" ) assert _type_name(Unpack[Tuple[int]]) == "int" assert _type_name(Unpack[Tuple[int, float]]) == "int, float" assert _type_name(Unpack[Ts]) == "Unpack[Ts]" assert _type_name(Ts) == "Ts" # this doesn't work on python<3.11 # assert ( # _type_name(Tuple[Unpack[Ts], K][Unpack[Tuple[int, ...]]]) # == "Tuple[Unpack[Tuple[int, ...]], int]" # ) # assert ( # _type_name(Tuple[Unpack[Ts], K][Unpack[Tuple[int, ...]], date]) # == "Tuple[Unpack[Tuple[int, ...]], date]" # ) # assert ( # _type_name(Tuple[Unpack[Ts], K][date, Unpack[Tuple[int, ...]]]) # == "Tuple[date, Unpack[Tuple[int, ...]], int]" # ) def test_concrete_generic_with_empty_tuple(): @dataclass # I tried MyGenericDataClassTs[()] but [()] becomes just (). Impossible. 
    class ConcreteDataClass(MyGenericDataClassTs[Unpack[Tuple[()]]]):
        pass

    obj = ConcreteDataClass((1,))
    assert obj.to_dict() == {"x": [1]}
    assert ConcreteDataClass.from_dict({"x": [1]}) == obj
    assert ConcreteDataClass.from_dict({"x": [1, 2, 3]}) == obj


def test_concrete_generic_with_variable_length_tuple_any():
    @dataclass
    class ConcreteDataClass(MyGenericDataClassTs):
        pass

    obj = ConcreteDataClass((1, "a", date(2022, 12, 17)))
    assert obj.to_dict() == {"x": [1, "a", date(2022, 12, 17)]}
    assert (
        ConcreteDataClass.from_dict({"x": ["1", "a", date(2022, 12, 17)]})
        == obj
    )


def test_concrete_generic_with_replaced_tuple_with_one_arg():
    @dataclass
    class ConcreteDataClass(MyGenericDataClassTs[Unpack[Tuple[K]]]):
        pass

    obj = ConcreteDataClass((1, date(2022, 12, 17)))
    assert obj.to_dict() == {"x": [1, date(2022, 12, 17)]}
    assert (
        ConcreteDataClass.from_dict({"x": ["1", date(2022, 12, 17), "a"]})
        == obj
    )


def test_concrete_generic_with_replaced_tuple_with_multiple_args():
    @dataclass
    class ConcreteDataClass(MyGenericDataClassTs[Unpack[Tuple[float, float]]]):
        pass

    obj = ConcreteDataClass((1, 2.2, 3.3))
    assert obj.to_dict() == {"x": [1, 2.2, 3.3]}
    assert ConcreteDataClass.from_dict({"x": ["1", "2.2", "3.3"]}) == obj


@pytest.mark.skipif(not PY_311_MIN, reason="doesn't work on py<3.11")
def test_with_int_float_tuple_and_any_at_the_end():
    Ts1 = TypeVarTuple("Ts1")
    Ts2 = TypeVarTuple("Ts2")
    IntTuple = Tuple[int, Unpack[Ts1]]
    IntFloatTuple = IntTuple[float, Unpack[Ts2]]

    @dataclass
    class DataClass(DataClassDictMixin):
        x: IntFloatTuple

    obj = DataClass((1, 2.2, "3", date(2022, 12, 17)))
    assert obj.to_dict() == {"x": [1, 2.2, "3", date(2022, 12, 17)]}
    assert (
        DataClass.from_dict({"x": ["1", "2.2", "3", date(2022, 12, 17)]})
        == obj
    )


@pytest.mark.skipif(not PY_311_MIN, reason="doesn't work on py<3.11")
def test_with_int_floats_tuple():
    Ts1 = TypeVarTuple("Ts1")
    IntTuple = Tuple[int, Unpack[Ts1]]
    IntFloatsTuple = IntTuple[Unpack[Tuple[float, ...]]]

    @dataclass
    class DataClass(DataClassDictMixin):
        x: IntFloatsTuple

    obj = DataClass((1, 2.2, 3.3, 4.4))
    assert obj.to_dict() == {"x": [1, 2.2, 3.3, 4.4]}
    assert DataClass.from_dict({"x": ["1", "2.2", "3.3", "4.4"]}) == obj


@pytest.mark.skipif(not PY_311_MIN, reason="doesn't work on py<3.11")
def test_splitting_arbitrary_length_tuples_1():
    Elderberries = Tuple[Unpack[Ts], K]

    @dataclass
    class DataClass(DataClassDictMixin):
        x: Elderberries[Unpack[Tuple[int, ...]]]

    obj = DataClass((1, 2, 3))
    assert obj.to_dict() == {"x": [1, 2, 3]}
    assert DataClass.from_dict({"x": ["1", "2", "3"]}) == obj

    obj = DataClass((1,))
    assert obj.to_dict() == {"x": [1]}
    assert DataClass.from_dict({"x": [1]}) == obj


def test_dataclass_with_splitting_arbitrary_length_tuples_1():
    @dataclass
    class GenericDataClass(Generic[Unpack[Ts], K], DataClassDictMixin):
        x: Tuple[Unpack[Ts], K]

    @dataclass
    class ConcreteDataClass(GenericDataClass[Unpack[Tuple[int, ...]]]):
        pass

    obj = ConcreteDataClass((1, 2, 3))
    assert obj.to_dict() == {"x": [1, 2, 3]}
    assert ConcreteDataClass.from_dict({"x": ["1", "2", "3"]}) == obj

    obj = ConcreteDataClass((1,))
    assert obj.to_dict() == {"x": [1]}
    assert ConcreteDataClass.from_dict({"x": [1]}) == obj


@pytest.mark.skipif(not PY_311_MIN, reason="doesn't work on py<3.11")
def test_splitting_arbitrary_length_tuples_2():
    Elderberries = Tuple[Unpack[Ts], K]

    @dataclass
    class DataClass(DataClassDictMixin):
        x: Elderberries[Unpack[Tuple[int, ...]], date]

    obj = DataClass((1, 2, date(2022, 12, 17)))
    assert obj.to_dict() == {"x": [1, 2, "2022-12-17"]}
    assert DataClass.from_dict({"x": ["1", "2", "2022-12-17"]}) == obj

    obj = DataClass((date(2022, 12, 17),))
    assert obj.to_dict() == {"x": ["2022-12-17"]}
    assert DataClass.from_dict({"x": ["2022-12-17"]}) == obj


def test_dataclass_with_splitting_arbitrary_length_tuples_2():
    @dataclass
    class GenericDataClass(Generic[Unpack[Ts], K], DataClassDictMixin):
        x: Tuple[Unpack[Ts], K]

    @dataclass
    class ConcreteDataClass(GenericDataClass[Unpack[Tuple[int, ...]], date]):
        pass

    obj = ConcreteDataClass((1, 2, date(2022, 12, 17)))
    assert obj.to_dict() == {"x": [1, 2, "2022-12-17"]}
    assert ConcreteDataClass.from_dict({"x": ["1", "2", "2022-12-17"]}) == obj

    obj = ConcreteDataClass((date(2022, 12, 17),))
    assert obj.to_dict() == {"x": ["2022-12-17"]}
    assert ConcreteDataClass.from_dict({"x": ["2022-12-17"]}) == obj


@pytest.mark.skipif(not PY_311_MIN, reason="doesn't work on py<3.11")
def test_splitting_arbitrary_length_tuples_3():
    Elderberries = Tuple[Unpack[Ts], K]

    @dataclass
    class DataClass(DataClassDictMixin):
        x: Elderberries[date, Unpack[Tuple[int, ...]]]

    obj = DataClass((date(2022, 12, 17), 1, 2, 3))
    assert obj.to_dict() == {"x": ["2022-12-17", 1, 2, 3]}
    assert DataClass.from_dict({"x": ["2022-12-17", "1", "2", "3"]}) == obj

    obj = DataClass((date(2022, 12, 17), 1))
    assert obj.to_dict() == {"x": ["2022-12-17", 1]}
    assert DataClass.from_dict({"x": ["2022-12-17", "1"]}) == obj


def test_dataclass_with_splitting_arbitrary_length_tuples_3():
    @dataclass
    class GenericDataClass(Generic[Unpack[Ts], K], DataClassDictMixin):
        x: Tuple[Unpack[Ts], K]

    @dataclass
    class ConcreteDataClass(GenericDataClass[date, Unpack[Tuple[int, ...]]]):
        pass

        class Config:
            debug = True

    obj = ConcreteDataClass((date(2022, 12, 17), 1, 2))
    assert obj.to_dict() == {"x": ["2022-12-17", 1, 2]}
    assert ConcreteDataClass.from_dict({"x": ["2022-12-17", "1", "2"]}) == obj

    obj = ConcreteDataClass((date(2022, 12, 17), 1))
    assert obj.to_dict() == {"x": ["2022-12-17", 1]}
    assert ConcreteDataClass.from_dict({"x": ["2022-12-17", 1]}) == obj


def test_resolve_type_params_with_unpacks():
    assert resolve_type_params(MyGenericTsK, [int, float], False) == {
        MyGenericTsK: {K: float, Unpack[Ts]: Unpack[Tuple[int]]}
    }
    assert resolve_type_params(MyGenericTsK, [int, str, float], False) == {
        MyGenericTsK: {K: float, Unpack[Ts]: Unpack[Tuple[int, str]]}
    }
    assert resolve_type_params(
        MyGenericTsK, [Unpack[Tuple[int, str]], float], False
    ) == {MyGenericTsK: {K: float, Unpack[Ts]: Unpack[Tuple[int, str]]}}
    assert resolve_type_params(
        MyGenericTsK, [Unpack[Tuple[int, ...]]], False
    ) == {MyGenericTsK: {K: int, Unpack[Ts]: Unpack[Tuple[int, ...]]}}
    assert resolve_type_params(
        MyGenericTsK, [str, Unpack[Tuple[int, ...]]], False
    ) == {
        MyGenericTsK: {
            K: int,
            Unpack[Ts]: Unpack[Tuple[str, Unpack[Tuple[int, ...]]]],
        }
    }
    assert resolve_type_params(MyGenericTs, [()], False) == {
        MyGenericTs: {Unpack[Ts]: Unpack[Tuple[()]]}
    }


def test_dataclass_with_tuple_int_and_empty():
    @dataclass
    class ConcreteDataClass(MyGenericDataClassTs[Unpack[Tuple[()]]]):
        pass

    obj = ConcreteDataClass((1,))
    assert obj.to_dict() == {"x": [1]}
    assert ConcreteDataClass.from_dict({"x": [1]}) == obj
    assert ConcreteDataClass.from_dict({"x": [1, 2, 3]}) == obj
    with pytest.raises(MissingField):
        ConcreteDataClass.from_dict({})


def test_unpack_tuple_with_multiple_unpacks():
    spec = ValueSpec(
        type=Tuple,
        expression="value",
        builder=object,
        field_ctx=FieldContext("x", {}),
    )
    with pytest.raises(TypeError):
        unpack_tuple(spec, (Unpack[Tuple[int]], Unpack[Tuple[float]]))
    with pytest.raises(TypeError):
        pack_tuple(spec, (Unpack[Tuple[int]], Unpack[Tuple[float]]))


def test_flatten_type_args_with_empty_tuple():
    assert _flatten_type_args([Unpack[Tuple[()]]]) == [()]
    assert _flatten_type_args([int, Unpack[Tuple[()]]]) == [int]
    if PEP_585_COMPATIBLE:
        assert _flatten_type_args([Unpack[tuple[()]]]) == [()]
        assert _flatten_type_args([int, Unpack[tuple[()]]]) == [int]


# ==== mashumaro-3.13.1/tests/test_pep_655.py ====
from dataclasses import dataclass

import pytest
import typing_extensions

from mashumaro import DataClassDictMixin
from mashumaro.core.meta.helpers import is_not_required, is_required
from mashumaro.exceptions import InvalidFieldValue


class TypedDictCorrectNotRequired(typing_extensions.TypedDict):
    required: int
    not_required: typing_extensions.NotRequired[int]


class TypedDictCorrectRequired(typing_extensions.TypedDict, total=False):
    required: typing_extensions.Required[int]
    not_required: int


def test_is_required():
    assert is_required(typing_extensions.Required[int])
    assert not is_required(typing_extensions.Self)


def test_is_not_required():
    assert is_not_required(typing_extensions.NotRequired[int])
    assert not is_not_required(typing_extensions.Self)


def test_typed_dict_correct_not_required():
    @dataclass
    class MyClass(DataClassDictMixin):
        x: TypedDictCorrectNotRequired

    obj = MyClass({"required": 42})
    assert MyClass.from_dict({"x": {"required": 42}}) == obj
    assert obj.to_dict() == {"x": {"required": 42}}


def test_typed_dict_correct_required():
    @dataclass
    class MyClass(DataClassDictMixin):
        x: TypedDictCorrectRequired

    with pytest.raises(InvalidFieldValue) as exc_info:
        MyClass.from_dict({"x": {}})
    assert exc_info.type is InvalidFieldValue
    assert exc_info.value.holder_class is MyClass
    assert exc_info.value.field_type is TypedDictCorrectRequired
    assert exc_info.value.field_value == {}
    assert exc_info.value.field_name == "x"


# ==== mashumaro-3.13.1/tests/test_pep_695.py ====
from dataclasses import dataclass
from datetime import date

from mashumaro import DataClassDictMixin
from mashumaro.codecs import BasicDecoder, BasicEncoder


def test_type_alias_type_with_dataclass_dict_mixin():
    type MyDate = date

    @dataclass
    class MyClass(DataClassDictMixin):
        x: MyDate

    obj = MyClass(date(2024, 4, 15))
    assert MyClass.from_dict({"x": "2024-04-15"}) == obj
    assert obj.to_dict() == {"x": "2024-04-15"}


def test_type_alias_type_with_codecs():
    type MyDate = date

    decoder = BasicDecoder(MyDate)
    encoder = BasicEncoder(MyDate)

    obj = date(2024, 4, 15)
    assert decoder.decode("2024-04-15") == obj
    assert encoder.encode(obj) == "2024-04-15"


# ==== mashumaro-3.13.1/tests/test_self.py ====
from dataclasses import dataclass
from typing import Optional

import orjson
from typing_extensions import Self

from mashumaro import DataClassDictMixin
from mashumaro.codecs import BasicDecoder, BasicEncoder
from mashumaro.codecs.orjson import ORJSONDecoder, ORJSONEncoder
from mashumaro.mixins.orjson import DataClassORJSONMixin


@dataclass
class DataClassDict(DataClassDictMixin):
    next: Optional[Self] = None


@dataclass
class DataClassDictChild(DataClassDict):
    x: int = 42


@dataclass
class DataClassDictWithoutMixin:
    next: Optional[Self] = None


@dataclass
class DataClassOrjson(DataClassORJSONMixin):
    next: Optional[Self] = None


@dataclass
class DataClassOrjsonChild(DataClassOrjson):
    x: int = 42


def test_dataclass_dict_with_self():
    obj = DataClassDict(DataClassDict())
    assert obj.to_dict() == {"next": {"next": None}}
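    # Note: `Self` resolves against the concrete class being (de)serialized
    # here, so `next` round-trips as a DataClassDict; the DataClassDictChild
    # tests below show the same annotation re-binding to the subclass, which
    # is why the child's `x` field appears in the nested payloads there.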
    assert DataClassDict.from_dict({"next": {"next": None}}) == obj

    assert DataClassDict().to_dict() == {"next": None}
    assert DataClassDict.from_dict({"next": None}) == DataClassDict()
    assert DataClassDict.from_dict({}) == DataClassDict()


def test_dataclass_dict_with_self_without_mixin():
    decoder = BasicDecoder(DataClassDictWithoutMixin)
    encoder = BasicEncoder(DataClassDictWithoutMixin)

    obj = DataClassDictWithoutMixin(DataClassDictWithoutMixin())
    assert encoder.encode(obj) == {"next": {"next": None}}
    assert decoder.decode({"next": {"next": None}}) == obj

    assert encoder.encode(DataClassDictWithoutMixin()) == {"next": None}
    assert decoder.decode({"next": None}) == DataClassDictWithoutMixin()
    assert decoder.decode({}) == DataClassDictWithoutMixin()


def test_dataclass_dict_child_with_self():
    obj = DataClassDictChild(DataClassDictChild())
    assert obj.to_dict() == {"x": 42, "next": {"x": 42, "next": None}}
    assert DataClassDictChild.from_dict({"next": {"next": None}}) == obj

    assert DataClassDictChild().to_dict() == {"x": 42, "next": None}
    assert DataClassDictChild.from_dict({"next": None}) == DataClassDictChild()
    assert DataClassDictChild.from_dict({}) == DataClassDictChild()


def test_dataclass_orjson_with_self():
    obj = DataClassOrjson(DataClassOrjson())
    assert obj.to_dict() == {"next": {"next": None}}
    assert DataClassOrjson.from_dict({"next": {"next": None}}) == obj

    assert DataClassOrjson().to_dict() == {"next": None}
    assert DataClassOrjson.from_dict({"next": None}) == DataClassOrjson()
    assert DataClassOrjson.from_dict({}) == DataClassOrjson()

    dump = orjson.dumps({"next": {"next": None}})
    assert obj.to_jsonb() == dump
    assert DataClassOrjson.from_json(dump) == obj

    dump = orjson.dumps({"next": None})
    assert DataClassOrjson().to_jsonb() == dump
    assert DataClassOrjson.from_json(dump) == DataClassOrjson()
    assert DataClassOrjson.from_json(b"{}") == DataClassOrjson()


def test_dataclass_orjson_with_self_without_mixin():
    decoder = ORJSONDecoder(DataClassDictWithoutMixin)
    encoder = ORJSONEncoder(DataClassDictWithoutMixin)

    obj = DataClassDictWithoutMixin(DataClassDictWithoutMixin())
    dump = orjson.dumps({"next": {"next": None}})
    assert encoder.encode(obj) == dump
    assert decoder.decode(dump) == obj

    dump = orjson.dumps({"next": None})
    assert encoder.encode(DataClassDictWithoutMixin()) == dump
    assert decoder.decode(dump) == DataClassDictWithoutMixin()
    assert decoder.decode(b"{}") == DataClassDictWithoutMixin()


def test_dataclass_orjson_child_with_self():
    obj = DataClassOrjsonChild(DataClassOrjsonChild())
    assert obj.to_dict() == {"x": 42, "next": {"x": 42, "next": None}}
    assert DataClassOrjsonChild.from_dict({"next": {"next": None}}) == obj

    assert DataClassOrjsonChild().to_dict() == {"x": 42, "next": None}
    assert (
        DataClassOrjsonChild.from_dict({"next": None})
        == DataClassOrjsonChild()
    )
    assert DataClassOrjsonChild.from_dict({}) == DataClassOrjsonChild()

    dump = orjson.dumps({"next": {"next": None, "x": 42}, "x": 42})
    assert obj.to_jsonb() == dump
    assert DataClassOrjsonChild.from_json(dump) == obj

    dump = orjson.dumps({"next": None, "x": 42})
    assert DataClassOrjsonChild().to_jsonb() == dump
    assert DataClassOrjsonChild.from_json(dump) == DataClassOrjsonChild()
    assert DataClassOrjsonChild.from_json(b"{}") == DataClassOrjsonChild()


# ==== mashumaro-3.13.1/tests/test_serialization_strategy.py ====
from dataclasses import dataclass, field
from datetime import date
from typing import Generic, List, Sequence, TypeVar

from mashumaro import DataClassDictMixin, field_options
from mashumaro.types import SerializationStrategy

T = TypeVar("T")


class TruncatedGenericListSerializationStrategy(
    SerializationStrategy, Generic[T]
):
    def serialize(self, value: List[T]) -> Sequence[T]:
        return value[:-1]

    def deserialize(self, value: Sequence[T]) -> Sequence[T]:
        return value[:-1]


class TruncatedAnnotatedDateListSerializationStrategy(
    SerializationStrategy, use_annotations=True
):
    def serialize(self, value) -> Sequence[date]:
        return value[:-1]

    def deserialize(self, value: Sequence[date]):
        return value[:-1]


class TruncatedDateListSerializationStrategy(SerializationStrategy):
    def serialize(self, value) -> Sequence[date]:
        return value[:-1]

    def deserialize(self, value: Sequence[date]):
        return value[:-1]


class TruncatedUnannotatedListSerializationStrategy(
    SerializationStrategy, use_annotations=True
):
    def serialize(self, value):
        return value[:-1]

    def deserialize(self, value):
        return value[:-1]


def test_generic_list_serialization_strategy():
    @dataclass
    class MyDataClass(DataClassDictMixin):
        x: List[int]
        y: List[List[int]]

        class Config:
            serialization_strategy = {
                list: TruncatedGenericListSerializationStrategy()
            }

    obj = MyDataClass([1, 2, 3, 4, 5], [[1, 2], [3, 4]])
    assert obj.to_dict() == {"x": [1, 2, 3, 4], "y": [[1]]}
    assert MyDataClass.from_dict(
        {"x": ["1", "2", "3", "4"], "y": [[1, 2], [3, 4]]}
    ) == MyDataClass([1, 2, 3], [[1]])


def test_date_list_serialization_strategy_with_use_annotations():
    @dataclass
    class MyDataClass(DataClassDictMixin):
        x: List[date]

        class Config:
            serialization_strategy = {
                list: TruncatedAnnotatedDateListSerializationStrategy()
            }

    obj = MyDataClass(
        [
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
        ]
    )
    assert obj.to_dict() == {
        "x": ["2023-02-12", "2023-02-12", "2023-02-12", "2023-02-12"]
    }
    assert MyDataClass.from_dict(
        {"x": ["2023-02-12", "2023-02-12", "2023-02-12", "2023-02-12"]}
    ) == MyDataClass([date(2023, 2, 12), date(2023, 2, 12), date(2023, 2, 12)])


def test_date_list_serialization_strategy_without_use_annotations():
    @dataclass
    class MyDataClass(DataClassDictMixin):
        x: List[date]

        class Config:
            serialization_strategy = {
                list: TruncatedDateListSerializationStrategy()
            }

    obj = MyDataClass(
        [
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
        ]
    )
    assert obj.to_dict() == {
        "x": [
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
        ]
    }
    assert MyDataClass.from_dict(
        {"x": ["2023-02-12", "2023-02-12", "2023-02-12", "2023-02-12"]}
    ) == MyDataClass(["2023-02-12", "2023-02-12", "2023-02-12"])


def test_date_list_serialization_strategy_use_annotations_without_annotations():
    @dataclass
    class MyDataClass(DataClassDictMixin):
        x: List[date]

        class Config:
            serialization_strategy = {
                list: TruncatedUnannotatedListSerializationStrategy()
            }

    obj = MyDataClass(
        [
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
        ]
    )
    assert obj.to_dict() == {
        "x": [
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
        ]
    }
    assert MyDataClass.from_dict(
        {"x": ["2023-02-12", "2023-02-12", "2023-02-12", "2023-02-12"]}
    ) == MyDataClass(["2023-02-12", "2023-02-12", "2023-02-12"])


def test_date_list_field_serialization_strategy_with_use_annotations():
    @dataclass
    class MyDataClass(DataClassDictMixin):
        x: List[date] = field(
            metadata=field_options(
                serialization_strategy=(
                    TruncatedAnnotatedDateListSerializationStrategy()
                )
            )
        )

    obj = MyDataClass(
        [
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
            date(2023, 2, 12),
        ]
    )
    assert obj.to_dict() == {
        "x": ["2023-02-12", "2023-02-12", "2023-02-12", "2023-02-12"]
    }
    assert MyDataClass.from_dict(
        {"x": ["2023-02-12", "2023-02-12", "2023-02-12", "2023-02-12"]}
    ) == MyDataClass([date(2023, 2, 12), date(2023, 2, 12), date(2023, 2, 12)])


# ==== mashumaro-3.13.1/tests/test_slots.py ====
from dataclasses import dataclass, field

import pytest

from mashumaro import DataClassDictMixin, field_options
from mashumaro.core.const import PY_310_MIN

if not PY_310_MIN:
    pytest.skip("requires python>=3.10", allow_module_level=True)


def test_field_options_in_dataclass_with_slots():
    @dataclass(slots=True)
    class DataClass(DataClassDictMixin):
        x: int = field(metadata=field_options(serialize=str, alias="alias"))

    instance = DataClass(123)
    assert DataClass.from_dict({"alias": 123}) == instance
    assert instance.to_dict() == {"x": "123"}


def test_field_options_in_inherited_dataclass_with_slots():
    @dataclass
    class BaseDataClass(DataClassDictMixin):
        y: int

    @dataclass(slots=True)
    class DataClass(BaseDataClass):
        x: int = field(metadata=field_options(serialize=str, alias="alias"))

    instance = DataClass(x=123, y=456)
    assert DataClass.from_dict({"alias": 123, "y": 456}) == instance
    assert instance.to_dict() == {"x": "123", "y": 456}


def test_no_field_options_in_inherited_dataclass_with_slots():
    @dataclass
    class BaseDataClass(DataClassDictMixin):
        y: int

    @dataclass(slots=True)
    class DataClass(BaseDataClass):
        x: int

    instance = DataClass(x=123, y=456)
    assert DataClass.from_dict({"x": 123, "y": 456}) == instance
    assert instance.to_dict() == {"x": 123, "y": 456}


def test_no_field_options_in_inherited_dataclass_with_slots_and_default():
    @dataclass
    class BaseDataClass(DataClassDictMixin):
        y: int

    @dataclass(slots=True)
    class DataClass(BaseDataClass):
        x: int = 123

    instance = DataClass(y=456)
    assert DataClass.from_dict({"y": 456}) == instance
    assert instance.to_dict() == {"x": 123, "y": 456}


# ==== mashumaro-3.13.1/tests/test_timezones.py ====
from dataclasses import dataclass
from datetime import timedelta, timezone

import pytest

from mashumaro import DataClassDictMixin

time_zones = [
    (timezone(timedelta(hours=-12)), "UTC-12:00"),  # Baker Island
    (timezone(timedelta(hours=-11)), "UTC-11:00"),  # American Samoa
    (timezone(timedelta(hours=-10)), "UTC-10:00"),  # Hawaii
    (
        timezone(timedelta(hours=-9, minutes=-30)),
        "UTC-09:30",
    ),  # Marquesas Isl.
    (timezone(timedelta(hours=-9)), "UTC-09:00"),  # Gambier Isl.
    (timezone(timedelta(hours=-8)), "UTC-08:00"),  # Pitcairn Isl.
    (timezone(timedelta(hours=-7)), "UTC-07:00"),  # Sonora
    (timezone(timedelta(hours=-6)), "UTC-06:00"),  # Costa Rica
    (timezone(timedelta(hours=-5)), "UTC-05:00"),  # Colombia
    (timezone(timedelta(hours=-4)), "UTC-04:00"),  # Bolivia
    (timezone(timedelta(hours=-3, minutes=-30)), "UTC-03:30"),  # Canada
    (timezone(timedelta(hours=-3)), "UTC-03:00"),  # Argentina
    (timezone(timedelta(hours=-2)), "UTC-02:00"),  # South Georgia
    (timezone(timedelta(hours=-1)), "UTC-01:00"),  # Cape Verde
    (timezone(timedelta(hours=0)), "UTC"),  # Burkina Faso
    (timezone(timedelta(hours=1)), "UTC+01:00"),  # Algeria
    (timezone(timedelta(hours=2)), "UTC+02:00"),  # Botswana
    (timezone(timedelta(hours=3)), "UTC+03:00"),  # Moscow
    (timezone(timedelta(hours=3, minutes=30)), "UTC+03:30"),  # Iran
    (timezone(timedelta(hours=4)), "UTC+04:00"),  # Armenia
    (timezone(timedelta(hours=4, minutes=30)), "UTC+04:30"),  # Afghanistan
    (timezone(timedelta(hours=5)), "UTC+05:00"),  # Maldives
    (timezone(timedelta(hours=5, minutes=30)), "UTC+05:30"),  # India
    (timezone(timedelta(hours=5, minutes=45)), "UTC+05:45"),  # Nepal
    (timezone(timedelta(hours=6)), "UTC+06:00"),  # Bangladesh
    (timezone(timedelta(hours=6, minutes=30)), "UTC+06:30"),  # Myanmar
    (timezone(timedelta(hours=7)), "UTC+07:00"),  # Cambodia
    (timezone(timedelta(hours=8)), "UTC+08:00"),  # Hong Kong
    (timezone(timedelta(hours=8, minutes=45)), "UTC+08:45"),  # Eucla
    (timezone(timedelta(hours=9)), "UTC+09:00"),  # Japan
    (timezone(timedelta(hours=9, minutes=30)), "UTC+09:30"),  # N. Australia
    (timezone(timedelta(hours=10)), "UTC+10:00"),  # Queensland
    (timezone(timedelta(hours=10, minutes=30)), "UTC+10:30"),  # Australia
    (timezone(timedelta(hours=11)), "UTC+11:00"),  # Vanuatu
    (timezone(timedelta(hours=12)), "UTC+12:00"),  # Nauru
    (timezone(timedelta(hours=12, minutes=45)), "UTC+12:45"),  # New Zealand
    (timezone(timedelta(hours=13)), "UTC+13:00"),  # Tonga
    (timezone(timedelta(hours=14)), "UTC+14:00"),  # Kiribati
]


@pytest.mark.parametrize(["tz", "tz_string"], time_zones)
def test_timezones(tz, tz_string):
    @dataclass
    class DataClass(DataClassDictMixin):
        x: timezone

    assert DataClass(tz).to_dict() == {"x": tz_string}
    assert DataClass.from_dict({"x": tz_string}) == DataClass(tz)


def test_invalid_timezone():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: timezone

    with pytest.raises(ValueError):
        DataClass.from_dict({"x": "UTC+03:00:01"})


# ==== mashumaro-3.13.1/tests/test_toml.py ====
from dataclasses import dataclass
from datetime import date, datetime, time
from typing import List, Optional

import tomli_w

from mashumaro.config import ADD_DIALECT_SUPPORT, BaseConfig

try:
    import tomllib
except ModuleNotFoundError:
    import tomli as tomllib  # type: ignore

from mashumaro.dialect import Dialect
from mashumaro.mixins.toml import DataClassTOMLMixin

serialization_strategy = {
    datetime: {
        "serialize": lambda dt: dt.strftime("%Y/%m/%d/%H/%M/%S"),
        "deserialize": lambda s: datetime.strptime(s, "%Y/%m/%d/%H/%M/%S"),
    },
    date: {
        "serialize": date.toordinal,
        "deserialize": date.fromordinal,
    },
    time: {
        "serialize": lambda t: t.strftime("%H/%M/%S"),
        "deserialize": lambda s: datetime.strptime(s, "%H/%M/%S").time(),
    },
}


class MyDialect(Dialect):
    serialization_strategy = serialization_strategy


@dataclass
class InnerDataClassWithOptionalField(DataClassTOMLMixin):
    x: Optional[int] = None


def test_data_class_toml_mixin():
    assert DataClassTOMLMixin.from_toml("") is None
    assert DataClassTOMLMixin().to_toml() is None


def test_to_toml():
    @dataclass
    class DataClass(DataClassTOMLMixin):
        x: List[int]

    dumped = tomli_w.dumps({"x": [1, 2, 3]})
    assert DataClass([1, 2, 3]).to_toml() == dumped


def test_from_toml():
    @dataclass
    class DataClass(DataClassTOMLMixin):
        x: List[int]

    dumped = tomli_w.dumps({"x": [1, 2, 3]})
    assert DataClass.from_toml(dumped) == DataClass([1, 2, 3])


def test_toml_with_serialization_strategy():
    @dataclass
    class DataClass(DataClassTOMLMixin):
        datetime: List[datetime]
        date: List[date]
        time: List[time]

        class Config(BaseConfig):
            serialization_strategy = serialization_strategy

    _datetime = datetime(2022, 10, 12, 12, 54, 30)
    _date = date(2022, 10, 12)
    _time = time(12, 54, 30)
    _datetime_dumped = _datetime.strftime("%Y/%m/%d/%H/%M/%S")
    _date_dumped = _date.toordinal()
    _time_dumped = _time.strftime("%H/%M/%S")
    instance = DataClass([_datetime], [_date], [_time])
    dict_dumped = {
        "datetime": [_datetime_dumped],
        "date": [_date_dumped],
        "time": [_time_dumped],
    }
    toml_dumped = tomli_w.dumps(
        {
            "datetime": [_datetime_dumped],
            "date": [_date_dumped],
            "time": [_time_dumped],
        }
    )
    assert DataClass.from_dict(dict_dumped) == instance
    assert DataClass.from_toml(toml_dumped) == instance
    assert instance.to_dict() == dict_dumped
    assert instance.to_toml() == toml_dumped


def test_toml_with_dialect():
    @dataclass
    class DataClass(DataClassTOMLMixin):
        datetime: List[datetime]
        date: List[date]
        time: List[time]

        class Config(BaseConfig):
            dialect = MyDialect

    _datetime = datetime(2022, 10, 12, 12, 54, 30)
    _date = date(2022, 10, 12)
    _time = time(12, 54, 30)
    _datetime_dumped = _datetime.strftime("%Y/%m/%d/%H/%M/%S")
    _date_dumped = _date.toordinal()
    _time_dumped = _time.strftime("%H/%M/%S")
    instance = DataClass([_datetime], [_date], [_time])
    dict_dumped = {
        "datetime": [_datetime_dumped],
        "date": [_date_dumped],
        "time": [_time_dumped],
    }
    toml_dumped = tomli_w.dumps(
        {
            "datetime": [_datetime_dumped],
            "date": [_date_dumped],
            "time": [_time_dumped],
        }
    )
    assert DataClass.from_dict(dict_dumped) == instance
    assert DataClass.from_toml(toml_dumped) == instance
    assert instance.to_dict() == dict_dumped
    assert instance.to_toml() == toml_dumped


def test_toml_with_dialect_support():
    @dataclass
    class DataClass(DataClassTOMLMixin):
        datetime: List[datetime]
        date: List[date]
        time: List[time]

        class Config(BaseConfig):
            code_generation_options = [ADD_DIALECT_SUPPORT]

    _datetime = datetime(2022, 10, 12, 12, 54, 30)
    _date = date(2022, 10, 12)
    _time = time(12, 54, 30)
    _datetime_dumped = _datetime.strftime("%Y/%m/%d/%H/%M/%S")
    _date_dumped = _date.toordinal()
    _time_dumped = _time.strftime("%H/%M/%S")
    instance = DataClass([_datetime], [_date], [_time])
    dict_dumped = {
        "datetime": [_datetime.isoformat()],
        "date": [_date.isoformat()],
        "time": [_time.isoformat()],
    }
    dict_dumped_dialect = {
        "datetime": [_datetime_dumped],
        "date": [_date_dumped],
        "time": [_time_dumped],
    }
    toml_dumped = tomli_w.dumps(
        {"datetime": [_datetime], "date": [_date], "time": [_time]}
    )
    toml_dumped_dialect = tomli_w.dumps(
        {
            "datetime": [_datetime_dumped],
            "date": [_date_dumped],
            "time": [_time_dumped],
        }
    )
    assert DataClass.from_dict(dict_dumped) == instance
    assert (
        DataClass.from_dict(dict_dumped_dialect, dialect=MyDialect)
        == instance
    )
    assert DataClass.from_toml(toml_dumped) == instance
    assert (
        DataClass.from_toml(toml_dumped_dialect, dialect=MyDialect)
        == instance
    )
    assert instance.to_dict() == dict_dumped
    assert instance.to_dict(dialect=MyDialect) == dict_dumped_dialect
    assert instance.to_toml() == toml_dumped
    assert instance.to_toml(dialect=MyDialect) == toml_dumped_dialect


def test_toml_omit_none():
    @dataclass
    class DataClass(DataClassTOMLMixin):
        x: Optional[InnerDataClassWithOptionalField] = None
        y: Optional[int] = None

    obj = DataClass()
    assert obj.to_dict() == {"x": None, "y": None}
    assert obj.to_toml() == ""

    obj = DataClass(InnerDataClassWithOptionalField())
    assert obj.to_dict() == {"x": {"x": None}, "y": None}
    assert obj.to_toml() == "[x]\n"
    assert DataClass.from_toml("[x]\n") == obj


# ==== mashumaro-3.13.1/tests/test_types.py ====
from dataclasses import dataclass
from datetime import date
from typing import Any, Generic, Iterator, Mapping, Tuple, TypeVar

import pytest
from typing_extensions import Self

from mashumaro import DataClassDictMixin
from mashumaro.core.const import PEP_585_COMPATIBLE
from mashumaro.exceptions import UnserializableField
from mashumaro.types import Alias, SerializableType
from tests.entities import GenericSerializableList, GenericSerializableWrapper

XT = TypeVar("XT")
YT = TypeVar("YT")


class MySerializableType(SerializableType):
    def __init__(self, value):
        self.value = value

    def _serialize(self):
        return self.value.isoformat()

    @classmethod
    def _deserialize(cls, value):
        return cls(date.fromisoformat(value))


class MyAnnotatedSerializableType(SerializableType, use_annotations=True):
    def __init__(self, value: date):
        self.value = value

    def _serialize(self) -> date:
        return self.value

    @classmethod
    def _deserialize(cls, value: date):
        return cls(value)


class MyAnnotatedSerializableTypeWithoutAnnotationsInDeserialize(
    SerializableType, use_annotations=True
):
    def __init__(self, value):  # pragma: no cover
        self.value = value

    def _serialize(self) -> date:  # pragma: no cover
        return self.value

    @classmethod
    def _deserialize(cls, value):  # pragma: no cover
        return cls(value)


class MyAnnotatedSerializableTypeWithoutAnnotationsInSerialize(
    SerializableType, use_annotations=True
):
    def __init__(self, value):  # pragma: no cover
        self.value = value

    def _serialize(self):  # pragma: no cover
        return self.value

    @classmethod
    def _deserialize(cls, value: date):  # pragma: no cover
        return cls(value)


class MyAnnotatedGenericSerializableType(
    Generic[XT, YT], SerializableType, use_annotations=True
):
    def __init__(self, value: Tuple[XT, YT]):
        self.value = value

    def _serialize(self) -> Tuple[YT, XT]:
        return tuple(reversed(self.value))

    @classmethod
    def _deserialize(cls, value: Tuple[XT, YT]):
        return cls(value)


class MyAnnotatedGenericSerializableTypeWithMixedTypeVars(
    Generic[XT], SerializableType, use_annotations=True
):
    def __init__(self, value):
        self.value = value

    def _serialize(self) -> Tuple[int, XT, float]:
        return self.value

    @classmethod
    def _deserialize(cls, value: Tuple[int, XT, float]):
        return cls(value)


if PEP_585_COMPATIBLE:

    class MyAnnotatedGenericPEP585SerializableType(
        Generic[XT, YT], SerializableType, use_annotations=True
    ):
        def __init__(self, value: tuple[XT, YT]):
            self.value = value

        def _serialize(self) -> tuple[YT, XT]:
            return tuple(reversed(self.value))

        @classmethod
        def _deserialize(cls, value: tuple[XT, YT]):
            return cls(value)


class MyMapping(Mapping[XT, YT], SerializableType, use_annotations=True):
    def __init__(self, value: Mapping[Any, Any]):
        self.value = value

    def _serialize(self) -> Mapping[XT, YT]:
        return self.value

    @classmethod
    def _deserialize(cls, mapping: Mapping[XT, YT]):
        return cls(mapping)

    def __getitem__(self, __k: XT) -> YT:
        return self.value[__k]

    def __len__(self) -> int:  # pragma: no cover
        return len(self.value)

    def __iter__(self) -> Iterator[XT]:
        return iter(self.value)

    def __repr__(self):  # pragma: no cover
        return f"<MyMapping {self.value}>"


class MyAnnotatedUserGenericSerializableType(
    Generic[XT, YT], SerializableType, use_annotations=True
):
    def __init__(self, value: MyMapping[XT, YT]):
        self.value = value

    def _serialize(self) -> MyMapping[XT, YT]:
        return self.value

    @classmethod
    def _deserialize(cls, value: MyMapping[XT, YT]):
        return cls(value)

    def __repr__(self):  # pragma: no cover
        return f"<MyAnnotatedUserGenericSerializableType {self.value}>"


class MySelfSerializableType(SerializableType, use_annotations=True):
    def _serialize(self) -> Self:
        return self

    @classmethod
    def _deserialize(cls, value: Self) -> Self:
        return value


def test_generic_serializable_list_int():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: GenericSerializableList[int]

    obj = DataClass(x=GenericSerializableList([1, 2, 3]))
    assert DataClass.from_dict({"x": [3, 4, 5]}) == obj
    assert obj.to_dict() == {"x": [3, 4, 5]}


def test_generic_serializable_list_str():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: GenericSerializableList[str]

    obj = DataClass(x=GenericSerializableList(["a", "b", "c"]))
    assert DataClass.from_dict({"x": ["_a", "_b", "_c"]}) == obj
    assert obj.to_dict() == {"x": ["_a", "_b", "_c"]}


def test_generic_serializable_wrapper_with_type_from_another_module():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: GenericSerializableWrapper[date]

    obj = DataClass(x=GenericSerializableWrapper(date(2022, 12, 8)))
    assert DataClass.from_dict({"x": "2022-12-08"}) == obj
    assert obj.to_dict() == {"x": "2022-12-08"}


def test_simple_serializable_type():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MySerializableType

    obj = DataClass.from_dict({"x": "2022-12-07"})
    assert obj.x.value == date(2022, 12, 7)
    assert obj.to_dict() == {"x": "2022-12-07"}


def test_annotated_serializable_type():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedSerializableType

    obj = DataClass.from_dict({"x": "2022-12-07"})
    assert obj.x.value == date(2022, 12, 7)
    assert obj.to_dict() == {"x": "2022-12-07"}


def test_annotated_serializable_type_without_annotations_in_deserialize():
    with pytest.raises(UnserializableField) as e:

        @dataclass
        class _(DataClassDictMixin):
            x: MyAnnotatedSerializableTypeWithoutAnnotationsInDeserialize

    assert (
        e.value.msg == 'Method _deserialize must have annotated "value" argument'
    )


def test_annotated_serializable_type_without_annotations_in_serialize():
    with pytest.raises(UnserializableField) as e:

        @dataclass
        class _(DataClassDictMixin):
            x: MyAnnotatedSerializableTypeWithoutAnnotationsInSerialize

    assert e.value.msg == "Method _serialize must have return annotation"


def test_annotated_generic_serializable_type():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedGenericSerializableType

    obj = DataClass.from_dict({"x": ["2022-12-07", "3.14"]})
    assert obj.x.value == ("2022-12-07", "3.14")
    assert obj.to_dict() == {"x": ["3.14", "2022-12-07"]}

    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedGenericSerializableType[date, float]

    obj = DataClass.from_dict({"x": ["2022-12-07", "3.14"]})
    assert obj.x.value == (date(2022, 12, 7), 3.14)
    assert obj.to_dict() == {"x": [3.14, "2022-12-07"]}

    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedGenericSerializableType[date, YT]

    obj = DataClass.from_dict({"x": ["2022-12-07", "3.14"]})
    assert obj.x.value == (date(2022, 12, 7), "3.14")
    assert obj.to_dict() == {"x": ["3.14", "2022-12-07"]}


@pytest.mark.skipif(not PEP_585_COMPATIBLE, reason="requires python 3.9+")
def test_annotated_generic_pep585_serializable_type():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedGenericPEP585SerializableType
    obj = DataClass.from_dict({"x": ["2022-12-07", "3.14"]})
    assert obj.x.value == ("2022-12-07", "3.14")
    assert obj.to_dict() == {"x": ["3.14", "2022-12-07"]}

    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedGenericSerializableType[date, float]

    obj = DataClass.from_dict({"x": ["2022-12-07", "3.14"]})
    assert obj.x.value == (date(2022, 12, 7), 3.14)
    assert obj.to_dict() == {"x": [3.14, "2022-12-07"]}

    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedGenericSerializableType[date, YT]

    obj = DataClass.from_dict({"x": ["2022-12-07", "3.14"]})
    assert obj.x.value == (date(2022, 12, 7), "3.14")
    assert obj.to_dict() == {"x": ["3.14", "2022-12-07"]}


def test_annotated_user_generic_serializable_type():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedUserGenericSerializableType

    obj = DataClass.from_dict({"x": {"2022-12-07": "3.14"}})
    assert type(obj.x.value) is MyMapping
    assert dict(obj.x.value) == {"2022-12-07": "3.14"}
    assert obj.to_dict() == {"x": {"2022-12-07": "3.14"}}

    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedUserGenericSerializableType[date, float]

    obj = DataClass.from_dict({"x": {"2022-12-07": "3.14"}})
    assert type(obj.x.value) is MyMapping
    assert dict(obj.x.value) == {date(2022, 12, 7): 3.14}
    assert obj.to_dict() == {"x": {"2022-12-07": 3.14}}

    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedUserGenericSerializableType[date, YT]

    obj = DataClass.from_dict({"x": {"2022-12-07": "3.14"}})
    assert type(obj.x.value) is MyMapping
    assert dict(obj.x.value) == {date(2022, 12, 7): "3.14"}
    assert obj.to_dict() == {"x": {"2022-12-07": "3.14"}}


def test_serializable_type_inheritance():
    class MySerializableType2(MySerializableType, use_annotations=True):
        pass

    assert MySerializableType2.__use_annotations__

    class MySerializableType3(MySerializableType2):
        pass

    assert MySerializableType3.__use_annotations__

    class MySerializableType4(MySerializableType3, use_annotations=False):
        pass

    assert not MySerializableType4.__use_annotations__


def test_serializable_type_with_self():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MySelfSerializableType

    obj = DataClass(MySelfSerializableType())
    assert obj.to_dict() == {"x": obj.x}
    assert DataClass.from_dict({"x": obj.x}) == obj


def test_annotated_generic_serializable_type_with_mixed_type_vars():
    @dataclass
    class DataClass(DataClassDictMixin):
        x: MyAnnotatedGenericSerializableTypeWithMixedTypeVars[date]

    obj = DataClass.from_dict({"x": ["1", "2022-05-29", "2.3"]})
    # assert obj.x.value == (1, date(2022, 5, 29), 2.3)
    assert obj.to_dict() == {"x": [1, "2022-05-29", 2.3]}


def test_alias():
    x1 = Alias("x")
    x2 = Alias("x")
    y = Alias("y")
    assert x1 == x2
    assert hash(x1) == hash(x2)
    assert x1 != y
    assert x1 != "x"
    assert hash(x1) != hash(y)
    assert str(y) == "Alias(name='y')"
    assert repr(y) == "Alias(name='y')"


# ==== mashumaro-3.13.1/tests/test_union.py ====
from dataclasses import dataclass
from itertools import permutations
from typing import Any, Dict, List, Union

import pytest

from mashumaro import DataClassDictMixin
from mashumaro.codecs.basic import encode
from tests.utils import same_types


@dataclass
class UnionTestCase:
    type: Any
    dumped: Any
    loaded: Any


@pytest.mark.parametrize(
    "test_case",
    [
        UnionTestCase(Union[int, str], 1, 1),
        UnionTestCase(Union[int, str], "a", "a"),
        UnionTestCase(Union[Dict[int, int], List[int]], {1: 2}, {1: 2}),
        UnionTestCase(Union[Dict[int, int], List[int]], [1], [1]),
        UnionTestCase(Union[str, List[str]], ["a"], ["a"]),
        UnionTestCase(Union[str, List[str]], "abc", "abc"),
    ],
)
def test_union(test_case):
    @dataclass
    class DataClass(DataClassDictMixin):
        x: test_case.type

    instance = DataClass(x=test_case.loaded)
    assert DataClass.from_dict({"x": test_case.dumped}) == instance
    assert instance.to_dict() == {"x": test_case.dumped}


def test_union_encoding():
    for variants in permutations((int, float, str, bool)):
        for value in (1, 2.0, 3.1, "4", "5.0", True, False):
            encoded = encode(value, Union[variants])
            assert value == encoded
            assert same_types(value, encoded)


# TODO: Convert this to a normal test
# def test_str_bool_union_warning():
#     with pytest.warns(UserWarning):
#
#         @dataclass
#         class _(DataClassDictMixin):
#             x: Union[str, bool]


# ==== mashumaro-3.13.1/tests/test_yaml.py ====
from dataclasses import dataclass
from typing import List

import yaml

from mashumaro.mixins.yaml import DataClassYAMLMixin


def test_to_yaml():
    @dataclass
    class DataClass(DataClassYAMLMixin):
        x: List[int]

    dumped = yaml.dump({"x": [1, 2, 3]})
    assert DataClass([1, 2, 3]).to_yaml() == dumped


def test_from_yaml():
    @dataclass
    class DataClass(DataClassYAMLMixin):
        x: List[int]

    dumped = yaml.dump({"x": [1, 2, 3]})
    assert DataClass.from_yaml(dumped) == DataClass([1, 2, 3])


# ==== mashumaro-3.13.1/tests/utils.py ====
from typing import ChainMap, Deque, List, Mapping, Set, Tuple


def same_types(first, second):
    if isinstance(first, (List, Deque, Tuple)):
        return all(map(lambda x: same_types(*x), zip(first, second)))
    elif isinstance(first, ChainMap):
        return all(map(lambda x: same_types(*x), zip(first.maps, second.maps)))
    elif isinstance(first, Mapping):
        return all(
            map(lambda x: same_types(*x), zip(first.keys(), second.keys()))
        ) and all(
            map(lambda x: same_types(*x), zip(first.values(), second.values()))
        )
    elif isinstance(first, Set):
        return all(
            map(lambda x: same_types(*x), zip(sorted(first), sorted(second)))
        )
    else:
        return type(first) is type(second)